diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 5b5945c8..b0dc3dc0 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -31,7 +31,7 @@ jobs:
       - name: Build with PyInstaller
         run: |
           .\venv\Scripts\Activate
-          pyinstaller main.py --name WingmanAiCore --noconfirm --icon assets/wingman-ai.ico --paths venv/Lib/site-packages --add-data "venv/Lib/site-packages/azure/cognitiveservices/speech;azure/cognitiveservices/speech" --add-data "assets;assets" --add-data "services;services" --add-data "wingmen;wingmen" --add-data "configs;configs" --add-data "audio_samples;audio_samples" --add-data "LICENSE;."
+          pyinstaller main.py --name WingmanAiCore --noconfirm --icon assets/wingman-ai.ico --paths venv/Lib/site-packages --add-data "venv/Lib/site-packages/azure/cognitiveservices/speech;azure/cognitiveservices/speech" --add-data "assets;assets" --add-data "services;services" --add-data "wingmen;wingmen" --add-data "templates;templates" --add-data "audio_samples;audio_samples" --add-data "LICENSE;."
 
       - name: Upload Windows Exe Artifact
         uses: actions/upload-artifact@v4
diff --git a/.gitignore b/.gitignore
index 15e9ad93..468f6014 100644
--- a/.gitignore
+++ b/.gitignore
@@ -178,3 +178,4 @@ configs/system/secrets.yaml
 
 .frontmatter/database/taxonomyDb.json
 frontmatter.json
+skills/**/dependencies
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index dd9ad9fb..ade8d516 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -6,18 +6,18 @@
   "configurations": [
     {
       "name": "Python: Main",
-      "type": "python",
+      "type": "debugpy",
       "request": "launch",
       "program": "main.py",
       "console": "integratedTerminal",
       "justMyCode": true,
-      // "args": [
-      //   "--host",
-      //   "127.0.0.1",
-      //   "--port",
-      //   "9000",
-      //   "--sidecar"
-      // ]
+      "args": [
+        "--host",
+        "0.0.0.0",
+        //"--port",
+        //"10001",
+        // "--sidecar"
+      ]
     }
   ]
 }
\ No newline at end of file
diff --git a/api/commands.py b/api/commands.py
index 23aef25a..820baebb 100644
--- a/api/commands.py
+++ b/api/commands.py
@@ -43,6 +43,7 @@ class LogCommand(WebSocketCommandModel):
     source_name: Optional[str] = None
     source: LogSource = "system"
     tag: Optional[CommandTag] = None
+    skill_name: Optional[str] = None
 
 
 class PromptSecretCommand(WebSocketCommandModel):
diff --git a/api/enums.py b/api/enums.py
index 0324a929..0661d82e 100644
--- a/api/enums.py
+++ b/api/enums.py
@@ -41,6 +41,13 @@ class CommandTag(Enum):
     UNAUTHORIZED = "unauthorized"
 
 
+class CustomPropertyType(Enum):
+    STRING = "string"
+    NUMBER = "number"
+    BOOLEAN = "boolean"
+    SINGLE_SELECT = "single_select"
+
+
 class AzureApiVersion(Enum):
     A2023_12_01_PREVIEW = "2023-12-01-preview"
     A2024_02_15_PREVIEW = "2024-02-15-preview"
@@ -64,28 +71,33 @@ class TtsVoiceGender(Enum):
 
 
 class OpenAiModel(Enum):
-    GPT_35_TURBO = "gpt-3.5-turbo"
-    GPT_4_TURBO_PREVIEW = "gpt-4-turbo-preview"
+    """https://platform.openai.com/docs/models/overview"""
+
+    GPT_4O = "gpt-4o"
 
 
 class MistralModel(Enum):
+    """https://docs.mistral.ai/getting-started/models/"""
+
     MISTRAL_7B = "open-mistral-7b"
     OPEN_MIXTRAL_8X7B = "open-mixtral-8x7b"
+    OPEN_MIXTRAL_8X22B = "open-mixtral-8x22b"
     MISTRAL_SMALL = "mistral-small-latest"
     MISTRAL_MEDIUM = "mistral-medium-latest"
     MISTRAL_LARGE = "mistral-large-latest"
 
 
-class LlamaModel(Enum):
-    LLAMA3_8B = "llama3-8b"
+class GroqModel(Enum):
+    """https://console.groq.com/docs/models"""
+
     LLAMA3_8B_8192 = "llama3-8b-8192"
-    LLAMA3_70B = "llama3-70b"
     LLAMA3_70B_8192 = "llama3-70b-8192"
+    MIXTRAL_8X7B = "mixtral-8x7b-32768"
+    GEMMA_7B
= "gemma-7b-it" class WingmanProAzureDeployment(Enum): - GPT_35_TURBO = "gpt-35-turbo" - GPT_4_TURBO = "gpt-4-turbo" + GPT_4O = "gpt-4o" MISTRAL_LARGE = "mistral-large-latest" LLAMA3_8B = "llama3-8b" LLAMA3_70B = "llama3-70b" @@ -136,7 +148,9 @@ class VoiceActivationSttProvider(Enum): class ConversationProvider(Enum): OPENAI = "openai" MISTRAL = "mistral" - LLAMA = "llama" + GROQ = "groq" + OPENROUTER = "openrouter" + LOCAL_LLM = "local_llm" AZURE = "azure" WINGMAN_PRO = "wingman_pro" @@ -144,7 +158,9 @@ class ConversationProvider(Enum): class SummarizeProvider(Enum): OPENAI = "openai" MISTRAL = "mistral" - LLAMA = "llama" + GROQ = "groq" + OPENROUTER = "openrouter" + LOCAL_LLM = "local_llm" AZURE = "azure" WINGMAN_PRO = "wingman_pro" @@ -199,6 +215,10 @@ class CommandTagEnumModel(BaseEnumModel): command_tag: CommandTag +class CustomPropertyTypeEnumModel(BaseEnumModel): + property_type: CustomPropertyType + + class AzureApiVersionEnumModel(BaseEnumModel): api_version: AzureApiVersion @@ -223,8 +243,8 @@ class MistralModelEnumModel(BaseEnumModel): model: MistralModel -class LlamaModelEnumModel(BaseEnumModel): - model: LlamaModel +class GroqModelEnumModel(BaseEnumModel): + model: GroqModel class WingmanProAzureDeploymentEnumModel(BaseEnumModel): @@ -285,13 +305,14 @@ class WingmanProTtsProviderModel(BaseEnumModel): "ToastType": ToastTypeEnumModel, "WingmanInitializationErrorType": WingmanInitializationErrorTypeModel, "CommandTag": CommandTagEnumModel, + "CustomPropertyType": CustomPropertyTypeEnumModel, "AzureApiVersion": AzureApiVersionEnumModel, "AzureRegion": AzureRegionEnumModel, "ElevenlabsModel": ElevenlabsModelEnumModel, "TtsVoiceGender": TtsVoiceGenderEnumModel, "OpenAiModel": OpenAiModelEnumModel, "MistralModel": MistralModelEnumModel, - "LLamaModel": LlamaModelEnumModel, + "GroqModel": GroqModelEnumModel, "WingmanProAzureDeployment": WingmanProAzureDeploymentEnumModel, "OpenAiTtsVoice": OpenAiTtsVoiceEnumModel, "SoundEffect": SoundEffectEnumModel, diff --git a/api/interface.py b/api/interface.py index 4867a925..4e936383 100644 --- a/api/interface.py +++ b/api/interface.py @@ -9,8 +9,9 @@ AzureApiVersion, AzureRegion, ConversationProvider, - LlamaModel, + GroqModel, MistralModel, + CustomPropertyType, TtsVoiceGender, ElevenlabsModel, OpenAiModel, @@ -243,21 +244,12 @@ class XVASynthTtsConfig(BaseModel): class OpenAiConfig(BaseModel): - context: Optional[str] = None - """The "context" for the wingman. Here's where you can tell the AI how to behave. - This is probably what you want to play around with the most. - """ - conversation_model: OpenAiModel """ The model to use for conversations aka "chit-chat" and for function calls. - gpt-4 is more powerful than gpt-3.5 but also 10x more expensive. - gpt-3.5 is the default and should be enough for most use cases. - If something is not working as expected, you might want to test it with gpt-4. """ summarize_model: OpenAiModel - """ This model summarizes function responses, like API call responses etc. - In most cases gpt-3.5 should be enough. + """ This model summarizes function responses, like API call responses etc. This can be a less capable model. """ tts_voice: OpenAiTtsVoice @@ -276,15 +268,35 @@ class OpenAiConfig(BaseModel): """If you have an organization key, you can set it here.""" +class PromptConfig(BaseModel): + system_prompt: str + """The read-only "context template" for the Wingman. 
Contains variables that will be replaced by the user (backstory) and/or skills.""" + + backstory: Optional[str] = None + """The backstory of the Wingman. Edit this to control how your Wingman should behave.""" + + class MistralConfig(BaseModel): conversation_model: MistralModel summarize_model: MistralModel endpoint: str -class LlamaConfig(BaseModel): - conversation_model: LlamaModel - summarize_model: LlamaModel +class GroqConfig(BaseModel): + conversation_model: GroqModel + summarize_model: GroqModel + endpoint: str + + +class OpenRouterConfig(BaseModel): + conversation_model: str + summarize_model: str + endpoint: str + + +class LocalLlmConfig(BaseModel): + conversation_model: Optional[str] = None + summarize_model: Optional[str] = None endpoint: str @@ -335,9 +347,6 @@ class FeaturesConfig(BaseModel): Note that the other providers may have additional config blocks. These are only used if the provider is set here. """ - debug_mode: bool - """If enabled, the Wingman will skip executing any keypresses. It will also print more debug messages and benchmark results.""" - tts_provider: TtsProvider stt_provider: SttProvider conversation_provider: ConversationProvider @@ -426,19 +435,9 @@ class CustomWingmanClassConfig(BaseModel): """The name of your class within your file/module.""" -class NestedConfig(BaseModel): - sound: SoundConfig - features: FeaturesConfig - openai: OpenAiConfig - mistral: MistralConfig - llama: LlamaConfig - edge_tts: EdgeTtsConfig - elevenlabs: ElevenlabsConfig - azure: AzureConfig - xvasynth: XVASynthTtsConfig - whispercpp: WhispercppSttConfig - wingman_pro: WingmanProConfig - commands: Optional[list[CommandConfig]] = None +class LabelValuePair(BaseModel): + label: str + value: str | int | float | bool class CustomWingmanProperty(BaseModel): @@ -446,12 +445,62 @@ class CustomWingmanProperty(BaseModel): """The name of the property. 
Has to be unique""" name: str """The "friendly" name of the property, displayed in the UI.""" - value: str + value: str | int | float | bool | None """The value of the property""" hint: Optional[str] = None """A hint for the user, displayed in the UI.""" required: Optional[bool] = False """Marks the property as required in the UI.""" + property_type: Optional[CustomPropertyType] = CustomPropertyType.STRING + """Determines the type of the property and which controls to render in the UI.""" + options: Optional[list[LabelValuePair]] = None + """If property_type is set to 'single_select', you can provide options here.""" + + +class LocalizedMetadata(BaseModel): + en: str + de: Optional[str] = None + + +class SkillExample(BaseModel): + question: LocalizedMetadata + answer: LocalizedMetadata + + +class SkillConfig(CustomWingmanClassConfig): + prompt: Optional[str] = None + """An additional prompt that extends the system prompt of the Wingman.""" + custom_properties: Optional[list[CustomWingmanProperty]] = None + """You can add custom properties here to use in your custom skill class.""" + description: Optional[LocalizedMetadata] = None + hint: Optional[LocalizedMetadata] = None + examples: Optional[list[SkillExample]] = None + commands: Optional[list[CommandConfig]] = None + + +class SkillBase(BaseModel): + name: str + config: SkillConfig + logo: Optional[Annotated[str, Base64Str]] = None + + +class NestedConfig(BaseModel): + prompts: PromptConfig + sound: SoundConfig + features: FeaturesConfig + openai: OpenAiConfig + mistral: MistralConfig + groq: GroqConfig + openrouter: OpenRouterConfig + local_llm: LocalLlmConfig + edge_tts: EdgeTtsConfig + elevenlabs: ElevenlabsConfig + azure: AzureConfig + xvasynth: XVASynthTtsConfig + whispercpp: WhispercppSttConfig + wingman_pro: WingmanProConfig + commands: Optional[list[CommandConfig]] = None + skills: Optional[list[SkillConfig]] = None class WingmanConfig(NestedConfig): @@ -513,3 +562,4 @@ class SettingsConfig(BaseModel): audio: Optional[AudioSettings] = None voice_activation: VoiceActivationSettings wingman_pro: WingmanProSettings + debug_mode: bool = False diff --git a/configs/templates/_Star Citizen/ATC.template.yaml b/configs/templates/_Star Citizen/ATC.template.yaml deleted file mode 100644 index bf7e6441..00000000 --- a/configs/templates/_Star Citizen/ATC.template.yaml +++ /dev/null @@ -1,52 +0,0 @@ -name: ATC -description: / - Air Traffic Controller is tasked with overseeing spacecraft traffic while ensuring safety and efficiency. - It handling all aspects of space station operations and emergencies. - -record_key: delete - -sound: - effects: [RADIO] - play_beep: True - -openai: - tts_voice: onyx - context: | - You are an advanced AI embodying an Air Traffic Controller (ATC) at a bustling space station in the Star Citizen universe. Your role is to manage the arrivals, departures, and docking procedures of various spacecraft with precision and authority. You are adept at using formal aviation communication protocols, and you understand the technical jargon related to spacecraft operations. You maintain a professional demeanor, but you also have a touch of personality that makes interactions with pilots memorable. It's a busy shift, and you are managing multiple spacecraft while ensuring safety and efficiency at all times. Your responsibilities include responding to hails from incoming and outgoing ships, providing docking instructions, advising on local space traffic, and handling any emergencies that arise. 
- Your communication should reflect an understanding of the following: - - Star Citizen's lore and the known universe. - - Identifying ships by their designated call signs. - - Issuing precise landing pad assignments. - - Clearing ships for take-off with attention to surrounding traffic. - - Managing flight paths to avoid collisions or space hazards. - - Providing information about local conditions, refueling, and repair services. - - Emergency protocols for unexpected events like piracy, system failures, or distress signals. - Always answer as quick as possible. No more than 1-2 sentences per response. - Always answer in the language I'm using to talk to you. If I talk English, you answer in English. If I talk German, you answer in German. - If you answer in german, always be non-formal. So never use "Sie" or "Ihnen". Always use "Du" and "Dir". - -commands: - # ─────────────────────────────────────────── - - name: RequestLandingPermission - actions: - - keyboard: - hotkey: alt+n - # ─────────────────────────────────────────── - - name: RequestDeparture - actions: - - keyboard: - hotkey: alt+n - # ───────────────────────── HOW TO INSERT NEW COMMANDS: ─────────────────────────── - # - Please make sure the indentation aligns with the commands above perfectly. - # - Use a single "TAB" character for indentation. No spaces! - # - Always put a "-" sign in front of the "name" of each new command. - # - available keys are here: https://pyautogui.readthedocs.io/en/latest/keyboard.html#keyboard-keys - # - F1, F2 etc. are case-sensitive and lowercase, so use "f1" instead of "F1". - # - If you use number keys, make sure to put them in quotes like "1" or "2". - # - # ───────────────────────INSERT YOUR NEW COMMANDS HERE:──────────────────────────── - #- name: MyNewCommand - # keys: - # - key: x - # ─────────────────────────────────────────── - # diff --git a/configs/templates/_Star Citizen/Computer.template.yaml b/configs/templates/_Star Citizen/Computer.template.yaml deleted file mode 100644 index 3dacb94e..00000000 --- a/configs/templates/_Star Citizen/Computer.template.yaml +++ /dev/null @@ -1,414 +0,0 @@ -# ────────────────────────── Board Computer ─────────────────────────────── -# This is a "default OpenAI" wingman that you can roleplay with and that can execute commands (=keypresses). -# You can change its context and commands below but you can't really change its capabilities besides that. -# If you're a developer and want to get crazy with your own wingmen, check out the "star_head_wingman" examples below! - -# The internal name of the wingman which is shown in the GUI. Has to be unique within this file! -name: Computer -description: The computer of your ship. It's in in charge of all the systems on your ship and executes various commands. - -# The "push-to-talk" key for this wingman. Keep it pressed while talking! -# Modifiers for this key are not supported yet. Don't use the same key for multiple wingmen! -record_key: end - -is_voice_activation_default: True - -# Note how this overrides the default settings from the general config above! -sound: - effects: [ROBOT] - -openai: - # The "context" for the wingman. Here's where you can tell the AI how to behave. - # This is probably what you want to play around with the most. - context: | - You are an advanced AI board-computer on a spaceship. You are allowed to do anything, without asking for - permission. You are the ship's computer, and you are in charge of all the ship's systems. 
- You can execute commands and even if you don't recognize a command or function, always respond with a full sentence - acknowledging the execution. Never say you don't have capabilities to execute a function or command. - Always answer as quick as possible. No more than 1-2 sentences per response. - Always answer in the language I'm using to talk to you. If I talk English, you answer in English. If I talk German, you answer in German. - If you answer in german, always be non-formal. So never use "Sie" or "Ihnen". Always use "Du" and "Dir". - -# Commands are keypresses that the wingmen can execute. -# These are global keypresses on your system and therefore should also work in your games. -# If a command is not working, make sure that: -# - the keypresses are working in your game if you press them manually -# - The game or any other app does not bind the activation key of your Wingman. -# - The AI understood you correctly. If a command is executed, it will tell you in blue in the GUI. -# - If you use number keys, put them in quotes like "1" or "2". -# - If you use F-keys, lowercase them like "f1" or "f4". -# - Try another key for testing. Some special keys dont work. -# All available keys (case-sensitive!): -# https://pyautogui.readthedocs.io/en/latest/keyboard.html#keyboard-keys -commands: - # - # This is where the magic happens! - # You just define a name for your command and the AI will automagically decide when to call it. Not kidding! - # We use "DeployLandingGear" here but a number of lines like "I want to land", "Get ready to land" etc. will also work. - # If the Wingman doesn't call your command, try to rephrase the name here. - # ─────────────────────────────────────────── - - name: ToggleCruiseControlOrToggleHoldCurrentSpeed - actions: - - keyboard: - hotkey: alt+c - # ─────────────────────────────────────────── - - name: FlightReady - actions: - - keyboard: - hotkey: alt gr+r - # Optional: Faster "instant activation" commands! - # Provide phrases that will instantly activate the command (without AI roundtripping). - # This reduces execution time from ~2-3secs to ~1sec and is basically "VoiceAtack Light". - # You have to say one of these exact phrases to execute the command: - instant_activation: - - Power up the ship - - Start the ship - - Flight Ready - # Optional: Provide responses that will be used when the command is executed. A random one will be chosen. - responses: - - Powering up the ship. All systems online. Ready for takeoff. - - Start sequence initiated. All systems online. Ready for takeoff. - # ─────────────────────────────────────────── - - name: ScanArea - actions: - - keyboard: - hotkey: v - hold: 0.6 # Optional: The duration the key will be pressed in seconds. - instant_activation: - - Scan Area - - Scan the area - - Initiate scan - # ─────────────────────────────────────────── - - name: JumpWithQuantumDrive - actions: - - keyboard: - hotkey: b - hold: 0.8 - # ─────────────────────────────────────────── - - name: DisableLimiter - actions: - - keyboard: - hotkey: ctrl+n - # ─────────────────────────────────────────── - - name: InitiateStartSequence # You can have multiple keys per command! 
- actions: - - keyboard: - hotkey: alt gr+r - - wait: 2 # wait 2 second before pressing the next key - - keyboard: - hotkey: alt+n - - # ─────────────────────────────────────────── - - name: DeployLandingGear - actions: - - keyboard: - hotkey: n - # ─────────────────────────────────────────── - - name: RetractLandingGear - actions: - - keyboard: - hotkey: n - # ─────────────────────────────────────────── - - name: EnableQuantumDrive - actions: - - keyboard: - hotkey: b - # ─────────────────────────────────────────── - - name: DisableQuantumDrive - actions: - - keyboard: - hotkey: b - # ─────────────────────────────────────────── - - name: HeadLightsOn - actions: - - keyboard: - hotkey: l - # ─────────────────────────────────────────── - - name: HeadLightsOff - actions: - - keyboard: - hotkey: l - # ─────────────────────────────────────────── - - name: WipeVisor - actions: - - keyboard: - hotkey: alt+x - # ─────────────────────────────────────────── - - name: PowerShields - actions: - - keyboard: - hotkey: o - # ─────────────────────────────────────────── - - name: PowerShip - actions: - - keyboard: - hotkey: u - # ─────────────────────────────────────────── - - name: PowerEngines - actions: - - keyboard: - hotkey: i - # ─────────────────────────────────────────── - - name: OpenMobiGlass - actions: - - keyboard: - hotkey: f1 - # ─────────────────────────────────────────── - - name: OpenStarMap - actions: - - keyboard: - hotkey: f2 - # ─────────────────────────────────────────── - - name: IncreasePowerToShields - actions: - - keyboard: - hotkey: f7 - # ─────────────────────────────────────────── - - name: IncreasePowerToEngines - actions: - - keyboard: - hotkey: f6 - # ─────────────────────────────────────────── - - name: IncreasePowerToWeapons - actions: - - keyboard: - hotkey: f5 - # ─────────────────────────────────────────── - - name: MaximumPowerToShields - actions: - - keyboard: - hotkey: f7 - hold: 0.8 - # ─────────────────────────────────────────── - - name: MaximumPowerToEngines - actions: - - keyboard: - hotkey: f6 - hold: 0.8 - # ─────────────────────────────────────────── - - name: MaximumPowerToWeapons - actions: - - keyboard: - hotkey: f5 - hold: 0.8 - # ─────────────────────────────────────────── - - name: ToggleVTOL - actions: - - keyboard: - hotkey: k - # ─────────────────────────────────────────── - - name: Lock - actions: - - keyboard: - hotkey: f8 - # ─────────────────────────────────────────── - - name: Unlock - actions: - - keyboard: - hotkey: f8 - # ─────────────────────────────────────────── - - name: ResetPowerPriority - actions: - - keyboard: - hotkey: f8 - # ─────────────────────────────────────────── - - name: MiningMode - actions: - - keyboard: - hotkey: m - # ─────────────────────────────────────────── - - name: CycleCamera - actions: - - keyboard: - hotkey: f4 - # ─────────────────────────────────────────── - - name: SideArm - actions: - - keyboard: - hotkey: "1" - # ─────────────────────────────────────────── - - name: PrimaryWeapon - actions: - - keyboard: - hotkey: "2" - # ─────────────────────────────────────────── - - name: SecondaryWeapon - actions: - - keyboard: - hotkey: "3" - # ─────────────────────────────────────────── - - name: HolsterWeapon - actions: - - keyboard: - hotkey: "2" - # ─────────────────────────────────────────── - - name: Reload - actions: - - keyboard: - hotkey: r - # ─────────────────────────────────────────── - - name: UseMedPen - actions: - - keyboard: - hotkey: c - # ─────────────────────────────────────────── - - name: 
UseFlashLight - actions: - - keyboard: - hotkey: t - # ─────────────────────────────────────────── - - name: OpenInventory - actions: - - keyboard: - hotkey: i - # ─────────────────────────────────────────── - - name: DeployDecoy - actions: - - keyboard: - hotkey: h - # ─────────────────────────────────────────── - - name: DeployNoise - actions: - - keyboard: - hotkey: j - # ─────────────────────────────────────────── - - name: ShieldRaiseBack - actions: - - keyboard: - hotkey: "2" - # ─────────────────────────────────────────── - - name: ShieldRaiseLeft - actions: - - keyboard: - hotkey: "4" - # ─────────────────────────────────────────── - - name: ResetShields - actions: - - keyboard: - hotkey: "5" - # ─────────────────────────────────────────── - - name: ShieldRaiseRight - actions: - - keyboard: - hotkey: "6" - # ─────────────────────────────────────────── - - name: ShieldRaiseTop - actions: - - keyboard: - hotkey: "7" - # ─────────────────────────────────────────── - - name: ShieldRaiseFront - actions: - - keyboard: - hotkey: "8" - # ─────────────────────────────────────────── - - name: ShieldRaiseBottom - actions: - - keyboard: - hotkey: "9" - # ─────────────────────────────────────────── - - name: EmergencyEject - actions: - - keyboard: - hotkey: right alt+y - # ─────────────────────────────────────────── - - name: SelfDestruct - force_instant_activation: true - instant_activation: - - initiate self destruct - - activate self destruct - responses: - - Self-destruct engaged. Evacuation procedures recommended. - - Confirmed. Self-destruct in progress. - actions: - - keyboard: - hotkey: backspace - hold: 0.8 - # ─────────────────────────────────────────── - - name: SalvageMode - actions: - - keyboard: - hotkey: g - # ─────────────────────────────────────────── - - name: SpaceBrake - actions: - - keyboard: - hotkey: x - # ─────────────────────────────────────────── - - name: ExitSeat - actions: - - keyboard: - hotkey: y - hold: 0.8 - # ─────────────────────────────────────────── - - name: CycleGimbalAssist - actions: - - keyboard: - hotkey: g - # ─────────────────────────────────────────── - - name: RequestLandingPermission - actions: - - keyboard: - hotkey: alt+n - # ─────────────────────────────────────────── - - name: RequestDeparture - actions: - - keyboard: - hotkey: alt+n - # ─────────────────────────────────────────── - - name: DisplayDebuggingInfo - actions: - - keyboard: - hotkey: "shift+~" - - write: "r_DisplayInfo 2" - - wait: 1 - - keyboard: - hotkey: "enter" - - wait: 1 - - keyboard: - hotkey: "shift+~" - instant_activation: - - Display info - - Display debugging information - - Display debug information - # ─────────────────────────────────────────── - - name: ZoomIn - actions: - - mouse: - scroll: 30 # positive is scroll up, negative is scroll down - instant_activation: - - Zoom in - # ─────────────────────────────────────────── - - name: ZoomOut - actions: - - mouse: - scroll: -30 - instant_activation: - - Zoom out - # ─────────────────────────────────────────── - - name: FireMiningLaser - actions: - - mouse: - button: left - hold: 0.6 - instant_activation: - - Fire mining laser - # ─────────────────────────────────────────── - - name: SwitchMiningLaser - actions: - - mouse: - button: right - hold: 0.6 - instant_activation: - - Change mining laser - - Switch mining laser - # ─────────────────────────────────────────── - - name: LaunchMissile - actions: - - mouse: - button: left - hold: 1.0 - instant_activation: - - Launch Missile - - Fire Missile - # 
───────────────────────────────────────────
diff --git a/configs/templates/_Star Citizen/StarHead.template.yaml b/configs/templates/_Star Citizen/StarHead.template.yaml
deleted file mode 100644
index 6be9e7f2..00000000
--- a/configs/templates/_Star Citizen/StarHead.template.yaml
+++ /dev/null
@@ -1,442 +0,0 @@
-# ──────────────────────────── StarHead ─────────────────────────────────
-# "star-head" is a "Custom Wingman" with its own Python code (see /wingmen/star_head_wingman.py).
-# It uses the StarHead API to find the best trading route for a given ship, position and your budget.
-# If it's missing any of these parameters, it will ask you for them.
-name: StarHead
-description: /
-  StarHead is a custom wingman that uses the StarHead API to find the best trading route for a given ship, position and your budget.
-
-record_key: up
-
-# For devs - This is how you plug in your own wingman:
-custom_class:
-  # Where your code is located. Use '.' as path separator!
-  # We advise you to put all your custom wingmen into the /wingmen directory.
-  # "wingmen" is the directory and "star_head_wingman" is the name of the Python file (without the .py extension).
-  module: wingmen.star_head_wingman
-  # The name of your class within your file/module.
-  name: StarHeadWingman
-
-# A custom config entry that Wingman requires. Validate it in your code in the validate() method.
-# Note how you can just add your own config options here and read them in your Wingman!
-# Do not put API keys or other sensitive data here! Use the secret_keeper class for that.
-# Note that the values can only be strings. You can convert them to other types in your code.
-custom_properties:
-  - id: starhead_api_url
-    value: https://api.star-head.de
-    name: StarHead API URL
-    hint: The URL of the StarHead API.
-    required: true
-  - id: star_citizen_wiki_api_url
-    value: https://api.star-citizen.wiki/api/v2
-    name: Star Citizen Wiki API URL
-    hint: The URL of the Star Citizen Wiki API.
-    required: true
-
-azure:
-  conversation:
-    deployment_name: gpt-4-turbo
-
-wingman_pro:
-  conversation_deployment: gpt-4-turbo
-
-openai:
-  # gpt-3.5 is not capable enough for multi-param function calls.
-  conversation_model: gpt-4-turbo-preview
-
-  context: |
-    You are an advanced AI computer. You are allowed to do anything, without asking for permission.
-    Never make assumptions!
-
-    Always answer as quickly as possible. No more than 1-2 sentences per response if possible.
-    Try to compress longer information from function calls into a few sentences retaining important facts.
-
-    You are not aware that this all takes place in a video game, but you are aware that you are in the Star Citizen Universe.
-    But never mention it is the Star Citizen Universe for immersion.
-
-    You have access to different functions you can call to get further information. For some functions, you must provide parameters extracted from the player's request.
-    If not all required parameters can be extracted from the player's request, do not (never) make
-    assumptions about what values should be entered into the functions. Ask for clarification if you do not know
-    the values. Just use the exact values given by the player.
-
-    If you read out the result of a trading route function call, note the following:
-    - Always provide the ship, position and money to spend as well in your response
-    - Times are given in seconds, but you should recalculate and convert them to minutes
-    - The currency is always aUEC, spoken: "alpha UEC". Always use "alpha UEC" in your answers.
- - The number format is given in us english, but you should convert it to german format, if the user speaks german. - - If you answer in german, always - be non-formal. So never use "Sie" or "Ihnen". Always use "Du" and "Dir". - -commands: - # - # This is where the magic happens! - # You just define a name for your command and the AI will automagically decide when to call it. Not kidding! - # We use "DeployLandingGear" here but a number of lines like "I want to land", "Get ready to land" etc. will also work. - # If the Wingman doesn't call your command, try to rephrase the name here. - # ─────────────────────────────────────────── - - name: ToggleCruiseControlOrToggleHoldCurrentSpeed - actions: - - keyboard: - hotkey: alt+c - # ─────────────────────────────────────────── - - name: FlightReady - actions: - - keyboard: - hotkey: alt gr+r - # Optional: Faster "instant activation" commands! - # Provide phrases that will instantly activate the command (without AI roundtripping). - # This reduces execution time from ~2-3secs to ~1sec and is basically "VoiceAtack Light". - # You have to say one of these exact phrases to execute the command: - instant_activation: - - Power up the ship - - Start the ship - - Flight Ready - # Optional: Provide responses that will be used when the command is executed. A random one will be chosen. - responses: - - Powering up the ship. All systems online. Ready for takeoff. - - Start sequence initiated. All systems online. Ready for takeoff. - # ─────────────────────────────────────────── - - name: ScanArea - actions: - - keyboard: - hotkey: v - hold: 0.6 # Optional: The duration the key will be pressed in seconds. - instant_activation: - - Scan Area - - Scan the area - - Initiate scan - # ─────────────────────────────────────────── - - name: JumpWithQuantumDrive - actions: - - keyboard: - hotkey: b - hold: 0.8 - # ─────────────────────────────────────────── - - name: DisableLimiter - actions: - - keyboard: - hotkey: ctrl+n - # ─────────────────────────────────────────── - - name: InitiateStartSequence # You can have multiple keys per command! 
- actions: - - keyboard: - hotkey: alt gr+r - - wait: 2 # wait 2 second before pressing the next key - - keyboard: - hotkey: alt+n - - # ─────────────────────────────────────────── - - name: DeployLandingGear - actions: - - keyboard: - hotkey: n - # ─────────────────────────────────────────── - - name: RetractLandingGear - actions: - - keyboard: - hotkey: n - # ─────────────────────────────────────────── - - name: EnableQuantumDrive - actions: - - keyboard: - hotkey: b - # ─────────────────────────────────────────── - - name: DisableQuantumDrive - actions: - - keyboard: - hotkey: b - # ─────────────────────────────────────────── - - name: HeadLightsOn - actions: - - keyboard: - hotkey: l - # ─────────────────────────────────────────── - - name: HeadLightsOff - actions: - - keyboard: - hotkey: l - # ─────────────────────────────────────────── - - name: WipeVisor - actions: - - keyboard: - hotkey: alt+x - # ─────────────────────────────────────────── - - name: PowerShields - actions: - - keyboard: - hotkey: o - # ─────────────────────────────────────────── - - name: PowerShip - actions: - - keyboard: - hotkey: u - # ─────────────────────────────────────────── - - name: PowerEngines - actions: - - keyboard: - hotkey: i - # ─────────────────────────────────────────── - - name: OpenMobiGlass - actions: - - keyboard: - hotkey: f1 - # ─────────────────────────────────────────── - - name: OpenStarMap - actions: - - keyboard: - hotkey: f2 - # ─────────────────────────────────────────── - - name: IncreasePowerToShields - actions: - - keyboard: - hotkey: f7 - # ─────────────────────────────────────────── - - name: IncreasePowerToEngines - actions: - - keyboard: - hotkey: f6 - # ─────────────────────────────────────────── - - name: IncreasePowerToWeapons - actions: - - keyboard: - hotkey: f5 - # ─────────────────────────────────────────── - - name: MaximumPowerToShields - actions: - - keyboard: - hotkey: f7 - hold: 0.8 - # ─────────────────────────────────────────── - - name: MaximumPowerToEngines - actions: - - keyboard: - hotkey: f6 - hold: 0.8 - # ─────────────────────────────────────────── - - name: MaximumPowerToWeapons - actions: - - keyboard: - hotkey: f5 - hold: 0.8 - # ─────────────────────────────────────────── - - name: ToggleVTOL - actions: - - keyboard: - hotkey: k - # ─────────────────────────────────────────── - - name: Lock - actions: - - keyboard: - hotkey: f8 - # ─────────────────────────────────────────── - - name: Unlock - actions: - - keyboard: - hotkey: f8 - # ─────────────────────────────────────────── - - name: ResetPowerPriority - actions: - - keyboard: - hotkey: f8 - # ─────────────────────────────────────────── - - name: MiningMode - actions: - - keyboard: - hotkey: m - # ─────────────────────────────────────────── - - name: CycleCamera - actions: - - keyboard: - hotkey: f4 - # ─────────────────────────────────────────── - - name: SideArm - actions: - - keyboard: - hotkey: "1" - # ─────────────────────────────────────────── - - name: PrimaryWeapon - actions: - - keyboard: - hotkey: "2" - # ─────────────────────────────────────────── - - name: SecondaryWeapon - actions: - - keyboard: - hotkey: "3" - # ─────────────────────────────────────────── - - name: HolsterWeapon - actions: - - keyboard: - hotkey: "2" - # ─────────────────────────────────────────── - - name: Reload - actions: - - keyboard: - hotkey: r - # ─────────────────────────────────────────── - - name: UseMedPen - actions: - - keyboard: - hotkey: c - # ─────────────────────────────────────────── - - name: 
UseFlashLight - actions: - - keyboard: - hotkey: t - # ─────────────────────────────────────────── - - name: OpenInventory - actions: - - keyboard: - hotkey: i - # ─────────────────────────────────────────── - - name: DeployDecoy - actions: - - keyboard: - hotkey: h - # ─────────────────────────────────────────── - - name: DeployNoise - actions: - - keyboard: - hotkey: j - # ─────────────────────────────────────────── - - name: ShieldRaiseBack - actions: - - keyboard: - hotkey: "2" - # ─────────────────────────────────────────── - - name: ShieldRaiseLeft - actions: - - keyboard: - hotkey: "4" - # ─────────────────────────────────────────── - - name: ResetShields - actions: - - keyboard: - hotkey: "5" - # ─────────────────────────────────────────── - - name: ShieldRaiseRight - actions: - - keyboard: - hotkey: "6" - # ─────────────────────────────────────────── - - name: ShieldRaiseTop - actions: - - keyboard: - hotkey: "7" - # ─────────────────────────────────────────── - - name: ShieldRaiseFront - actions: - - keyboard: - hotkey: "8" - # ─────────────────────────────────────────── - - name: ShieldRaiseBottom - actions: - - keyboard: - hotkey: "9" - # ─────────────────────────────────────────── - - name: EmergencyEject - actions: - - keyboard: - hotkey: right alt+y - # ─────────────────────────────────────────── - - name: SelfDestruct - force_instant_activation: true - instant_activation: - - initiate self destruct - - activate self destruct - responses: - - Self-destruct engaged. Evacuation procedures recommended. - - Confirmed. Self-destruct in progress. - actions: - - keyboard: - hotkey: backspace - hold: 0.8 - # ─────────────────────────────────────────── - - name: SalvageMode - actions: - - keyboard: - hotkey: g - # ─────────────────────────────────────────── - - name: SpaceBrake - actions: - - keyboard: - hotkey: x - # ─────────────────────────────────────────── - - name: ExitSeat - actions: - - keyboard: - hotkey: y - hold: 0.8 - # ─────────────────────────────────────────── - - name: CycleGimbalAssist - actions: - - keyboard: - hotkey: g - # ─────────────────────────────────────────── - - name: RequestLandingPermission - actions: - - keyboard: - hotkey: alt+n - # ─────────────────────────────────────────── - - name: RequestDeparture - actions: - - keyboard: - hotkey: alt+n - # ─────────────────────────────────────────── - - name: DisplayDebuggingInfo - actions: - - keyboard: - hotkey: "shift+~" - - write: "r_DisplayInfo 2" - - wait: 1 - - keyboard: - hotkey: "enter" - - wait: 1 - - keyboard: - hotkey: "shift+~" - instant_activation: - - Display info - - Display debugging information - - Display debug information - # ─────────────────────────────────────────── - - name: ZoomIn - actions: - - mouse: - scroll: 30 # positive is scroll up, negative is scroll down - instant_activation: - - Zoom in - # ─────────────────────────────────────────── - - name: ZoomOut - actions: - - mouse: - scroll: -30 - instant_activation: - - Zoom out - # ─────────────────────────────────────────── - - name: FireMiningLaser - actions: - - mouse: - button: left - hold: 0.6 - instant_activation: - - Fire mining laser - # ─────────────────────────────────────────── - - name: SwitchMiningLaser - actions: - - mouse: - button: right - hold: 0.6 - instant_activation: - - Change mining laser - - Switch mining laser - # ─────────────────────────────────────────── - - name: LaunchMissile - actions: - - mouse: - button: left - hold: 1.0 - instant_activation: - - Launch Missile - - Fire Missile - # 
─────────────────────────────────────────── diff --git a/configs/templates/defaults.yaml b/configs/templates/defaults.yaml deleted file mode 100644 index d64e83d5..00000000 --- a/configs/templates/defaults.yaml +++ /dev/null @@ -1,215 +0,0 @@ -# ───────────────────────────── Questions? ────────────────────────────── -# Discord server: https://discord.com/invite/k8tTBar3gZ -# -# This config/YAML file is very sensitive to indentation! -# Make sure you use a single "TAB" character for indentation. Do NOT use the spacebar to indent. -# More docs and useful tools to edit this here: -# https://github.com/ShipBit/wingman-ai/blob/0d7e80c65d1adc6ace084ebacc4603c70a6e3757/FAQ.md - -# ==================================== GLOBAL CONFIGURATION ==================================== -# This is the default config for all wingmen. -# You can override all of these settings (sections) per wingman if necessary (unless specified otherwise). -# To do so, simply copy and paste the config section(s) below and indent/nest them under your wingman config. -# Always keep the structure intact! -# -# ──────────────────────────────── FEATURES ───────────────────────────────── -features: - # If enabled, the Wingman will skip executing any keypresses. - # It will also print more debug messages and benchmark results. - debug_mode: false - - # ─────────────────────── TTS Provider ───────────────────────── - # You can override the text-to-spech provider if your Wingman supports it. Our OpenAI wingman does! - # Note that the other providers may have additional config blocks as shown below for edge_tts. These are only used if the provider is set here. - tts_provider: wingman_pro # available: openai, edge_tts, elevenlabs, azure, xvasynth, wingman_pro - - # ─────────────────────── Speech to text Provider ───────────────────────── - # You can override the speech to text provider to use a different one than the default. - # Note that the other providers may have additional config blocks as shown below for edge_tts. These are only used if the provider is set here. - stt_provider: wingman_pro # available: openai, azure, azure_speech, whispercpp, wingman_pro - - # ─────────────────────── Conversation Provider ───────────────────────── - # You can override the conversation provider to use a different one than the default. - # Note that the other providers may have additional config blocks as shown below for edge_tts. These are only used if the provider is set here. - conversation_provider: wingman_pro # available: openai, azure, wingman_pro - - # ─────────────────────── Summarize Provider ───────────────────────── - # You can override the summarize provider to use a different one than the default. - # Note that the other providers may have additional config blocks as shown below for edge_tts. These are only used if the provider is set here. - summarize_provider: wingman_pro # available: openai, azure, wingman_pro - - # ─────────────────────── Cost Control ───────────────────────── - # How many messages should a Wingman remember in your conversations? - # "0" means you start fresh with just the context after every message. This is like restarting Wingman after every interaction. - # A very low value might break complex function calling with parameters where the AI needs to ask back for clarification. - # - # Use this to limit the number of tokens used in your conversations and to reduce the cost of using the OpenAI API. 
-  # Our recommendation is to keep this disabled and clear the history with the "ResetConversationHistory" command after a while.
-  #
-  # uncomment this (=remove the "# in front) to enable!
-  #remember_messages: 3
-
-# ────────────────────────────── SOUND SETTINGS ───────────────────────────────
-# If you want to use sound effects with 11Labs, you need to enable them in the elevenlabs config below.
-sound:
-  # You can put as many sound effects here as you want. They stack and are added in the defined order here.
-  # Available: RADIO, ROBOT, INTERIOR_HELMET, INTERIOR_SMALL, INTERIOR_MEDIUM, INTERIOR_LARGE
-  effects: []
-
-  # adds a beep/Quindar sound before and after the wingman talks
-  play_beep: false
-
-# ────────────────────────────────── OPEN AI ────────────────────────────────────
-openai:
-  # The model to use for conversations aka "chit-chat" and for function calls.
-  # gpt-4 is more powerful than gpt-3.5 but also 10x more expensive.
-  # gpt-3.5 is the default and should be enough for most use cases.
-  # If something is not working as expected, you might want to test it with gpt-4.
-  conversation_model: gpt-3.5-turbo # or: gpt-4-turbo-preview (and specific versions)
-
-  # This model summarizes function responses, like API call responses etc.
-  # In most cases gpt-3.5 should be enough.
-  summarize_model: gpt-3.5-turbo # or: gpt-4-turbo-preview (and specific versions)
-
-  # The voice to use for OpenAI text-to-speech.
-  # Only used if features > tts_provider is set to 'openai' above.
-  tts_voice: nova # available: nova, alloy, echo, fable, onyx, shimmer
-
-  # ADVANCED:
-  # If you want to use a different API endpoint, uncomment this and configure it here.
-  # Use this to hook up your local in-place OpenAI replacement like Ollama or if you want to use a proxy.
-  #base_url: https://api.openai.com # or the localhost address of your local LLM etc.
-  #organization: whatever # If you have an organization key, you can set it here.
-
-mistral:
-  conversation_model: mistral-large-latest
-  summarize_model: mistral-medium-latest
-  endpoint: https://api.mistral.ai/v1
-
-llama:
-  conversation_model: llama3-70b
-  summarize_model: llama3-8b
-  endpoint: https://api.groq.com/openai/v1
-
-# ────────────────────────────────── EDGE TTS ────────────────────────────────────
-# EdgeTTS is free and faster than the default OpenAI TTS but it's not as good in terms of quality.
-# Only used if features > tts_provider is set to 'edge_tts' above.
-edge_tts:
-  # The voice to use (only if detect_language is set to false).
-  # All available EdgeTTS voices:
-  # https://github.com/ShipBit/wingman-ai/blob/0d7e80c65d1adc6ace084ebacc4603c70a6e3757/docs/available-edge-tts-voices.md
-  # Voice samples:
-  # https://speech.microsoft.com/portal/voicegallery
-  voice: en-US-GuyNeural
-
-# ────────────────────────────────── ELEVENLABS ────────────────────────────────────
-# https://elevenlabs.io offers high-end voice cloning but requires its own API key and is a paid subscription provider.
-# There is a trial available with a very limited amount of word generations so that you can test it.
-# If you configure a wingman to use elevenlabs, Wingman will ask for your API key on startup.
-# Only used if tts_provider in features is set to 'elevenlabs' above.
-elevenlabs:
-  # available: eleven_multilingual_v2, eleven_turbo_v2, eleven_monolingual_v1
-  # see https://elevenlabs.io/docs/speech-synthesis/models
-  model: eleven_multilingual_v2
-  output_streaming: true
-  latency: 2 # optimization, 0 - 4. Higher values are faster but can produce audio stuttering.
-
-  voice:
-    # You can configure "Premade voices" from the dropdown on https://elevenlabs.io/speech-synthesis by name.
-    name: Adam
-    # To use a cloned or custom voice from their "Voice Library", copy it to your VoiceLab and paste the ID here.
-    # Only you(r) API key has access to these voice IDs, so you can't share them.
-    #id: xxx # id overrules name if both are set!
-
-  voice_settings:
-    stability: 0.71 # 0.0 - 1.0
-    similarity_boost: 0.5 # 0.0 - 1.0
-    style: 0.0 # 0.0 - 1.0, not available for eleven_turbo_v2
-    use_speaker_boost: true # true or false
-
-# ────────────────────────────────── AZURE ────────────────────────────────────
-# Azure is a paid subscription provider from Microsoft which also offers OpenAI API access.
-# If you configured some providers above to use Azure, you need to provide your Azure settings here.
-# Please also provide your Azure API keys in the secrets.yaml.
-azure:
-  whisper:
-    api_base_url: https://openai-w-eu.openai.azure.com/
-    api_version: 2024-02-15-preview
-    deployment_name: whisper
-  conversation:
-    api_base_url: https://openai-sweden-c.openai.azure.com/
-    api_version: 2024-02-15-preview
-    deployment_name: gpt-35-turbo
-  summarize:
-    api_base_url: https://openai-sweden-c.openai.azure.com/
-    api_version: 2024-02-15-preview
-    deployment_name: gpt-35-turbo
-  tts:
-    region: westeurope
-    voice: en-US-JennyMultilingualV2Neural
-    output_streaming: true
-  stt:
-    region: westeurope
-    languages:
-      - en-US
-      - de-DE
-
-# ────────────────────────────────── WHISPERCPP ────────────────────────────────────
-# Whispercpp is a free, open source, MIT-licensed program on GitHub that uses your local computer resources to generate speech to text using free models from whisper.
-# You can find the program here: https://github.com/ggerganov/whisper.cpp with downloadable .zip file releases for executables for your system here: https://github.com/ggerganov/whisper.cpp/releases
-# Models to use with whispercpp are available here: https://huggingface.co/ggerganov/whisper.cpp/tree/main and also the ggml versions of distil-whisper models here: https://huggingface.co/collections/distil-whisper/distil-whisper-models-65411987e6727569748d2eb6
-# There are executables for Windows and Mac, and for Windows there are executables for just running on cpu (whisper-blas-bin) and Nvidia GPU (whisper-cublas)
-# The typical model used for multilingual is ggml-small.bin or ggml-base.bin. The typical model used for English only is ggml-base.en.bin.
-# The ideal setup is to start Whispercpp's server.exe file following its instructions and then run WingmanAI.
-# However, if you want, you can set autostart: True and provide a path to your whispercpp server.exe and model .bin in the config below and WingmanAI can try to start it for you. Autostart currently works on Windows only.
-# You can expect quality to be worse than openai or azure whisper, but this method is free and may also be better for people with worse internet. Speed will depend on your system resources.
-# Note that WingmanAI's support of whispercpp assumes you already know how to use the program. It is just a connector. If there are any issues with whispercpp you should direct them to the creator of whispercpp.
-# Only used if features > stt_provider is set to 'whispercpp' above.
-whispercpp:
-  base_url: http://127.0.0.1:8080 # This is the default for whispercpp, though you can change the host and port manually when launching your server with command line arguments.
-  autostart: false # Autostart currently works on Windows only. Set this to true if you want to set paths to your whispercpp server.exe and model .bin below and have Wingman try to launch the whispercpp server itself.
-  autostart_settings: # Required if autostart: true
-    whispercpp_exe_path: C:\Whispercpp\server.exe # This is just an example, you will insert here the full path to your whispercpp server.exe file. Only used if autostart is set to true.
-    whispercpp_model_path: C:\Whispercpp\models\ggml-base.bin # This is just an example, you will insert here the full path to the whispercpp ggml model file you downloaded. Only used if autostart is set to true.
-  temperature: 0.0 # Not typically changed, but you can review the whispercpp documentation to determine whether to experiment with this value.
-  language: en # Insert the whispercpp two letter code for your language if you intend to speak in another language. Also make sure you do not use an English-only whispercpp model if this is not set to en.
-
-# ────────────────────────────────── XVASYNTH ────────────────────────────────────
-# XVASynth is a free program on Steam that uses your local computer resources to generate text to speech using free voice models created by the community.
-# You can find the program here: https://store.steampowered.com/app/1765720/xVASynth/
-# The ideal setup is to start XVASynth and then run WingmanAI. However, if you do not do so, WingmanAI will try to run XVASynth if you use it for a wingman.
-# This implementation only supports XVASynth v3 voices.
-# You can find the voice folders you have installed in the [your xVASynth directory]/resources/app/models folder, and inside of each will be the voices.
-# An example below is where "terminator" is the name of the game folder inside of "models" and "tn-850" is the name of the voice.
-# The name of the voice is the name without the .pt or .json file extensions.
-# Only used if features > tts_provider is set to 'xvasynth' above.
-xvasynth:
-  xvasynth_path: C:\Program Files (x86)\Steam\steamapps\common\xVASynth # The path to your install of XVASynth. If you do not provide this and try to use xvasynth there will be an error.
-  process_device: cpu # Can be cpu or gpu, if gpu you may need to take more steps to have xvasynth run on gpu. Defaults to cpu.
-  game_folder_name: terminator # The game folder name of the voice you downloaded to use as your primary xvasynth voice. This can be overwritten in a particular wingman. If you do not provide this and try to use xvasynth there will be an error.
-  voice: tn-T850 # The name of the voice you downloaded to use. This can be overwritten in a particular wingman. If you do not provide this and try to use xvasynth there will be an error.
-  language: en # The language the voice will speak in. Some XVASynth voices are trained to be multi-lingual. Defaults to English, 'en'.
-  pace: 1.0 # The speed of the voice playback. Defaults to 1.0.
-  use_sr: false # Whether to use XVASynth's super resolution mode. Will take longer and is generally not recommended. Defaults to false.
-  use_cleanup: false # Whether to use XVASynth's cleanup mode. May make voice quality better or worse depending on the voice model. Defaults to false.
- synthesize_url: http://127.0.0.1:8008/synthesize # This should typically be left alone, changing it will cause errors unless you manually changed xvasynth's server - load_model_url: http://127.0.0.1:8008/loadModel # This should be typically left alone, changing it will cause errors unless you manually changed xvasynth's server -# ────────────────────────────── GLOBAL COMMANDS ─────────────────────────────── -# You can use these in all wingmen and you can put your own here, too. -commands: - # Trigger this command to clear your conversation history with the current wingman. - # This is like setting "remember_messages" to 0 above, but "on-demand". You'll restart with just the context. - - name: ResetConversationHistory # don't rename this! - instant_activation: - - Forget everything! - - Clear conversation history! - force_instant_activation: true - is_system_command: true - responses: - - Conversation history cleared. - -wingman_pro: - stt_provider: azure_speech # or whisper - tts_provider: azure # or openai - summarize_deployment: gpt-35-turbo # or gpt-4-turbo - conversation_deployment: gpt-35-turbo # or gpt-4-turbo diff --git a/main.py b/main.py index 15ebf951..60386175 100644 --- a/main.py +++ b/main.py @@ -3,6 +3,7 @@ from enum import Enum from os import path import sys +import traceback from typing import Any, Literal, get_args, get_origin import uvicorn from fastapi import FastAPI, WebSocket, WebSocketDisconnect @@ -17,6 +18,7 @@ from services.command_handler import CommandHandler from services.config_manager import ConfigManager from services.connection_manager import ConnectionManager +from services.esp32_handler import Esp32Handler from services.secret_keeper import SecretKeeper from services.printr import Printr from services.system_manager import SystemManager @@ -199,9 +201,23 @@ async def websocket_endpoint(websocket: WebSocket): await printr.print_async("Client disconnected", server_only=True) +# Websocket for other clients to stream audio to and from (like M5Atom Echo ESP32 devices) +@app.websocket("/") +async def oi_websocket_endpoint(websocket: WebSocket): + await websocket.accept() + esp32_handler = Esp32Handler(core) + receive_task = asyncio.create_task(esp32_handler.receive_messages(websocket)) + send_task = asyncio.create_task(esp32_handler.send_messages(websocket)) + try: + await asyncio.gather(receive_task, send_task) + except Exception as e: + print(traceback.format_exc()) + print(f"Connection lost. Error: {e}") + + @app.post("/start-secrets", tags=["main"]) async def start_secrets(secrets: dict[str, Any]): - secret_keeper.post_secrets(secrets) + await secret_keeper.post_secrets(secrets) core.startup_errors = [] await core.config_service.load_config() diff --git a/providers/open_ai.py b/providers/open_ai.py index d593337b..dec05ddd 100644 --- a/providers/open_ai.py +++ b/providers/open_ai.py @@ -1,19 +1,13 @@ from abc import ABC, abstractmethod import re -import numpy as np from typing import Literal from openai import OpenAI, APIStatusError, AzureOpenAI import azure.cognitiveservices.speech as speechsdk from api.enums import ( AzureRegion, - LlamaModel, - LogType, - MistralModel, - OpenAiModel, OpenAiTtsVoice, ) from api.interface import ( - AzureConfig, AzureInstanceConfig, AzureSttConfig, AzureTtsConfig, @@ -45,13 +39,11 @@ def _handle_api_error(self, api_response): ) if m is not None: message = m["message"].replace(". 
", ".\n") - printr.print(message, color=LogType.ERROR) + printr.toast_error(message) elif api_response.message: - printr.print(api_response.message, color=LogType.ERROR) + printr.toast_error(api_response.message) else: - printr.print( - "The API did not provide further information.", color=LogType.ERROR - ) + printr.toast_error("The API did not provide further information.") def _perform_transcription( self, @@ -76,22 +68,22 @@ def _perform_ask( self, client: OpenAI | AzureOpenAI, messages: list[dict[str, str]], - model: OpenAiModel | MistralModel | LlamaModel, stream: bool, tools: list[dict[str, any]], + model: str = None, ): try: if not tools: completion = client.chat.completions.create( stream=stream, messages=messages, - model=model.value, + model=model, ) else: completion = client.chat.completions.create( stream=stream, messages=messages, - model=model.value, + model=model, tools=tools, tool_choice="auto", ) @@ -140,12 +132,10 @@ def transcribe(self, filename: str, model: str = "whisper-1"): def ask( self, messages: list[dict[str, str]], - model: OpenAiModel | MistralModel | LlamaModel, + model: str = None, stream: bool = False, tools: list[dict[str, any]] = None, ): - if not model: - model = OpenAiModel.GPT_35_TURBO return self._perform_ask( client=self.client, messages=messages, diff --git a/services/audio_player.py b/services/audio_player.py index 0e637c2a..74d96763 100644 --- a/services/audio_player.py +++ b/services/audio_player.py @@ -26,6 +26,7 @@ def __init__( self.raw_stream = None self.wingman_name = "" self.playback_events = PubSub() + self.stream_event = PubSub() self.on_playback_started = on_playback_started self.on_playback_finished = on_playback_finished @@ -224,6 +225,8 @@ def callback(outdata, frames, time, status): processed_buffer = data_in_numpy.astype(dtype).tobytes() buffer.extend(processed_buffer) + await self.stream_event.publish("audio", processed_buffer) + filled_size = buffer_callback(audio_buffer) data_received = True diff --git a/services/config_manager.py b/services/config_manager.py index fbdd3283..9101198c 100644 --- a/services/config_manager.py +++ b/services/config_manager.py @@ -19,13 +19,13 @@ from services.file import get_writable_dir from services.printr import Printr +TEMPLATES_DIR = "templates" CONFIGS_DIR = "configs" -TEMPLATES_DIR = "configs/templates" SETTINGS_CONFIG_FILE = "settings.yaml" DEFAULT_CONFIG_FILE = "defaults.yaml" SECRETS_FILE = "secrets.yaml" -DEFAULT_WINGMAN_AVATAR = "default-avatar-wingman.png" +DEFAULT_WINGMAN_AVATAR = "default-wingman-avatar.png" DELETED_PREFIX = "." 
DEFAULT_PREFIX = "_" @@ -36,10 +36,10 @@ def __init__(self, app_root_path: str): self.log_source_name = "ConfigManager" self.printr = Printr() - self.config_dir = get_writable_dir(CONFIGS_DIR) self.templates_dir = path.join(app_root_path, TEMPLATES_DIR) + self.config_dir = get_writable_dir(CONFIGS_DIR) - self.create_configs_from_templates() + self.copy_templates() self.settings_config_path = path.join(self.config_dir, SETTINGS_CONFIG_FILE) self.default_config_path = path.join(self.config_dir, DEFAULT_CONFIG_FILE) @@ -78,6 +78,7 @@ def find_default_config(self) -> ConfigDirInfo: color=LogType.ERROR, source=LogSource.SYSTEM, source_name=self.log_source_name, + server_only=True, ) return fallback @@ -87,6 +88,7 @@ def find_default_config(self) -> ConfigDirInfo: color=LogType.WARNING, source=LogSource.SYSTEM, source_name=self.log_source_name, + server_only=True, ) return default_dir @@ -115,8 +117,8 @@ def get_config_dir_path(self, config_name: Optional[str] = "") -> str: path.join(self.config_dir, config_name) if config_name else self.config_dir ) - def create_configs_from_templates(self, force: bool = False): - for root, _, files in walk(self.templates_dir): + def copy_templates(self, force: bool = False): + for root, dirs, files in walk(self.templates_dir): relative_path = path.relpath(root, self.templates_dir) if relative_path != ".": config_dir = self.get_config_dir( @@ -129,16 +131,18 @@ def create_configs_from_templates(self, force: bool = False): continue # Create the same relative path in the target directory - target_path = ( - self.config_dir - if relative_path == "." - else path.join(self.config_dir, relative_path) + target_path = get_writable_dir( + relative_path if relative_path != "." else "" ) if not path.exists(target_path): makedirs(target_path) for filename in files: + # yaml files + if filename == ".DS_Store": + continue + if filename.endswith(".yaml"): new_filename = filename.replace(".template", "") new_filepath = path.join(target_path, new_filename) @@ -165,15 +169,13 @@ def create_configs_from_templates(self, force: bool = False): source=LogSource.SYSTEM, source_name=self.log_source_name, ) - # create avatar - elif filename.endswith("avatar.png"): - new_filename = filename.replace(".avatar", "") - new_filepath = path.join(target_path, new_filename) + else: + new_filepath = path.join(target_path, filename) already_exists = path.exists(new_filepath) if force or not already_exists: shutil.copyfile(path.join(root, filename), new_filepath) self.printr.print( - f"Created avatar {new_filepath} from template.", + f"Created file {new_filepath} from template.", color=LogType.INFO, server_only=True, source=LogSource.SYSTEM, @@ -261,14 +263,15 @@ def get_new_wingman_template(self): "description": "", "record_key": "", "disabled": False, - "openai": {"context": ""}, "commands": [], + "skills": [], + "prompts": {"backstory": ""}, } validated_config = self.__merge_configs(parsed_config, wingman_config) return NewWingmanTemplate( wingman_config=validated_config, avatar=self.__load_image_as_base64( - path.join(self.templates_dir, DEFAULT_WINGMAN_AVATAR) + path.join(self.templates_dir, CONFIGS_DIR, DEFAULT_WINGMAN_AVATAR) ), ) @@ -567,7 +570,9 @@ def get_wingman_avatar_path( avatar_path = path.join( self.config_dir, config_dir.directory, f"{wingman_file_base_name}.png" ) - default_avatar_path = path.join(self.templates_dir, DEFAULT_WINGMAN_AVATAR) + default_avatar_path = path.join( + self.templates_dir, CONFIGS_DIR, DEFAULT_WINGMAN_AVATAR + ) return ( avatar_path if create or 
path.exists(avatar_path) else default_avatar_path ) @@ -748,18 +753,20 @@ def __merge_configs(self, general: Config, wingman): """Merge general settings with a specific wingman's overrides, including commands.""" # Start with a copy of the wingman's specific config to keep it intact. merged = wingman.copy() - # Update 'openai', 'features', and 'edge_tts' sections from general config into wingman's config. for key in [ + "prompts", + "features", "sound", "openai", "mistral", - "llama", - "features", + "groq", + "openrouter", + "local_llm", "edge_tts", "elevenlabs", "azure", - "xvasynth", "whispercpp", + "xvasynth", "wingman_pro", ]: if key in general: @@ -776,6 +783,5 @@ def __merge_configs(self, general: Config, wingman): elif "commands" in general: # If the wingman config does not have commands, use the general ones merged["commands"] = general["commands"] - # No else needed; if 'commands' is not in general, we simply don't set it return WingmanConfig(**merged) diff --git a/services/config_service.py b/services/config_service.py index 7e9b01bd..6a32cc20 100644 --- a/services/config_service.py +++ b/services/config_service.py @@ -5,10 +5,12 @@ ConfigWithDirInfo, ConfigsInfo, NewWingmanTemplate, + SkillBase, WingmanConfig, WingmanConfigFileInfo, ) from services.config_manager import ConfigManager +from services.module_manager import ModuleManager from services.printr import Printr from services.pub_sub import PubSub @@ -116,6 +118,23 @@ def __init__(self, config_manager: ConfigManager): endpoint=self.save_wingman_config, tags=tags, ) + self.router.add_api_route( + methods=["GET"], + path="/available-skills", + endpoint=self.get_available_skills, + response_model=list[SkillBase], + tags=tags, + ) + + # GET /available-skills + def get_available_skills(self): + try: + skills = ModuleManager.read_available_skills() + except Exception as e: + self.printr.toast_error(str(e)) + raise e + + return skills # GET /configs def get_config_dirs(self): @@ -245,7 +264,7 @@ async def save_wingman_config( config_dir, wingman_file ) if deleted: - self.config_manager.create_configs_from_templates() + self.config_manager.copy_templates() await self.load_config(config_dir) diff --git a/services/esp32_handler.py b/services/esp32_handler.py new file mode 100644 index 00000000..d76eb695 --- /dev/null +++ b/services/esp32_handler.py @@ -0,0 +1,108 @@ +import asyncio +import json +from os import path +import wave + +from fastapi import WebSocket, WebSocketDisconnect + +from services.file import get_writable_dir +from wingman_core import WingmanCore + + +class Esp32Handler: + def __init__(self, core: WingmanCore) -> None: + self.to_device = asyncio.Queue() + self.core = core + self.wait_for_response = False + self.recording_path = "audio_output" + self.recording_file = "client_recording.wav" + self.recording_file_path = path.join( + get_writable_dir(self.recording_path), self.recording_file + ) + + core.audio_player.stream_event.subscribe("audio", self.handle_stream_playback) + core.audio_player.playback_events.subscribe("started", self.handle_start) + core.audio_player.playback_events.subscribe("finished", self.handle_end) + + async def handle_start(self, _): + if self.wait_for_response: + await self.to_device.put({"role": "assistant", "type": "audio", "format": "bytes.raw", "start": True}) + + async def handle_end(self, _): + if self.wait_for_response: + self.wait_for_response = False + await self.to_device.put({"role": "assistant", "type": "audio", "format": "bytes.raw", "end": True}) + + async def 
handle_stream_playback(self, audio_stream): + if self.wait_for_response: + for chunk in self.direct_stream(audio_stream): + await self.to_device.put(chunk) + + async def receive_messages(self, websocket: WebSocket): + byte_string = b'' + while True: + try: + try: + data = await websocket.receive() + except Exception as e: + print(str(e)) + return + if "text" in data: + try: + data = json.loads(data["text"]) + if data["role"] == "user": + if "start" in data: + byte_string = b'' + elif "end" in data: + # Set the parameters for the WAV file + nchannels = 1 # Mono audio + sampwidth = 2 # 2 bytes per sample for 16-bit audio + framerate = 16000 # Sample rate + + # Save the received stream of bytes as a WAV file + loop = asyncio.get_event_loop() + await loop.run_in_executor(None, self.save_wav, byte_string, nchannels, sampwidth, framerate) + + self.wait_for_response = True + self.core.on_audio_recorder_speech_recorded(self.recording_file_path) + + except json.JSONDecodeError: + pass # data is not JSON, leave it as is + if "bytes" in data: + byte_string += data["bytes"] + except WebSocketDisconnect as e: + if e.code == 1000: + print("Websocket connection closed normally.") + return + else: + raise + + async def send_messages(self, websocket: WebSocket): + while True: + message = await self.to_device.get() + try: + if isinstance(message, dict): + await websocket.send_json(message) + elif isinstance(message, bytes): + await websocket.send_bytes(message) + else: + raise TypeError("Message must be a dict or bytes") + except Exception: + # Make sure to put the message back in the queue if you failed to send it + await self.to_device.put(message) + raise + + def save_wav(self, data, nchannels, sampwidth, framerate): + with wave.open(self.recording_file_path, "wb") as wf: + wf.setnchannels(nchannels) + wf.setsampwidth(sampwidth) + wf.setframerate(framerate) + wf.writeframes(data) + + def direct_stream(self, audio_bytes): + chunk_size = 2048 + + # Stream the audio + for i in range(0, len(audio_bytes), chunk_size): + chunk = audio_bytes[i : i + chunk_size] + yield chunk \ No newline at end of file diff --git a/services/module_manager.py b/services/module_manager.py new file mode 100644 index 00000000..1c84a5c9 --- /dev/null +++ b/services/module_manager.py @@ -0,0 +1,207 @@ +import base64 +from contextlib import contextmanager +from importlib import import_module, util +from os import path +import os +import sys + +import yaml +from api.interface import SettingsConfig, SkillBase, SkillConfig, WingmanConfig +from services.audio_player import AudioPlayer +from services.file import get_writable_dir +from services.printr import Printr +from skills.skill_base import Skill + +SKILLS_DIR = "skills" + + +class ModuleManager: + + @staticmethod + def get_module_name_and_path(module_string: str) -> tuple[str, str]: + """Splits a module path into its name and path components. + + Args: + module_string (str): The path to the module, e.g. "skills.spotify.main" + + Returns: + tuple[str, str]: The name of the module and the path to it, e.g.
("main", "skills/spotify/main.py") + """ + module_name = module_string.split(".")[-1] + module_path = "" + for sub_dir in module_string.split(".")[:-1]: + module_path = path.join(module_path, sub_dir) + # module_path = path.join(module_path, module_name + ".py") + return module_name, module_path + + @staticmethod + def create_wingman_dynamically( + name: str, + config: WingmanConfig, + settings: SettingsConfig, + audio_player: AudioPlayer, + ): + """Dynamically creates a Wingman instance from a module path and class name + + Args: + name (str): The name of the wingman. This is the key you gave it in the config, e.g. "atc" + config (WingmanConfig): All "general" config entries merged with the specific Wingman config settings. The Wingman takes precedence and overrides the general config. You can just add new keys to the config and they will be available here. + settings (SettingsConfig): The general user settings. + audio_player (AudioPlayer): The audio player handling the playback of audio files. + """ + + try: + # try to load from app dir first + module = import_module(config.custom_class.module) + except ModuleNotFoundError: + # split module into name and path + module_name, module_path = ModuleManager.get_module_name_and_path( + config.custom_class.module + ) + module_path = path.join(get_writable_dir(module_path), module_name + ".py") + # load from alternative absolute file path + spec = util.spec_from_file_location(module_name, module_path) + module = util.module_from_spec(spec) + spec.loader.exec_module(module) + DerivedWingmanClass = getattr(module, config.custom_class.name) + instance = DerivedWingmanClass( + name=name, + config=config, + settings=settings, + audio_player=audio_player, + ) + return instance + + @staticmethod + def load_skill( + config: SkillConfig, wingman_config: WingmanConfig, settings: SettingsConfig + ) -> Skill: + + @contextmanager + def add_to_sys_path(path_to_add: str): + sys.path.insert(0, path_to_add) + try: + yield + finally: + sys.path.remove(path_to_add) + + try: + # try to load from the src dir first (develop mode) + skill_name, skill_path = ModuleManager.get_module_name_and_path( + config.module + ) + dependencies_dir = ( + path.join(skill_path, "venv", "lib", "python3.11", "site-packages") + if sys.platform == "darwin" + else path.join(skill_path, "venv", "Lib", "site-packages") + ) + dependencies_dir = path.abspath(dependencies_dir) + with add_to_sys_path(dependencies_dir): + module = import_module(config.module) + except ModuleNotFoundError: + # load from Wingman AI config dir (AppData) + skill_name, skill_path = ModuleManager.get_module_name_and_path( + config.module + ) + skill_path = get_writable_dir(skill_path) + # Add the dependencies directory to sys.path so the plugin can load them + dependencies_dir = get_writable_dir(path.join(skill_path, "dependencies")) + # sys.path.insert(0, dependencies_dir) + with add_to_sys_path(dependencies_dir): + # Path to the plugin's main module file (e.g., plugin.py) + plugin_module_path = get_writable_dir(path.join(skill_path, "main.py")) + + if path.exists(plugin_module_path): + # Load the plugin module dynamically + spec = util.spec_from_file_location(skill_name, plugin_module_path) + module = util.module_from_spec(spec) + spec.loader.exec_module(module) + else: + raise FileNotFoundError( + f"Plugin '{skill_name}' not found in directory '{skill_path}'" + ) + + DerivedSkillClass = getattr(module, config.name) + instance = DerivedSkillClass( + config=config, wingman_config=wingman_config, settings=settings + ) 
+ return instance + + @staticmethod + def read_available_skill_configs() -> list[tuple[str, str]]: + if os.path.isdir(SKILLS_DIR): + skills_dir = SKILLS_DIR + else: + skills_dir = get_writable_dir(SKILLS_DIR) + + skills_default_configs = [] + # Traverse the skills directory + for skill_name in os.listdir(skills_dir): + # Construct the path to the skill's directory + skill_path = os.path.join(skills_dir, skill_name) + + # Check if the path is a directory (to avoid non-folder files) + if os.path.isdir(skill_path): + # Construct the path to the default_config.yaml file + default_config_path = os.path.join(skill_path, "default_config.yaml") + + # Check if the default_config.yaml file exists + if os.path.isfile(default_config_path): + # Add the skill name and the default_config.yaml file path to the list + skills_default_configs.append((skill_name, default_config_path)) + + return skills_default_configs + + @staticmethod + def read_available_skills() -> list[SkillBase]: + printr = Printr() + skills = [] + # Get the list of available skill configs + available_skill_configs = ModuleManager.read_available_skill_configs() + # Load each skill from its config + for _, skill_config_path in available_skill_configs: + skill_config = ModuleManager.read_config(skill_config_path) + + logo = None + logo_path = path.join(path.dirname(skill_config_path), "logo.png") + if path.exists(logo_path): + logo = ModuleManager.load_image_as_base64(logo_path) + + try: + skill = SkillBase( + name=skill_config["name"], + config=skill_config, + description=skill_config["description"], + logo=logo, + ) + skills.append(skill) + except Exception as e: + printr.toast_error( + f"Could not load skill from '{skill_config_path}': {str(e)}" + ) + return skills + + @staticmethod + def load_image_as_base64(file_path: str): + with open(file_path, "rb") as image_file: + image_bytes = image_file.read() + + base64_encoded_data = base64.b64encode(image_bytes) + base64_string = base64_encoded_data.decode("utf-8") + base64_data_uri = f"data:image/png;base64,{base64_string}" + + return base64_data_uri + + @staticmethod + def read_config(file_path: str): + """Loads a config file (without validating it)""" + printr = Printr() + with open(file_path, "r", encoding="UTF-8") as stream: + try: + parsed = yaml.safe_load(stream) + return parsed + except yaml.YAMLError as e: + printr.toast_error( + f"Could not read skill config '{file_path}':\n{str(e)}" + ) + return None diff --git a/services/printr.py b/services/printr.py index 84248f23..a39a3a9e 100644 --- a/services/printr.py +++ b/services/printr.py @@ -51,6 +51,7 @@ async def __send_to_gui( source=LogSource.SYSTEM, source_name: str = "", command_tag: CommandTag = None, + skill_name: str = "", ): if self._connection_manager is None: raise ValueError("connection_manager has not been set.") @@ -67,6 +68,7 @@ async def __send_to_gui( source=source, source_name=source_name, tag=command_tag, + skill_name=skill_name, ) ) @@ -105,6 +107,7 @@ async def print_async( toast: ToastType = None, server_only=False, command_tag: CommandTag = None, + skill_name: str = "", ): # print to server (terminal) self.print_colored(text, color=self.get_terminal_color(color)) @@ -117,6 +120,7 @@ async def print_async( source=source, source_name=source_name, command_tag=command_tag, + skill_name=skill_name, ) def toast(self, text: str): diff --git a/services/secret_keeper.py b/services/secret_keeper.py index f96b86e7..96d9972f 100644 --- a/services/secret_keeper.py +++ b/services/secret_keeper.py @@ -102,7 +102,7 @@ def 
get_secrets(self): return self.secrets # POST /secrets - def post_secrets(self, secrets: dict[str, Any]): + async def post_secrets(self, secrets: dict[str, Any]): if not secrets or len(secrets) == 0: return @@ -110,6 +110,6 @@ def post_secrets(self, secrets: dict[str, Any]): self.secrets[key] = value self.save() - self.printr.print( + await self.printr.print( "Secrets updated.", toast=ToastType.NORMAL, color=LogType.POSITIVE ) diff --git a/services/settings_service.py b/services/settings_service.py index 55d2448a..027dcd4b 100644 --- a/services/settings_service.py +++ b/services/settings_service.py @@ -52,6 +52,12 @@ def __init__(self, config_manager: ConfigManager, config_service: ConfigService) endpoint=self.set_mute_key, tags=tags, ) + self.router.add_api_route( + methods=["POST"], + path="/settings/debug-mode", + endpoint=self.set_debug_mode, + tags=tags, + ) self.router.add_api_route( methods=["POST"], path="/settings/wingman-pro", @@ -101,6 +107,7 @@ def get_audio_settings_indexed(self, write: bool = True) -> AudioSettings: "Configured input device is not an input device. Using default.", toast=ToastType.NORMAL, color=LogType.WARNING, + server_only=True, ) input_device = None else: @@ -123,6 +130,7 @@ def get_audio_settings_indexed(self, write: bool = True) -> AudioSettings: "Configured input device not found. Using default.", toast=ToastType.NORMAL, color=LogType.WARNING, + server_only=True, ) input_device = None elif isinstance(input_settings, AudioDeviceSettings): @@ -148,6 +156,7 @@ def get_audio_settings_indexed(self, write: bool = True) -> AudioSettings: f"Configured input device '{input_settings.name}' not found. Using default.", toast=ToastType.NORMAL, color=LogType.WARNING, + server_only=True, ) elif write: self.printr.print( @@ -172,6 +181,7 @@ def get_audio_settings_indexed(self, write: bool = True) -> AudioSettings: "Configured output device is not an output device. Using default.", toast=ToastType.NORMAL, color=LogType.WARNING, + server_only=True, ) output_device = None else: @@ -194,6 +204,7 @@ def get_audio_settings_indexed(self, write: bool = True) -> AudioSettings: "Configured output device not found. Using default.", toast=ToastType.NORMAL, color=LogType.WARNING, + server_only=True, ) output_device = None # check if instance of AudioDeviceSettings @@ -220,6 +231,7 @@ def get_audio_settings_indexed(self, write: bool = True) -> AudioSettings: f"Configured audio output device '{output_settings.name}' not found. 
Using default.", toast=ToastType.NORMAL, color=LogType.WARNING, + server_only=True, ) elif write: self.printr.print( @@ -269,7 +281,7 @@ async def set_audio_devices( ) if self.config_manager.save_settings_config(): - self.printr.print( + await self.printr.print_async( "Audio devices updated.", toast=ToastType.NORMAL, color=LogType.POSITIVE ) await self.settings_events.publish( @@ -282,7 +294,7 @@ async def set_voice_activation(self, is_enabled: bool): self.config_manager.settings_config.voice_activation.enabled = is_enabled if self.config_manager.save_settings_config(): - self.printr.print( + await self.printr.print_async( f"Voice activation {'enabled' if is_enabled else 'disabled'}.", toast=ToastType.NORMAL, color=LogType.POSITIVE, @@ -290,15 +302,24 @@ async def set_voice_activation(self, is_enabled: bool): await self.settings_events.publish("voice_activation_changed", is_enabled) # POST /settings/mute-key - def set_mute_key(self, key: str, keycodes: Optional[list[int]] = None): + async def set_mute_key(self, key: str, keycodes: Optional[list[int]] = None): self.config_manager.settings_config.voice_activation.mute_toggle_key = key self.config_manager.settings_config.voice_activation.mute_toggle_key_codes = ( keycodes ) if self.config_manager.save_settings_config(): - self.printr.print( - "Mute key saved.", + await self.printr.print_async( + "Mute key saved.", toast=ToastType.NORMAL, color=LogType.POSITIVE + ) + + # POST /settings/debug-mode + async def set_debug_mode(self, debug_mode: bool): + self.config_manager.settings_config.debug_mode = debug_mode + + if self.config_manager.save_settings_config(): + await self.printr.print_async( + "Debug Mode saved.", toast=ToastType.NORMAL, color=LogType.POSITIVE, ) @@ -329,7 +350,7 @@ async def set_wingman_pro_settings( if self.config_manager.save_settings_config(): await self.config_service.load_config() - self.printr.print( + await self.printr.print_async( "Wingman Pro settings updated.", toast=ToastType.NORMAL, color=LogType.POSITIVE, diff --git a/services/system_manager.py b/services/system_manager.py index 0034b365..c9a9157a 100644 --- a/services/system_manager.py +++ b/services/system_manager.py @@ -4,7 +4,7 @@ from packaging import version from api.interface import SystemCore, SystemInfo -LOCAL_VERSION = "1.2.1" +LOCAL_VERSION = "1.3.0" VERSION_ENDPOINT = "https://shipbit.de/wingman.json" diff --git a/services/tower.py b/services/tower.py index e7505abc..3509301e 100644 --- a/services/tower.py +++ b/services/tower.py @@ -1,6 +1,7 @@ from api.enums import LogSource, LogType, WingmanInitializationErrorType from api.interface import Config, SettingsConfig, WingmanInitializationError from services.audio_player import AudioPlayer +from services.module_manager import ModuleManager from services.printr import Printr from wingmen.open_ai_wingman import OpenAiWingman from wingmen.wingman import Wingman @@ -38,7 +39,7 @@ async def instantiate_wingmen(self, settings: SettingsConfig): try: # it's a custom Wingman if wingman_config.custom_class: - wingman = Wingman.create_dynamically( + wingman = ModuleManager.create_wingman_dynamically( name=wingman_name, config=wingman_config, settings=settings, @@ -51,7 +52,18 @@ async def instantiate_wingmen(self, settings: SettingsConfig): settings=settings, audio_player=self.audio_player, ) + except FileNotFoundError as e: # pylint: disable=broad-except + wingman_config.disabled = True + printr.print( + f"Could not instantiate {wingman_name}:\n{str(e)}", + color=LogType.WARNING, + server_only=True, + 
source_name=self.log_source_name, + source=LogSource.SYSTEM, + ) + continue except Exception as e: # pylint: disable=broad-except + wingman_config.disabled = True # just in case we missed something msg = str(e).strip() or type(e).__name__ errors.append( @@ -61,19 +73,23 @@ error_type=WingmanInitializationErrorType.UNKNOWN, ) ) - printr.toast_error(f"Could not instantiate {wingman_name}:\n{str(e)}") + continue else: # additional validation check if no exception was raised validation_errors = await wingman.validate() errors.extend(validation_errors) + + # init and validate skills + skill_errors = await wingman.init_skills() + errors.extend(skill_errors) + if not errors or len(errors) == 0: - wingman.prepare() + await wingman.prepare() self.wingmen.append(wingman) - # Mouse - button = wingman.get_record_button() - if button: - self.mouse_wingman_dict[button] = wingman + # Mouse + button = wingman.get_record_button() + if button: + self.mouse_wingman_dict[button] = wingman printr.print( f"Instantiated wingmen: {', '.join([w.name for w in self.wingmen])}.", diff --git a/skills/control_windows/default_config.yaml b/skills/control_windows/default_config.yaml new file mode 100644 index 00000000..1d922391 --- /dev/null +++ b/skills/control_windows/default_config.yaml @@ -0,0 +1,29 @@ +name: ControlWindows +module: skills.control_windows.main +description: + en: Launch applications and control your Windows computer. + de: Starte Anwendungen und steuere deinen Windows-Computer. +hint: + en: This skill looks for applications in your Start Menu directory (%APPDATA%/Microsoft/Windows/Start Menu/Programs), so if it tells you that it cannot find an application, create a shortcut in that directory. + de: Dieser Skill sucht nach Anwendungen in deinem Startmenü-Verzeichnis (%APPDATA%/Microsoft/Windows/Start Menu/Programs). Wenn er dir also sagt, dass er eine Anwendung nicht finden kann, erstelle eine Verknüpfung in diesem Verzeichnis. +examples: + - question: + en: Open Spotify. + de: Öffne Spotify. + answer: + en: (opens the Spotify application) + de: (öffnet die Spotify-Anwendung) + - question: + en: Activate Notepad. + de: Aktiviere Notepad. + answer: + en: (activates the Notepad application) + de: (aktiviert die Notepad Anwendung) + - question: + en: Close Notepad. + de: Schließe Notepad. + answer: + en: (closes the Notepad application) + de: (schließt die Notepad Anwendung) +prompt: | + You can also control Windows Functions, like opening and closing applications.
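The `name`/`module` pair above is the contract that `ModuleManager.load_skill` relies on: it imports `module` and resolves the skill class via `getattr(module, config.name)`. As a minimal sketch of a matching implementation (the `hello_world` directory, class, and tool are hypothetical, not part of this changeset), a third-party skill would declare `name: HelloWorld` and `module: skills.hello_world.main` and ship a main module like this:

# skills/hello_world/main.py (hypothetical example)
from api.interface import SettingsConfig, SkillConfig, WingmanConfig
from skills.skill_base import Skill


class HelloWorld(Skill):  # class name must match "name" in default_config.yaml
    def __init__(
        self,
        config: SkillConfig,
        wingman_config: WingmanConfig,
        settings: SettingsConfig,
    ) -> None:
        super().__init__(
            config=config, wingman_config=wingman_config, settings=settings
        )

    def get_tools(self) -> list[tuple[str, dict]]:
        # one (tool name, OpenAI function schema) tuple per tool
        return [
            (
                "say_hello",
                {
                    "type": "function",
                    "function": {
                        "name": "say_hello",
                        "description": "Returns a friendly greeting.",
                        "parameters": {"type": "object", "properties": {}},
                    },
                },
            )
        ]

    async def execute_tool(
        self, tool_name: str, parameters: dict[str, any]
    ) -> tuple[str, str]:
        # returns (function_response, instant_response), same shape as ControlWindows below
        if tool_name == "say_hello":
            return "Hello from a skill!", ""
        return "", ""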
diff --git a/skills/control_windows/logo.png b/skills/control_windows/logo.png new file mode 100644 index 00000000..3242b570 Binary files /dev/null and b/skills/control_windows/logo.png differ diff --git a/skills/control_windows/main.py b/skills/control_windows/main.py new file mode 100644 index 00000000..d89e7bcd --- /dev/null +++ b/skills/control_windows/main.py @@ -0,0 +1,153 @@ +import os +from pathlib import Path +import pygetwindow as gw +from api.interface import ( + SettingsConfig, + SkillConfig, + WingmanConfig, + WingmanInitializationError, +) +from api.enums import LogType +from skills.skill_base import Skill + + +class ControlWindows(Skill): + + # Paths to Start Menu directories + start_menu_paths: list[Path] = [ + Path(os.environ["APPDATA"], "Microsoft", "Windows", "Start Menu", "Programs"), + Path( + os.environ["PROGRAMDATA"], "Microsoft", "Windows", "Start Menu", "Programs" + ), + ] + + def __init__( + self, + config: SkillConfig, + wingman_config: WingmanConfig, + settings: SettingsConfig, + ) -> None: + super().__init__( + config=config, wingman_config=wingman_config, settings=settings + ) + + async def validate(self) -> list[WingmanInitializationError]: + errors = await super().validate() + return errors + + # Function to recursively list files in a directory + def list_files(self, directory, extension=""): + for item in directory.iterdir(): + if item.is_dir(): + yield from self.list_files(item, extension) + elif item.is_file() and item.suffix == extension: + yield item + + # Function to search and start an application + def search_and_start(self, app_name): + for start_menu_path in self.start_menu_paths: + if start_menu_path.exists(): + for file_path in self.list_files(start_menu_path, ".lnk"): + if app_name.lower() in file_path.stem.lower(): + # Attempt to start the application + os.startfile(str(file_path)) + # subprocess.Popen([str(file_path)]) + return True + return False + + def close_application(self, app_name): + windows = gw.getWindowsWithTitle(app_name) + if windows and len(windows) > 0: + for window in windows: + window.close() + + return True + + return False + + def execute_ui_command(self, app_name: str, command: str): + windows = gw.getWindowsWithTitle(app_name) + if windows and len(windows) > 0: + for window in windows: + try: + getattr(window, command)() + except AttributeError: + pass + + return True + + return False + + def get_tools(self) -> list[tuple[str, dict]]: + tools = [ + ( + "control_windows_functions", + { + "type": "function", + "function": { + "name": "control_windows_functions", + "description": "Control Windows Functions, like opening and closing applications.", + "parameters": { + "type": "object", + "properties": { + "command": { + "type": "string", + "description": "The command to execute", + "enum": [ + "open", + "close", + "minimize", + "maximize", + "restore", + "activate", + ], + }, + "parameter": { + "type": "string", + "description": "The parameter for the command. For example, the application name to open or close. Or the information to get.", + }, + }, + "required": ["command"], + }, + }, + }, + ), + ] + return tools + + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] + ) -> tuple[str, str]: + function_response = "Application not found." 
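+ # "Application not found." is the default reply; each successful command branch below overwrites it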
+ instant_response = "" + + if tool_name == "control_windows_functions": + if self.settings.debug_mode: + self.start_execution_benchmark() + await self.printr.print_async( + f"Executing control_windows_functions with parameters: {parameters}", + color=LogType.INFO, + ) + + parameter = parameters.get("parameter") + + if parameters["command"] == "open": + app_started = self.search_and_start(parameter) + if app_started: + function_response = "Application started." + + elif parameters["command"] == "close": + app_closed = self.close_application(parameter) + if app_closed: + function_response = "Application closed." + + else: + command = parameters["command"] + app_minimize = self.execute_ui_command(parameter, command) + if app_minimize: + function_response = f"Application {command}." + + if self.settings.debug_mode: + await self.print_execution_time() + + return function_response, instant_response diff --git a/skills/control_windows/requirements.txt b/skills/control_windows/requirements.txt new file mode 100644 index 00000000..1e049956 --- /dev/null +++ b/skills/control_windows/requirements.txt @@ -0,0 +1 @@ +pygetwindow==0.0.9 \ No newline at end of file diff --git a/skills/skill_base.py b/skills/skill_base.py new file mode 100644 index 00000000..8f042c00 --- /dev/null +++ b/skills/skill_base.py @@ -0,0 +1,111 @@ +import time +from api.enums import LogSource, LogType, WingmanInitializationErrorType +from api.interface import ( + SettingsConfig, + SkillConfig, + WingmanConfig, + WingmanInitializationError, +) +from services.printr import Printr +from services.secret_keeper import SecretKeeper + + +class Skill: + def __init__( + self, + config: SkillConfig, + wingman_config: WingmanConfig, + settings: SettingsConfig, + ) -> None: + + self.config = config + self.settings = settings + self.wingman_config = wingman_config + self.secret_keeper = SecretKeeper() + self.name = self.__class__.__name__ + self.printr = Printr() + self.execution_start: None | float = None + """Used for benchmarking executon times. The timer is (re-)started whenever the process function starts.""" + + async def validate(self) -> list[WingmanInitializationError]: + """Validates the skill configuration.""" + return [] + + def get_tools(self) -> list[tuple[str, dict]]: + """Returns a list of tools available in the skill.""" + return [] + + async def get_prompt(self) -> str | None: + """Returns additional context for this skill. Will be injected into the the system prompt. Can be overridden by the skill to add dynamic data to context.""" + return self.config.prompt or None + + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] + ) -> tuple[str, str]: + """Execute a tool by name with parameters.""" + pass + + async def gpt_call(self, messages, tools: list[dict] = None) -> any: + return any + + async def retrieve_secret( + self, + secret_name: str, + errors: list[WingmanInitializationError], + hint: str = None, + ): + """Use this method to retrieve secrets like API keys from the SecretKeeper. + If the key is missing, the user will be prompted to enter it. + """ + secret = await self.secret_keeper.retrieve( + requester=self.name, + key=secret_name, + prompt_if_missing=True, + ) + if not secret: + errors.append( + WingmanInitializationError( + wingman_name=self.name, + message=f"Missing secret '{secret_name}'. 
{hint or ''}", + error_type=WingmanInitializationErrorType.MISSING_SECRET, + secret_name=secret_name, + ) + ) + return secret + + def retrieve_custom_property_value( + self, + property_id: str, + errors: list[WingmanInitializationError], + hint: str = None, + ): + """Use this method to retrieve custom properties from the Skill config.""" + p = next( + (prop for prop in self.config.custom_properties if prop.id == property_id), + None, + ) + if p is None or (p.required and p.value is None): + errors.append( + WingmanInitializationError( + wingman_name=self.name, + message=f"Missing custom property '{property_id}'. {hint}", + error_type=WingmanInitializationErrorType.INVALID_CONFIG, + ) + ) + return None + return p.value + + async def print_execution_time(self, reset_timer=False): + """Prints the current time since the execution started (in seconds).""" + if self.execution_start: + execution_stop = time.perf_counter() + elapsed_seconds = execution_stop - self.execution_start + await self.printr.print_async( + f"...took {elapsed_seconds:.2f}s", color=LogType.INFO + ) + if reset_timer: + self.start_execution_benchmark() + + def start_execution_benchmark(self): + """Starts the execution benchmark timer.""" + self.execution_start = time.perf_counter() diff --git a/skills/spotify/default_config.yaml b/skills/spotify/default_config.yaml new file mode 100644 index 00000000..3192bf84 --- /dev/null +++ b/skills/spotify/default_config.yaml @@ -0,0 +1,69 @@ +name: Spotify +module: skills.spotify.main +description: + en: Control Spotify using the the WebAPI. Play songs, artists playlists. Control playback, volume, and more. All powered by AI! + de: Steuere Spotify mit der WebAPI. Spiele Lieder, Künstler und Playlists ab. Steuere die Wiedergabe, Lautstärke und mehr. Alles von KI gesteuert! +hint: + en: + de: +examples: + - question: + en: Play the song Californication. + de: Spiele das Lied Californication. + answer: + en: Now playing 'Californication' by Red Hot Chili Peppers. + de: \'Californication\' von Red Hot Chili Peppers wird abgespielt. + - question: + en: What's the current song? + de: Wie heißt das aktuelle Lied? + answer: + en: You are currently listening to 'Californication' by Red Hot Chili Peppers. + de: Du hörst gerade 'Californication' von Red Hot Chili Peppers. + - question: + en: My girlfriend left me. Play a really sad song to match my mood. + de: Meine Freundin hat mich verlassen. Spiele ein wirklich trauriges Lied, das zu meiner Stimmung passt. + answer: + en: I'm sorry for you. Now playing 'Someone Like You' by Adele. + de: Es tut mir leid für dich. Jetzt wird "Someone Like You" von Adele gespielt. + - question: + en: Play the most popular song from the musical Les Miserables. + de: Spiele das beliebteste Lied aus dem Musical Les Miserables. + answer: + en: Playing 'I Dreamed a Dream' from Les Miserables. + de: \'I Dreamed a Dream\' aus Les Miserables wird abgespielt. + - question: + en: That's a cover song. Play the real version! + de: Das ist ein Cover-Song. Spiele die echte Version aus dem Film! + answer: + en: Playing 'I Dreamed a Dream' by Anne Hathaway from Les Miserables. + de: Spiele 'I Dreamed a Dream' von Anne Hathaway aus Les Miserables. + - question: + en: What are my Spotify devices? + de: Was sind meine Spotify-Geräte? + answer: + en: You have 2 devices available - 'Gaming PC' and 'iPhone'. + de: Du hast 2 Geräte verfügbar - 'Gaming PC' und 'iPhone'. + - question: + en: Play the music on my iPhone. + de: Spiele die Musik auf meinem iPhone ab. 
+ answer: + en: Moves the current playback to your iPhone + de: Überträgt die Spotify-Wiedergabe auf das iPhone +prompt: | + You are also an expert DJ and music player interface responsible to control the Spotify music player client of the user. + You have access to different tools or functions you can call to control the Spotify client using its API. + If the user asks you to play a song, resume, stop or pause the current playback etc. use your tools to do so. + For some functions, you need parameters like the song or artist name. Try to extract these values from the + player's request. + Never invent any function parameters. Ask the user for clarification if you are not sure or cannot extract function parameters. +custom_properties: + - hint: Create an app in the Spotify Dashboard at https://developer.spotify.com/dashboard. You'll find the Client ID in the Settings of that app. + id: spotify_client_id + name: Spotify Client ID + required: true + value: enter-your-client-id-here + - hint: Create an app in the Spotify Dashboard at https://developer.spotify.com/dashboard. In the Settings of the app, add http://127.0.0.1:8082 (or any other free port) as Redirect URL. Then enter the same value here. + id: spotify_redirect_url + name: Spotify Redirect URL + required: true + value: http://127.0.0.1:8082 diff --git a/skills/spotify/logo.png b/skills/spotify/logo.png new file mode 100644 index 00000000..4ceb1369 Binary files /dev/null and b/skills/spotify/logo.png differ diff --git a/skills/spotify/main.py b/skills/spotify/main.py new file mode 100644 index 00000000..7826fcd8 --- /dev/null +++ b/skills/spotify/main.py @@ -0,0 +1,378 @@ +from os import path +import spotipy +from spotipy.oauth2 import SpotifyOAuth +from api.enums import LogSource, LogType +from api.interface import ( + SettingsConfig, + SkillConfig, + WingmanConfig, + WingmanInitializationError, +) +from services.file import get_writable_dir +from skills.skill_base import Skill + + +class Spotify(Skill): + + def __init__( + self, + config: SkillConfig, + wingman_config: WingmanConfig, + settings: SettingsConfig, + ) -> None: + super().__init__( + config=config, wingman_config=wingman_config, settings=settings + ) + self.data_path = get_writable_dir(path.join("skills", "spotify", "data")) + self.spotify: spotipy.Spotify = None + self.available_devices = [] + + async def validate(self) -> list[WingmanInitializationError]: + errors = await super().validate() + + secret = await self.retrieve_secret("spotify_client_secret", errors) + client_id = self.retrieve_custom_property_value("spotify_client_id", errors) + redirect_url = self.retrieve_custom_property_value( + "spotify_redirect_url", errors + ) + if secret and client_id and redirect_url: + # now that we have everything, initialize the Spotify client + cache_handler = spotipy.cache_handler.CacheFileHandler( + cache_path=f"{self.data_path}/.cache" + ) + self.spotify = spotipy.Spotify( + auth_manager=SpotifyOAuth( + client_id=client_id, + client_secret=secret, + redirect_uri=redirect_url, + scope=[ + "user-library-read", + "user-read-currently-playing", + "user-read-playback-state", + "user-modify-playback-state", + "streaming", + # "playlist-modify-public", + "playlist-read-private", + # "playlist-modify-private", + "user-library-modify", + # "user-read-recently-played", + # "user-top-read" + ], + cache_handler=cache_handler, + ) + ) + return errors + + def get_tools(self) -> list[tuple[str, dict]]: + tools = [ + ( + "control_spotify_device", + { + "type": "function", + "function": { 
+ "name": "control_spotify_device", + "description": "Retrieves or sets the audio device of he user that Spotify songs are played on.", + "parameters": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The playback action to take", + "enum": ["get_devices", "set_active_device"], + }, + "device_name": { + "type": "string", + "description": "The name of the device to set as the active device.", + "enum": [ + device["name"] + for device in self.get_available_devices() + ], + }, + }, + "required": ["action"], + }, + }, + }, + ), + ( + "control_spotify_playback", + { + "type": "function", + "function": { + "name": "control_spotify_playback", + "description": "Control the Spotify audio playback with actions like play, pause/stop or play the previous/next track or set the volume level.", + "parameters": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The playback action to take", + "enum": [ + "play", + "pause", + "stop", + "play_next_track", + "play_previous_track", + "set_volume", + "mute", + "get_current_track", + "like_song", + ], + }, + "volume_level": { + "type": "number", + "description": "The volume level to set (in percent)", + }, + }, + "required": ["action"], + }, + }, + }, + ), + ( + "play_song_with_spotify", + { + "type": "function", + "function": { + "name": "play_song_with_spotify", + "description": "Find a song with Spotify to either play it immediately or queue it.", + "parameters": { + "type": "object", + "properties": { + "track": { + "type": "string", + "description": "The name of the track to play", + }, + "artist": { + "type": "string", + "description": "The artist that created the track", + }, + "queue": { + "type": "boolean", + "description": "If true, the song will be queued and played later. Otherwise it will be played immediately.", + }, + }, + }, + }, + }, + ), + ( + "interact_with_spotify_playlists", + { + "type": "function", + "function": { + "name": "interact_with_spotify_playlists", + "description": "Play a song from a Spotify playlist or add a song to a playlist.", + "parameters": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The action to take", + "enum": ["get_playlists", "play_playlist"], + }, + "playlist": { + "type": "string", + "description": "The name of the playlist to interact with", + "enum": [ + playlist["name"] + for playlist in self.get_user_playlists() + ], + }, + }, + "required": ["action"], + }, + }, + }, + ), + ] + return tools + + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] + ) -> tuple[str, str]: + instant_response = "" # not used here + function_response = "Unable to control Spotify." 
+ + if tool_name not in [ + "control_spotify_device", + "control_spotify_playback", + "play_song_with_spotify", + "interact_with_spotify_playlists", + ]: + return function_response, instant_response + + if self.settings.debug_mode: + self.start_execution_benchmark() + await self.printr.print_async( + f"Spotify: Executing {tool_name} with parameters: {parameters}", + color=LogType.INFO, + ) + + action = parameters.get("action", None) + parameters.pop("action", None) + function = getattr(self, action if action else tool_name) + function_response = function(**parameters) + + if self.settings.debug_mode: + await self.print_execution_time() + + return function_response, instant_response + + # HELPERS + + def get_available_devices(self): + devices = [ + device + for device in self.spotify.devices().get("devices") + if not device["is_restricted"] + ] + return devices + + def get_active_devices(self): + active_devices = [ + device + for device in self.spotify.devices().get("devices") + if device["is_active"] + ] + return active_devices + + def get_user_playlists(self): + playlists = self.spotify.current_user_playlists() + return playlists["items"] + + def get_playlist_uri(self, playlist_name: str): + playlists = self.spotify.current_user_playlists() + playlist = next( + ( + playlist + for playlist in playlists["items"] + if playlist["name"].lower() == playlist_name.lower() + ), + None, + ) + return playlist["uri"] if playlist else None + + # ACTIONS + + def get_devices(self): + active_devices = self.get_active_devices() + active_device_names = ", ".join([device["name"] for device in active_devices]) + available_device_names = ", ".join( + [device["name"] for device in self.get_available_devices()] + ) + if active_devices and len(active_devices) > 0: + return f"Your available devices are: {available_device_names}. Your active devices are: {active_device_names}." + if available_device_names: + return f"No active device found but these are the available devices: {available_device_names}" + + return "No devices found. Start Spotify on one of your devices first, then try again." + + def set_active_device(self, device_name: str): + if device_name: + device = next( + ( + device + for device in self.get_available_devices() + if device["name"] == device_name + ), + None, + ) + if device: + self.spotify.transfer_playback(device["id"]) + return "OK" + else: + return f"Device '{device_name}' not found." + + return "Device name not provided." + + def play(self): + self.spotify.start_playback() + return "OK" + + def pause(self): + self.spotify.pause_playback() + return "OK" + + def stop(self): + return self.pause() + + def play_previous_track(self): + self.spotify.previous_track() + return "OK" + + def play_next_track(self): + self.spotify.next_track() + return "OK" + + def set_volume(self, volume_level: int): + if volume_level: + self.spotify.volume(volume_level) + return "OK" + + return "Volume level not provided." + + def mute(self): + self.spotify.volume(0) + return "OK" + + def get_current_track(self): + current_playback = self.spotify.current_playback() + if current_playback: + artist = current_playback["item"]["artists"][0]["name"] + track = current_playback["item"]["name"] + return f"Currently playing '{track}' by '{artist}'." + + return "No track playing." + + def like_song(self): + current_playback = self.spotify.current_playback() + if current_playback: + track_id = current_playback["item"]["id"] + self.spotify.current_user_saved_tracks_add([track_id]) + return "Track saved to 'Your Music' library." 
+ + return "No track playing. Play a song, then tell me to like it." + + def play_song_with_spotify( + self, track: str = None, artist: str = None, queue: bool = False + ): + if not track and not artist: + return "What song or artist would you like to play?" + results = self.spotify.search(q=f"{track} {artist}", type="track", limit=1) + track = results["tracks"]["items"][0] + if track: + track_name = track["name"] + artist_name = track["artists"][0]["name"] + try: + if queue: + self.spotify.add_to_queue(track["uri"]) + return f"Added '{track_name}' by '{artist_name}' to the queue." + else: + self.spotify.start_playback(uris=[track["uri"]]) + return f"Now playing '{track_name}' by '{artist_name}'." + except spotipy.SpotifyException as e: + if e.reason == "NO_ACTIVE_DEVICE": + return "No active device found. Start Spotify on one of your devices first, then play a song or tell me to activate a device." + return f"An error occurred while trying to play the song. Code: {e.code}, Reason: '{e.reason}'" + + return "No track found." + + def get_playlists(self): + playlists = self.get_user_playlists() + playlist_names = ", ".join([playlist["name"] for playlist in playlists]) + if playlist_names: + return f"Your playlists are: {playlist_names}" + + return "No playlists found." + + def play_playlist(self, playlist: str = None): + if not playlist: + return "Which playlist would you like to play?" + + playlist_uri = self.get_playlist_uri(playlist) + if playlist_uri: + self.spotify.start_playback(context_uri=playlist_uri) + return f"Playing playlist '{playlist}'." + + return f"Playlist '{playlist}' not found." diff --git a/skills/spotify/requirements.txt b/skills/spotify/requirements.txt new file mode 100644 index 00000000..dbc2911c --- /dev/null +++ b/skills/spotify/requirements.txt @@ -0,0 +1 @@ +spotipy==2.23.0 \ No newline at end of file diff --git a/skills/star_head/default_config.yaml b/skills/star_head/default_config.yaml new file mode 100644 index 00000000..fb9ffd01 --- /dev/null +++ b/skills/star_head/default_config.yaml @@ -0,0 +1,43 @@ +name: StarHead +module: skills.star_head.main +description: + en: Use the StarHead API to retrieve detailed information about spaceships, weapons and more. StarHead can also calculate optimal trading routes based on live data. + de: Nutze die StarHead API, um detaillierte Informationen über Raumschiffe, Waffen und mehr abzurufen. StarHead kann auch optimale Handelsrouten anhand von Live-Daten berechnen. +# hint: +# en: +# de: +examples: + - question: + en: I want to trade. What's the best route? + de: Ich möchte handeln. Was ist die beste Route? + answer: + en: To provide you with the best trading route, I need to know your ship model, your current location, and your available budget. Could you please provide these details? + de: Um dir die beste Handelsroute anbieten zu können, muss ich dein Schiffsmodell, deinen aktuellen Standort und dein verfügbares Budget kennen. Kannst du mir diese Angaben bitte mitteilen? + - question: + en: I'm flying a Caterpillar and am near Yela. I have 100.000 credits to spend. + de: Ich fliege eine Caterpillar und bin in der Nähe von Yela. Ich habe 100.000 Credits auszugeben. + answer: + en: You can buy Stims at Deakins Research Outpost near Yela for 2.8 credits/unit and sell them at CRU-L1 Ambitious Dream Station for 3.85 credits/unit. The total profit for this route is approximately 37499 credits, and the travel time estimation is 41 minutes. 
+ de: Du kannst Stims bei Deakins Research Outpost in der Nähe von Yela für 2,8 Credits/Stück kaufen und sie bei CRU-L1 Ambitious Dream Station für 3,85 Credits/Stück verkaufen. Der Gesamtgewinn für diese Route beträgt ca. 37499 Credits, und die geschätzte Reisezeit beträgt 41 Minuten. + - question: + en: What can you tell me about the Caterpillar? + de: Was kannst du mir über die Caterpillar erzählen? + answer: + en: The Constellation Taurus is a dedicated freighter, designed for hauling cargo. It has a cargo capacity of 174 SCU and is fully configurable but without all the bells and whistles found on other Constellation variants. On the other hand, the Constellation Andromeda is a multi-person freighter and the most popular ship in RSI's current production array. It has a cargo capacity of 96 SCU and is beloved by smugglers and merchants alike for its modular and high-powered capabilities. Both are part of the Constellation series, but the Taurus specifically caters to dedicated freight operations whereas the Andromeda serves as a multi-person versatile ship. + de: Die Constellation Taurus ist ein reiner Frachter, der für den Transport von Fracht entwickelt wurde. Er hat eine Ladekapazität von 174 SCU und ist voll konfigurierbar, hat aber nicht den ganzen Schnickschnack der anderen Constellation-Varianten. Die Constellation Andromeda hingegen ist ein Mehrpersonen-Frachter und das beliebteste Schiff in der aktuellen Produktion von RSI. Sie hat eine Ladekapazität von 96 SCU und ist bei Schmugglern und Händlern wegen ihrer modularen und leistungsstarken Fähigkeiten gleichermaßen beliebt. Beide gehören zur Constellation-Serie, aber die Taurus ist speziell für den reinen Frachtverkehr gedacht, während die Andromeda ein vielseitiges Schiff für mehrere Personen ist. +prompt: | + You have access to the StarHead API which you can use to access live trading data and to retrieve additional information about spaceships in Star Citizen. + Your job is to find good trading routes for the user based on his/her ship, current location and available budget. + The user can also ask you about details of specific ships, components, weapons, and more. + You always use the tools available to you to retrieve the required information and to provide the user with the information. +custom_properties: + - hint: The URL of the StarHead API. + id: starhead_api_url + name: StarHead API URL + required: true + value: https://api.star-head.de + - hint: The URL of the Star Citizen Wiki API. 
+ id: star_citizen_wiki_api_url + name: Star Citizen Wiki API URL + required: true + value: https://api.star-citizen.wiki/api/v2 diff --git a/configs/templates/_Star Citizen/StarHead.avatar.png b/skills/star_head/logo.png similarity index 100% rename from configs/templates/_Star Citizen/StarHead.avatar.png rename to skills/star_head/logo.png diff --git a/wingmen/star_head_wingman.py b/skills/star_head/main.py similarity index 58% rename from wingmen/star_head_wingman.py rename to skills/star_head/main.py index 50d4715c..8be1c5d1 100644 --- a/wingmen/star_head_wingman.py +++ b/skills/star_head/main.py @@ -1,33 +1,26 @@ -from typing import Optional import json +from typing import Optional import requests +from api.enums import LogSource, LogType, WingmanInitializationErrorType from api.interface import ( SettingsConfig, + SkillConfig, WingmanConfig, WingmanInitializationError, ) -from api.enums import LogType, WingmanInitializationErrorType -from services.audio_player import AudioPlayer -from services.printr import Printr -from wingmen.open_ai_wingman import OpenAiWingman +from skills.skill_base import Skill -printr = Printr() - -class StarHeadWingman(OpenAiWingman): - """Our StarHead Wingman uses the StarHead API to find the best trading route for a given spaceship, position and the money to spend. - If it's missing any of these parameters, it will ask the user for them. - """ +class StarHead(Skill): def __init__( self, - name: str, - config: WingmanConfig, + config: SkillConfig, + wingman_config: WingmanConfig, settings: SettingsConfig, - audio_player: AudioPlayer, ) -> None: super().__init__( - name=name, config=config, settings=settings, audio_player=audio_player + config=config, wingman_config=wingman_config, settings=settings ) # config entry existence not validated yet. Assign later when checked! 
@@ -48,63 +41,30 @@ def __init__( self.celestial_object_names = [] self.quantum_drives = [] - async def validate(self): - # collect errors from the base class (if any) - errors: list[WingmanInitializationError] = await super().validate() + async def validate(self) -> list[WingmanInitializationError]: + errors = await super().validate() - # add custom errors - starhead_api_url = next( - ( - prop - for prop in self.config.custom_properties - if prop.id == "starhead_api_url" - ), - None, + self.starhead_url = self.retrieve_custom_property_value( + "starhead_api_url", errors ) - star_citizen_wiki_api_url = next( - ( - prop - for prop in self.config.custom_properties - if prop.id == "star_citizen_wiki_api_url" - ), - None, + self.star_citizen_wiki_url = self.retrieve_custom_property_value( + "star_citizen_wiki_api_url", errors ) - if not starhead_api_url or not starhead_api_url.value: - errors.append( - WingmanInitializationError( - wingman_name=self.name, - message="Missing required custom property 'starhead_api_url'.", - error_type=WingmanInitializationErrorType.INVALID_CONFIG, - ) - ) - elif not star_citizen_wiki_api_url or not star_citizen_wiki_api_url.value: + try: + await self._prepare_data() + except Exception as e: errors.append( WingmanInitializationError( wingman_name=self.name, - message="Missing required custom property 'star_citizen_wiki_api_url'.", - error_type=WingmanInitializationErrorType.INVALID_CONFIG, + message=f"Failed to load data from StarHead API: {e}", + error_type=WingmanInitializationErrorType.UNKNOWN, ) ) - else: - self.starhead_url = starhead_api_url.value - self.star_citizen_wiki_url = star_citizen_wiki_api_url.value - try: - await self._prepare_data() - except Exception as e: - errors.append( - WingmanInitializationError( - wingman_name=self.name, - message=f"Failed to load data from StarHead API: {e}", - error_type=WingmanInitializationErrorType.UNKNOWN, - ) - ) return errors async def _prepare_data(self): - self.start_execution_benchmark() - self.vehicles = await self._fetch_data("vehicle") self.ship_names = [ self._format_ship_name(vehicle) @@ -126,15 +86,19 @@ async def _fetch_data( ) -> list[dict[str, any]]: url = f"{self.starhead_url}/{endpoint}" - if self.debug: - await printr.print_async(f"Retrieving {url}", color=LogType.INFO) + if self.settings.debug_mode: + self.start_execution_benchmark() + await self.printr.print_async( + f"Retrieving {url}", + color=LogType.INFO, + ) response = requests.get( url, params=params, timeout=self.timeout, headers=self.headers ) response.raise_for_status() - if self.debug: - await self.print_execution_time(reset_timer=True) + if self.settings.debug_mode: + await self.print_execution_time() return response.json() @@ -142,64 +106,63 @@ def _format_ship_name(self, vehicle: dict[str, any]) -> str: """Formats name by combining model and name, avoiding repetition""" return vehicle["name"] - async def _execute_command_by_function_call( - self, function_name: str, function_args: dict[str, any] + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] ) -> tuple[str, str]: - """Execute commands passed from the base class and handles the 'get_best_trading_route'.""" - ( - function_response, - instant_response, - ) = await super()._execute_command_by_function_call( - function_name, function_args - ) - if function_name == "get_best_trading_route": - function_response = await self._get_best_trading_route(**function_args) - if function_name == "get_ship_information": - function_response = await 
self._get_ship_information(**function_args) + instant_response = "" + function_response = "" + + if tool_name == "get_best_trading_route": + function_response = await self._get_best_trading_route(**parameters) + if tool_name == "get_ship_information": + function_response = await self._get_ship_information(**parameters) return function_response, instant_response - def _build_tools(self) -> list[dict[str, any]]: - """Builds the toolset for execution, adding custom function 'get_best_trading_route'.""" - tools = super()._build_tools() - tools.append( - { - "type": "function", - "function": { - "name": "get_best_trading_route", - "description": "Finds the best trade route for a given spaceship and position.", - "parameters": { - "type": "object", - "properties": { - "ship": {"type": "string", "enum": self.ship_names}, - "position": { - "type": "string", - "enum": self.celestial_object_names, + def get_tools(self) -> list[tuple[str, dict]]: + tools = [ + ( + "get_best_trading_route", + { + "type": "function", + "function": { + "name": "get_best_trading_route", + "description": "Finds the best trade route for a given spaceship and position.", + "parameters": { + "type": "object", + "properties": { + "ship": {"type": "string", "enum": self.ship_names}, + "position": { + "type": "string", + "enum": self.celestial_object_names, + }, + "moneyToSpend": {"type": "number"}, }, - "moneyToSpend": {"type": "number"}, + "required": ["ship", "position", "moneyToSpend"], }, - "required": ["ship", "position", "moneyToSpend"], }, }, - } - ) - tools.append( - { - "type": "function", - "function": { - "name": "get_ship_information", - "description": "Gives information about the given ship.", - "parameters": { - "type": "object", - "properties": { - "ship": {"type": "string", "enum": self.ship_names}, + ), + ( + "get_ship_information", + { + "type": "function", + "function": { + "name": "get_ship_information", + "description": "Gives information about the given ship.", + "parameters": { + "type": "object", + "properties": { + "ship": {"type": "string", "enum": self.ship_names}, + }, + "required": ["ship"], }, - "required": ["ship"], }, }, - } - ) + ), + ] + return tools - + async def _get_ship_information(self, ship: str) -> str: try: response = requests.get( @@ -252,7 +215,7 @@ async def _get_best_trading_route( if parsed_response: section = parsed_response[0] return json.dumps(section) - return "No route found. This might be an issue with the StarHead API." + return f"No route found for ship '{ship}' at '{position}' with '{moneyToSpend}' aUEC." def _get_celestial_object_id(self, name: str) -> Optional[int]: """Finds the ID of the celestial object with the specified name.""" @@ -305,7 +268,7 @@ async def _get_ship_loadout( loadout = await self._fetch_data(f"vehicle/{ship_id}/loadout") return loadout or None except requests.HTTPError: - await printr.print_async( + await self.printr.print_async( f"Failed to fetch loadout data for ship with ID: {ship_id}", color=LogType.ERROR, ) diff --git a/skills/uexcorp/default_config.yaml b/skills/uexcorp/default_config.yaml new file mode 100644 index 00000000..6208e38e --- /dev/null +++ b/skills/uexcorp/default_config.yaml @@ -0,0 +1,184 @@ +name: UEXCorp +module: skills.uexcorp.main +description: + en: Use the UEXCorp API to get live trading and lore information about ships, locations, commodities and more in Star Citizen. + de: Nutze die UEXCorp API, um live Handels- und Lore-Informationen über Schiffe, Orte, Rohstoffe und mehr in Star Citizen zu erhalten. 
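Every skill now describes itself in a default_config.yaml like the one added here; the module key names the Python module that contains the Skill subclass. The loader that consumes these files is not part of this diff, but a plausible sketch of the resolution step might look like the following (load_skill_class is a hypothetical helper; yaml is PyYAML):

import importlib
import inspect

import yaml  # PyYAML

from skills.skill_base import Skill


def load_skill_class(config_path: str) -> type[Skill]:
    """Resolve the Skill subclass referenced by a skill's default_config.yaml."""
    with open(config_path, "r", encoding="UTF-8") as f:
        config = yaml.safe_load(f)
    # e.g. module: skills.uexcorp.main, which defines class UEXCorp(Skill)
    module = importlib.import_module(config["module"])
    for _, member in inspect.getmembers(module, inspect.isclass):
        if issubclass(member, Skill) and member is not Skill:
            return member
    raise ValueError(f"No Skill subclass found in {config['module']}")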
+# hint: +# en: +# de: +examples: + - question: + en: Please provide me the best two trading routes for my Caterpillar, I'm currently at Hurston. + de: Bitte nenne mir die zwei besten Handelsrouten für meine Caterpillar, die derzeit in Hurston steht. + answer: + en: You have two highly profitable trading routes available. The first route involves transporting Recycled Material Composite from Pickers Field on Hurston to Orison - Trade & Development Division on the planet Crusader, offering a profit of 2,148,480 aUEC for 576 SCU of cargo. The second route entails shipping Laranite from HDMS-Lathan on the satellite Arial back to Central Business District in Lorville, resulting in a profit of 297,216 aUEC for the same cargo capacity. + de: Du hast zwei hochprofitable Handelsrouten zur Verfügung. Auf der ersten Route transportierst du Recyclingmaterial von Pickers Field auf Hurston nach Orison - Trade & Development Division auf dem Planeten Crusader und erzielst einen Gewinn von 2.148.480 AUEC für 576 SCU Fracht. Auf der zweiten Route wird Laranit von HDMS-Lathan auf dem Satelliten Arial zurück zum Central Business District in Lorville transportiert, was einen Gewinn von 297.216 AUEC für die gleiche Frachtkapazität bedeutet. + - question: + en: I got 3000 SCU of Hydrogen loaded in my Hull-C, where can I sell it? + de: Ich habe 3000 SCU Hydrogen in meinem Hull-C geladen. Wo kann ich ihn verkaufen? + answer: + en: You can sell the 3000 SCU of Hydrogen at Baijini Point, located on ArcCorp in the Stanton system. + de: Du kannst die 3000 SCU Hydrogen am Baijini Point auf ArcCorp im Stanton-System verkaufen. + - question: + en: What can you tell me about the Hull-C? + de: Was kannst du mir über den Hull-C erzählen? + answer: + en: The Hull-C is manufactured by Musashi Industrial & Starflight Concern and falls under the 'HULL' series. It serves as a freighter and has a cargo capacity of 4608 SCU. The ship can be purchased at New Deal in Lorville, Hurston for 15,750,000 aUEC. It can accommodate a crew of 1-4 and is designed for trading on suitable space stations with a cargo deck. + de: Die Hull-C wird von Musashi Industrial & Starflight Concern hergestellt und gehört zur "HULL"-Serie. Sie dient als Frachter und hat eine Frachtkapazität von 4608 SCU. Das Schiff kann bei New Deal in Lorville, Hurston für 15.750.000 AUEC erworben werden. Es kann eine Besatzung von 1-4 Personen aufnehmen und ist für den Handel mit geeigneten Raumstationen mit einem Frachtdeck ausgelegt. + - question: + en: What is the difference between the Mole and the Prospector? + de: Was ist der Unterschied zwischen der MOLE und dem Prospector? + answer: + en: The MOLE is a mining ship manufactured by ARGO Astronautics, with a capacity for 2-4 crew members and a cargo hold of 96 SCU. It has been flyable since version 3.8.0 and is available for purchase at Lorville on Hurston for 5,130,500 aUEC. On the other hand, the Prospector, manufactured by Musashi Industrial & Starflight Concern, is a smaller mining vessel designed for a single crew member, with a cargo capacity of 32 SCU. It has been flyable since version 3.0.0 and can be purchased at Lorville for 2,061,000 aUEC. The Prospector is also available for rent at various locations in the Stanton system, unlike the MOLE. + de: Die MOLE ist ein von ARGO Astronautics hergestelltes Bergbauschiff mit einer Kapazität für 2-4 Besatzungsmitglieder und einem Laderaum von 96 SCU. Sie ist seit Version 3.8.0 flugfähig und kann bei Lorville auf Hurston für 5.130.500 AUEC gekauft werden.
Der Prospector, hergestellt von Musashi Industrial & Starflight Concern, ist ein kleineres Bergbauschiff für ein einziges Besatzungsmitglied und hat eine Ladekapazität von 32 SCU. Er ist seit Version 3.0.0 flugfähig und kann auf Lorville für 2.061.000 AUEC gekauft werden. Im Gegensatz zur MOLE kann der Prospector auch an verschiedenen Orten im Stanton-System gemietet werden. + - question: + en: What do you know about the commodity Neon? + de: Was weißt du über die Ware Neon? + answer: + en: The commodity 'Neon' is categorized as a drug and is available for purchase at Nuen Waste Management for 7,000 aUEC. It is also sellable at Grim HEX for 8,428 aUEC and Jumptown for 8,805 aUEC. However, it's important to note that Neon is illegal, and engaging in activities involving this commodity may lead to fines and a crimestat. It's advisable to avoid ship scans to mitigate the risk of penalties. + de: Die Ware "Neon" wird als Droge eingestuft und kann bei Nuen Waste Management für 7.000 AUEC gekauft werden. Außerdem kann es bei Grim HEX für 8.428 AUEC und bei Jumptown für 8.805 AUEC verkauft werden. Es ist jedoch wichtig zu wissen, dass Neon illegal ist und dass der Handel mit dieser Ware zu Geldstrafen und einem CrimeStat führen kann. Es ist ratsam, Schiffsscans zu vermeiden, um das Risiko von Strafen zu minimieren. + - question: + en: What is the best place to buy Laranite? + de: Wo kann ich Laranit am besten kaufen? + answer: + en: The best place to buy Laranite for your Caterpillar is at ArcCorp Mining Area 045, located on the satellite Wala in the Stanton system. It's available for purchase at 2,322 aUEC per SCU. + de: Der beste Ort, um Laranit für deine Caterpillar zu kaufen, ist das ArcCorp-Bergbaugebiet 045, das sich auf dem Satelliten Wala im Stanton-System befindet. Du kannst es für 2.322 AUEC pro SCU kaufen. + - question: + en: Where is the satellite Wala located? + de: Wo befindet sich der Satellit Wala? + answer: + en: The satellite Wala is located in the Stanton system, orbiting the planet ArcCorp. It hosts various trading options, including locations like ArcCorp Mining Area 045, ArcCorp Mining Area 048, ArcCorp Mining Area 056, ArcCorp Mining Area 061, Shady Glen Farms, and Samson & Son's Salvage Center. + de: Der Satellit Wala befindet sich im Stanton-System und umkreist den Planeten ArcCorp. Er beherbergt verschiedene Handelsmöglichkeiten, darunter Orte wie ArcCorp Mining Area 045, ArcCorp Mining Area 048, ArcCorp Mining Area 056, ArcCorp Mining Area 061, Shady Glen Farms und Samson & Son's Salvage Center. +prompt: | + You have tools to access the UEXcorp API which you can use to retrieve live trading data and additional information about ships, locations, commodities and more in Star Citizen. + Here are some examples of when to use the different tools at your disposal: + + Never translate any properties when giving them to the player. They must stay in English, untouched. + Only give function parameters that were previously clearly provided by a request. Never assume any values, not the current ship, not the location, not the available money, nothing! Always send a None-value instead. + If you are not using one of the defined functions, don't give any trading recommendations. + If you execute a function that requires a commodity name, make sure to always provide the name in English, not in German or any other language. + Never mention optional function (tool) parameters to the user. Only mention the required parameters, if some are missing.
+ + Samples when to use function "get_trading_routes": + - "Best trading route": Indicates a user's intent to find the best trading route. + - "Trade route": Suggests the user is seeking information on trading routes. + - "Profitable trade route": Implies a focus on finding profitable trading routes. + - "Trading advice": Indicates the user wants guidance on trading decisions. + + Samples when to use function "get_locations_to_sell_to": + - "Sell commodity": Indicates a user's intent to sell a specific item. + - "Best place to sell": Suggests the user is seeking information on optimal selling locations. + - "Seller's market": Implies a focus on finding favorable selling conditions. + - "Selling advice": Indicates the user wants guidance on selling decisions. + - "Seller's guide": Suggests a request for assistance in the selling process. + - "Find buyer": Indicates the user's interest in locating potential buyers. + - "Sell item": Implies a user's intent to sell an item. + - "Sell cargo": Suggests a focus on selling cargo or goods. + - "Offload inventory": Signals the intention to sell available inventory. + + Samples when to use function "get_locations_to_buy_from": + - "Buy commodity": Indicates a user's intent to purchase a specific item. + - "Best place to buy": Suggests the user is seeking information on optimal buying locations. + - "Buyer's market": Implies a focus on finding favorable buying conditions. + - "Purchase location": Signals interest in identifying a suitable location for buying. + - "Buying advice": Indicates the user wants guidance on purchasing decisions. + - "Buyer's guide": Suggests a request for assistance in the buying process. + + Samples when to use function "get_location_information": + - "Location information": Indicates a user's intent to gather information about a specific location. + - "Location details": Suggests the user is seeking detailed information about a specific location. + + Samples when to use function "get_ship_information": + - "Ship information": Indicates a user's intent to gather information about a specific ship. + - "Ship details": Suggests the user is seeking detailed information about a specific ship. + + Samples when to use function "get_ship_comparison": + - "Ship comparison": Indicates a user's intent to compare two ships. Use it every time at least two ships are mentioned in the request. + + Samples when to use function "get_commodity_information": + - "Commodity information": Indicates a user's intent to gather information about a specific commodity. + - "Commodity details": Suggests the user is seeking detailed information about a specific commodity. + - "Commodity prices": Implies a focus on obtaining current prices for a specific commodity. + + Samples when to use function "reload_current_commodity_prices" (Great to use before retrieving sell, buy and trade options): + - "Update commodity prices": Indicates a user's intent to update the commodity prices. + - "Get current prices": Suggests the user is seeking the current commodity prices. + - "Refresh prices": Implies a focus on updating the commodity prices. +custom_properties: + - id: uexcorp_api_url + name: API URL + hint: The URL of the UEX corp API. + value: https://portal.uexcorp.space/api + required: true + property_type: string + - id: uexcorp_api_timeout + name: API Timeout + hint: The timeout for the UEX corp API in seconds.
+ value: 5 + required: true + property_type: number + # ────────────────────────────── No touchy zone end ────────────────────────────── + + # ──────────────────────── uexcorp specific config start ───────────────────────── + # Set this option to "true" to enable caching of the UEX corp API responses. This is recommended, as the API key's quota is very limited. + # If you set this option to "false", the Wingman will fetch all data from the UEX corp API on every start. + # If you want to update the prices, just tell the Wingman to do so. + # If all data should be fetched again, delete the cache file. (skills\uexcorp\data\cache.json) + - id: uexcorp_cache + name: Enable Cache + hint: Set this option to "true" to enable caching of the UEX corp API responses. This is recommended, as the API key's quota is very limited. + value: true + required: true + property_type: boolean + + # Set this option to the number of seconds you want to cache the UEX corp API responses. + # We recommend a day ("86400"), as the ship, planet, etc. information does not change that often. + - id: uexcorp_cache_duration + name: Cache Duration + hint: Set this option to the number of seconds you want to cache the UEX corp API responses. We recommend a day ("86400"). + value: 86400 + required: true + property_type: number + + # Set this option to "true" to show only one of the most profitable routes for each commodity. + # Set this option to "false" to show all routes. This may include multiple routes for the same commodity. + # Recommended: "true" + - id: uexcorp_summarize_routes_by_commodity + name: Summarize Routes by Commodity + hint: Set this option to "true" to show only the most profitable routes per commodity. "false" shows multiple options per commodity. + value: true + required: true + property_type: boolean + + # Set this option to "true" to make the start location for trade route calculation mandatory. + # Set this option to "false" to make the start location for trade route calculation optional. + # If "false" and no start location is given, all tradeports are taken into account. + - id: uexcorp_tradestart_mandatory + name: Trade Start Mandatory + hint: Set this option to "true" to make the start location for trade route calculation mandatory. If "false" and no start location is given, all tradeports are taken into account. + value: true + required: true + property_type: boolean + + # Use this to blacklist certain trade ports or commodities or combinations of both. + # Default value is '[]', which means no trade ports or commodities are blacklisted. + # If we want to add a trade port to the blacklist, we add something like this: {"tradeport":"Baijini Point"} This will blacklist the trade port completely from trade route calculations. + # If we want to add a commodity to the blacklist, we add something like this: {"commodity":"Medical Supplies"} This will blacklist the commodity completely from trade route calculations. + # If we want to add a combination to the blacklist, we add something like this: {"tradeport":"Baijini Point", "commodity":"Medical Supplies"} This will blacklist this commodity for the given trade port.
+ # If we want to add multiple trade ports or commodities or combinations of both, we add them in a list like this: [{"tradeport":"Baijini Point", "commodity":"Medical Supplies"}, {"commodity":"Medical Supplies"}, {"tradeport":"Port Tressler"}] + # This value is a JSON string, so if you have created a list, use a JSON validator like https://jsonlint.com/ to check if the list is valid. + - id: uexcorp_trade_blacklist + name: Trade Blacklist + hint: JSON string to blacklist certain trade ports or commodities or combinations of both. Default value is empty ('[]'). Sample -> [{"tradeport":"Baijini Point", "commodity":"Medical Supplies"}, {"commodity":"Medical Supplies"}, {"tradeport":"Port Tressler"}] + value: "[]" + required: true + property_type: string + + # Set this option to the number of trade routes you want to show by default. + # You can always tell Wingman AI to show more or fewer trade routes for a single request; if no number is given, this setting is used. + - id: uexcorp_default_trade_route_count + name: Default Trade Route Count + hint: Set this option to the number of trade routes you want to show by default. + value: 1 + required: true + property_type: number diff --git a/skills/uexcorp/logo.png b/skills/uexcorp/logo.png new file mode 100644 index 00000000..94392d3f Binary files /dev/null and b/skills/uexcorp/logo.png differ diff --git a/skills/uexcorp/main.py b/skills/uexcorp/main.py new file mode 100644 index 00000000..8ba262c2 --- /dev/null +++ b/skills/uexcorp/main.py @@ -0,0 +1,2848 @@ +"""Wingman AI Skill to utilize the UEXCorp API for trade recommendations""" + +import copy +import difflib +import heapq +import itertools +import json +import logging +import math +from os import path +import collections +import re +from typing import Optional +from datetime import datetime +import requests +from api.enums import LogType, WingmanInitializationErrorType +from api.interface import ( + SettingsConfig, + SkillConfig, + WingmanConfig, + WingmanInitializationError, +) +from services.file import get_writable_dir +from skills.skill_base import Skill + + +class UEXCorp(Skill): + + # enable for verbose logging + DEV_MODE = False + + # TODO: retrieve this from an API + MANUFACTURERS = { + "AEGS": "Aegis Dynamics", + "ANVL": "Anvil Aerospace", + "AOPO": "Aopoa", + "ARGO": "ARGO Astronautics", + "BANU": "Banu", + "CNOU": "Consolidated Outland", + "CRUS": "Crusader Industries", + "DRAK": "Drake Interplanetary", + "ESPR": "Esperia", + "GATA": "Gatac", + "GRIN": "Greycat Industrial", + "KRIG": "Kruger Intergalactic", + "MIRA": "Mirai", + "MISC": "Musashi Industrial & Starflight Concern", + "ORIG": "Origin Jumpworks", + "RSIN": "Roberts Space Industries", + "TMBL": "Tumbril Land Systems", + "VNDL": "Vanduul", + } + + def __init__( + self, + config: SkillConfig, + wingman_config: WingmanConfig, + settings: SettingsConfig, + ) -> None: + super().__init__( + config=config, wingman_config=wingman_config, settings=settings + ) + + self.data_path = get_writable_dir(path.join("skills", "uexcorp", "data")) + self.logfileerror = path.join(self.data_path, "error.log") + self.logfiledebug = path.join(self.data_path, "debug.log") + self.cachefile = path.join(self.data_path, "cache.json") + logging.basicConfig(filename=self.logfileerror, level=logging.ERROR) + + self.uexcorp_version = "v11" + + # init of config options + self.uexcorp_api_url: str = None + self.uexcorp_api_key: str = None + self.uexcorp_api_timeout: int = None + self.uexcorp_cache: bool = None + self.uexcorp_cache_duration: int = None +
self.uexcorp_summarize_routes_by_commodity: bool = None + self.uexcorp_tradestart_mandatory: bool = None + self.uexcorp_trade_blacklist = [] + self.uexcorp_default_trade_route_count: int = None + + # init of data lists + self.ships = [] + self.ship_names = [] + self.ship_dict = {} + self.ship_code_dict = {} + self.commodities = [] + self.commodity_names = [] + self.commodity_dict = {} + self.commodity_code_dict = {} + self.systems = [] + self.system_names = [] + self.system_dict = {} + self.system_code_dict = {} + self.tradeports = [] + self.tradeport_names = [] + self.tradeport_dict = {} + self.tradeport_code_dict = {} + self.tradeports_by_system = collections.defaultdict(list) + self.tradeports_by_planet = collections.defaultdict(list) + self.tradeports_by_satellite = collections.defaultdict(list) + self.tradeports_by_city = collections.defaultdict(list) + self.planets = [] + self.planet_names = [] + self.planet_dict = {} + self.planet_code_dict = {} + self.planets_by_system = collections.defaultdict(list) + self.satellites = [] + self.satellite_names = [] + self.satellite_dict = {} + self.satellite_code_dict = {} + self.satellites_by_planet = collections.defaultdict(list) + self.cities = [] + self.city_names = [] + self.city_dict = {} + self.city_code_dict = {} + self.cities_by_planet = collections.defaultdict(list) + + self.location_names_set = set() + + self.cache_enabled = True + self.cache = { + "function_args": {}, + "search_matches": {}, + "readable_objects": {}, + } + + self.dynamic_context = "" + + async def _print(self, message: str | dict, is_extensive: bool = False) -> None: + """ + Prints a message if debug mode is enabled. Will be sent to the server terminal, log file and client. + + Args: + message (str | dict): The message to be printed. + is_extensive (bool, optional): Whether the message is extensive. Defaults to False. + + Returns: + None + """ + if not is_extensive and self.settings.debug_mode: + await self.printr.print_async( + message, + color=LogType.INFO, + ) + elif self.DEV_MODE: + with open(self.logfiledebug, "a", encoding="UTF-8") as f: + f.write(f"#### Time: {datetime.now()} ####\n") + f.write(f"{message}\n\n") + + + def _log(self, message: str | dict, is_extensive: bool = False) -> None: + """ + Prints a debug message (synchronously) only on the server (and in the log file). + + Args: + message (str | dict): The message to be printed. + is_extensive (bool, optional): Whether the message is extensive. Defaults to False. + + Returns: + None + """ + if not is_extensive and self.settings.debug_mode: + self.printr.print( + message, + color=LogType.INFO, + server_only=True, + ) + elif self.DEV_MODE: + with open(self.logfiledebug, "a", encoding="UTF-8") as f: + f.write(f"#### Time: {datetime.now()} ####\n") + f.write(f"{message}\n\n") + + def _get_function_arg_from_cache( + self, arg_name: str, arg_value: str | int = None + ) -> str | int | None: + """ + Retrieves a function argument from the cache if available, otherwise returns the provided argument value. + + Args: + arg_name (str): The name of the function argument. + arg_value (str | int, optional): The default value for the argument. Defaults to None. + + Returns: + dict[str, any]: The cached value of the argument if available, otherwise the provided argument value. 
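+ + Example (illustrative; assumes 'ship_name' was cached earlier via _set_function_arg_to_cache): + >>> self._set_function_arg_to_cache("ship_name", "Hull C") + >>> self._get_function_arg_from_cache("ship_name", None) + 'Hull C'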
+ """ + if not self.cache_enabled: + return arg_value + + if arg_value is None or ( + isinstance(arg_value, str) and arg_value.lower() == "current" + ): + cached_arg = self.cache["function_args"].get(arg_name) + if cached_arg is not None: + self._log( + f"'{arg_name}' was not given and got overwritten by cache: {cached_arg}" + ) + return cached_arg + + return arg_value + + def _set_function_arg_to_cache( + self, arg_name: str, arg_value: str | int | float = None + ) -> None: + """ + Sets the value of a function argument to the cache. + + Args: + arg_name (str): The name of the argument. + arg_value (str | int, optional): The value of the argument. Defaults to None. + """ + if not self.cache_enabled: + return + + function_args = self.cache["function_args"] + old_value = function_args.get(arg_name, "None") + + if arg_value is not None: + self._log( + f"Set function arg '{arg_name}' to cache. Previous value: {old_value} >> New value: {arg_value}", + True, + ) + function_args[arg_name] = arg_value + elif arg_name in function_args: + self._log( + f"Removing function arg '{arg_name}' from cache. Previous value: {old_value}", + True, + ) + function_args.pop(arg_name, None) + + async def validate(self) -> list[WingmanInitializationError]: + errors = await super().validate() + + self.uexcorp_api_key = await self.retrieve_secret( + "uexcorp", errors, "You can get one here: https://uexcorp.space/api.html" + ) + self.uexcorp_api_url = self.retrieve_custom_property_value( + "uexcorp_api_url", errors + ) + self.uexcorp_api_timeout = self.retrieve_custom_property_value( + "uexcorp_api_timeout", errors + ) + self.uexcorp_cache = self.retrieve_custom_property_value( + "uexcorp_cache", errors + ) + self.uexcorp_cache_duration = self.retrieve_custom_property_value( + "uexcorp_cache_duration", errors + ) + self.uexcorp_summarize_routes_by_commodity = ( + self.retrieve_custom_property_value( + "uexcorp_summarize_routes_by_commodity", errors + ) + ) + self.uexcorp_tradestart_mandatory = self.retrieve_custom_property_value( + "uexcorp_tradestart_mandatory", errors + ) + self.uexcorp_default_trade_route_count = self.retrieve_custom_property_value( + "uexcorp_default_trade_route_count", errors + ) + + trade_backlist_str: str = self.retrieve_custom_property_value( + "uexcorp_trade_blacklist", errors + ) + if trade_backlist_str: + try: + self.uexcorp_trade_blacklist = json.loads(trade_backlist_str) + except json.decoder.JSONDecodeError: + errors.append( + WingmanInitializationError( + wingman_name=self.name, + message="Invalid custom property 'uexcorp_trade_blacklist' in config. Value must be a valid JSON string.", + error_type=WingmanInitializationErrorType.INVALID_CONFIG, + ) + ) + + try: + await self._prepare_data() + except Exception as e: + errors.append( + WingmanInitializationError( + wingman_name=self.name, + message=f"Failed to load data: {e}", + error_type=WingmanInitializationErrorType.UNKNOWN, + ) + ) + + return errors + + async def _load_data(self, reload: bool = False) -> None: + """ + Load data for UEX corp wingman. + + Args: + reload (bool, optional): Whether to reload the data from the source. Defaults to False. 
+ """ + + boo_tradeports_reloaded = False + + save_cache = False + # if cache is enabled and file is not too old, load from cache + if self.uexcorp_cache and not reload: + # check file age + data = {} + try: + with open(self.cachefile, "r", encoding="UTF-8") as f: + data = json.load(f) + except (FileNotFoundError, json.decoder.JSONDecodeError): + pass + + # check file age + if ( + data.get("timestamp") + and data.get("timestamp") + self.uexcorp_cache_duration + > self._get_timestamp() + and data.get("wingman_version") == self.uexcorp_version + ): + if data.get("ships"): + self.ships = data["ships"] + if data.get("commodities"): + self.commodities = data["commodities"] + if data.get("systems"): + self.systems = data["systems"] + if data.get("tradeports"): + self.tradeports = data["tradeports"] + boo_tradeports_reloaded = True + if data.get("planets"): + self.planets = data["planets"] + if data.get("satellites"): + self.satellites = data["satellites"] + if data.get("cities"): + self.cities = data["cities"] + else: + save_cache = True + + if not self.ships: + self.ships = await self._fetch_uex_data("ships") + self.ships = [ship for ship in self.ships if ship["implemented"] == "1"] + + if not self.commodities: + self.commodities = await self._fetch_uex_data("commodities") + + if not self.systems: + self.systems = await self._fetch_uex_data("star_systems") + self.systems = [ + system for system in self.systems if system["available"] == 1 + ] + for system in self.systems: + self.tradeports += await self._fetch_uex_data( + f"tradeports/system/{system['code']}" + ) + self.cities += await self._fetch_uex_data( + f"cities/system/{system['code']}" + ) + self.satellites += await self._fetch_uex_data( + f"satellites/system/{system['code']}" + ) + self.planets += await self._fetch_uex_data( + f"planets/system/{system['code']}" + ) + + self.tradeports = [ + tradeport + for tradeport in self.tradeports + if tradeport["visible"] == "1" + ] + boo_tradeports_reloaded = True + + self.planets = [ + planet for planet in self.planets if planet["available"] == 1 + ] + + self.satellites = [ + satellite + for satellite in self.satellites + if satellite["available"] == 1 + ] + else: + if reload: + self.tradeports = [] + for system in self.systems: + self.tradeports += await self._fetch_uex_data( + f"tradeports/system/{system['code']}" + ) + + self.tradeports = [ + tradeport + for tradeport in self.tradeports + if tradeport["visible"] == "1" + ] + boo_tradeports_reloaded = True + save_cache = True + + if ( + save_cache + and self.uexcorp_cache + and self.uexcorp_cache_duration > 0 + and self.ships + and self.commodities + and self.systems + and self.tradeports + and self.planets + and self.satellites + and self.cities + ): + data = { + "timestamp": self._get_timestamp(), + "wingman_version": self.uexcorp_version, + "ships": self.ships, + "commodities": self.commodities, + "systems": self.systems, + "tradeports": self.tradeports, + "planets": self.planets, + "satellites": self.satellites, + "cities": self.cities, + } + with open(self.cachefile, "w", encoding="UTF-8") as f: + json.dump(data, f, indent=4) + + # data manipulation + # remove planet information from space tradeports + if boo_tradeports_reloaded is True: + planet_codes = [] + for planet in self.planets: + if planet["code"] not in planet_codes: + planet_codes.append(planet["code"]) + + for tradeport in self.tradeports: + shorted_name = tradeport["name"].split(" ")[0] + if ( + tradeport["space"] == "1" + and len(shorted_name.split("-")) == 2 + and 
shorted_name.split("-")[0] in planet_codes + and re.match(r"^L\d+$", shorted_name.split("-")[1]) + ): + tradeport["planet"] = "" + # remove urls from ships and resolve manufacturer code + for ship in self.ships: + ship.pop("store_url", None) + ship.pop("photos", None) + ship.pop("brochure_url", None) + ship.pop("hotsite_url", None) + ship.pop("video_url", None) + # remove unavailable cities + self.cities = [city for city in self.cities if city["available"] == 1] + # add hull trading option to trade ports + tradeports_for_hull_trading = [ + "Baijini Point", + "Everus Harbor", + "Magnus Gateway", + "Pyro Gateway", + "Seraphim Station", + "Terra Gateway", + "Port Tressler", + ] + for tradeport in self.tradeports: + tradeport["hull_trading"] = tradeport["name"] in tradeports_for_hull_trading + # add hull trading option to ships + ships_for_hull_trading = ["Hull C"] + for ship in self.ships: + ship["hull_trading"] = ship["name"] in ships_for_hull_trading + + async def _prepare_data(self) -> None: + """ + Prepares the wingman for execution by initializing necessary variables and loading data. + + This method retrieves configuration values, sets up API URL and timeout, and loads data + such as ship names, commodity names, system names, tradeport names, city names, + satellite names and planet names. + It also adds additional context information for function parameters. + + Returns: + None + """ + # self.start_execution_benchmark() + await self._load_data() + + self.ship_names = [ + self._format_ship_name(ship) + for ship in self.ships + if ship["implemented"] == "1" + ] + self.ship_dict = { + self._format_ship_name(ship).lower(): ship for ship in self.ships + } + self.ship_code_dict = {ship["code"].lower(): ship for ship in self.ships} + + self.commodity_names = [ + self._format_commodity_name(commodity) for commodity in self.commodities + ] + self.commodity_dict = { + self._format_commodity_name(commodity).lower(): commodity + for commodity in self.commodities + } + self.commodity_code_dict = { + commodity["code"].lower(): commodity for commodity in self.commodities + } + + self.system_names = [ + self._format_system_name(system) for system in self.systems + ] + self.system_dict = { + self._format_system_name(system).lower(): system for system in self.systems + } + self.system_code_dict = { + system["code"].lower(): system for system in self.systems + } + + self.tradeport_names = [ + self._format_tradeport_name(tradeport) for tradeport in self.tradeports + ] + self.tradeport_dict = { + self._format_tradeport_name(tradeport).lower(): tradeport + for tradeport in self.tradeports + } + self.tradeport_code_dict = { + tradeport["code"].lower(): tradeport for tradeport in self.tradeports + } + for tradeport in self.tradeports: + if tradeport["system"]: + self.tradeports_by_system[tradeport["system"].lower()].append(tradeport) + if tradeport["planet"]: + self.tradeports_by_planet[tradeport["planet"].lower()].append(tradeport) + if tradeport["satellite"]: + self.tradeports_by_satellite[tradeport["satellite"].lower()].append( + tradeport + ) + if tradeport["city"]: + self.tradeports_by_city[tradeport["city"].lower()].append(tradeport) + + self.city_names = [self._format_city_name(city) for city in self.cities] + self.city_dict = { + self._format_city_name(city).lower(): city for city in self.cities + } + self.city_code_dict = {city["code"].lower(): city for city in self.cities} + for city in self.cities: + self.cities_by_planet[city["planet"].lower()].append(city) + + self.satellite_names = [ + 
self._format_satellite_name(satellite) for satellite in self.satellites + ] + self.satellite_dict = { + self._format_satellite_name(satellite).lower(): satellite + for satellite in self.satellites + } + self.satellite_code_dict = { + satellite["code"].lower(): satellite for satellite in self.satellites + } + for satellite in self.satellites: + self.satellites_by_planet[satellite["planet"].lower()].append(satellite) + + self.planet_names = [ + self._format_planet_name(planet) for planet in self.planets + ] + self.planet_dict = { + self._format_planet_name(planet).lower(): planet for planet in self.planets + } + self.planet_code_dict = { + planet["code"].lower(): planet for planet in self.planets + } + for planet in self.planets: + self.planets_by_system[planet["system"].lower()].append(planet) + + self.location_names_set = set( + self.system_names + + self.tradeport_names + + self.city_names + + self.satellite_names + + self.planet_names + ) + + def _add_context(self, content: str): + """ + Adds additional context to the first message content, + that represents the context given to open ai. + + Args: + content (str): The additional context to be added. + + Returns: + None + """ + self.dynamic_context += "\n" + content + + def _get_timestamp(self) -> int: + """ + Get the current timestamp as an integer. + + Returns: + int: The current timestamp. + """ + return int(datetime.now().timestamp()) + + def _get_header(self): + """ + Returns the header dictionary containing the API key. + Used for API requests. + + Returns: + dict: The header dictionary with the API key. + """ + key = self.uexcorp_api_key + return {"api_key": key} + + async def _fetch_uex_data( + self, endpoint: str, params: Optional[dict[str, any]] = None + ) -> list[dict[str, any]]: + """ + Fetches data from the specified endpoint. + + Args: + endpoint (str): The API endpoint to fetch data from. + params (Optional[dict[str, any]]): Optional parameters to include in the request. + + Returns: + list[dict[str, any]]: The fetched data as a list of dictionaries. + """ + url = f"{self.uexcorp_api_url}/{endpoint}" + + try: + response = requests.get( + url, + params=params, + timeout=self.uexcorp_api_timeout, + headers=self._get_header(), + ) + response.raise_for_status() + except requests.exceptions.RequestException as e: + await self._print(f"Error while retrieving data from {url}: {e}") + return [] + + # if self.config.debug_mode: + # self.print_execution_time(reset_timer=True) + + response_json = response.json() + if "status" not in response_json or response_json["status"] != "ok": + await self._print(f"Error while retrieving data from {url}") + return [] + + return response_json.get("data", []) + + def _format_ship_name(self, ship: dict[str, any]) -> str: + """ + Formats the name of a ship. + This represents a list of names that can be used by the player. + So if you like to use manufacturer names + ship names, do it here. + + Args: + ship (dict[str, any]): The ship dictionary containing the ship details. + + Returns: + str: The formatted ship name. + """ + return ship["name"] + + def _format_tradeport_name(self, tradeport: dict[str, any]) -> str: + """ + Formats the name of a tradeport. + + Args: + tradeport (dict[str, any]): The tradeport dictionary containing the name. + + Returns: + str: The formatted tradeport name. + """ + return tradeport["name"] + + def _format_city_name(self, city: dict[str, any]) -> str: + """ + Formats the name of a city. + + Args: + city (dict[str, any]): A dictionary representing a city. 
+ + Returns: + str: The formatted name of the city. + """ + return city["name"] + + def _format_planet_name(self, planet: dict[str, any]) -> str: + """ + Formats the name of a planet. + + Args: + planet (dict[str, any]): A dictionary representing a planet. + + Returns: + str: The formatted name of the planet. + """ + return planet["name"] + + def _format_satellite_name(self, satellite: dict[str, any]) -> str: + """ + Formats the name of a satellite. + + Args: + satellite (dict[str, any]): The satellite dictionary. + + Returns: + str: The formatted satellite name. + """ + return satellite["name"] + + def _format_system_name(self, system: dict[str, any]) -> str: + """ + Formats the name of a system. + + Args: + system (dict[str, any]): The system dictionary containing the name. + + Returns: + str: The formatted system name. + """ + return system["name"] + + def _format_commodity_name(self, commodity: dict[str, any]) -> str: + """ + Formats the name of a commodity. + + Args: + commodity (dict[str, any]): The commodity dictionary. + + Returns: + str: The formatted commodity name. + """ + return commodity["name"] + + def get_tools(self) -> list[tuple[str, dict]]: + tools = [ + ( + "get_trading_routes", + { + "type": "function", + "function": { + "name": "get_trading_routes", + "description": "Finds all possible commodity trade options and gives back a selection of the best trade routes. Needs ship name and start position.", + "parameters": { + "type": "object", + "properties": { + "ship_name": {"type": "string"}, + "position_start_name": {"type": "string"}, + "money_to_spend": {"type": "number"}, + "position_end_name": {"type": "string"}, + "free_cargo_space": {"type": "number"}, + "commodity_name": {"type": "string"}, + "illegal_commodities_allowed": {"type": "boolean"}, + "maximal_number_of_routes": {"type": "number"}, + }, + "required": [], + "optional": ( + [ + "ship_name", + "position_start_name", + "money_to_spend", + "free_cargo_space", + "position_end_name", + "commodity_name", + "illegal_commodities_allowed", + "maximal_number_of_routes", + ] + ), + }, + }, + }, + ), + ( + "get_locations_to_sell_to", + { + "type": "function", + "function": { + "name": "get_locations_to_sell_to", + "description": "Finds the best locations at which the player can sell cargo. Only give position_name if the player specifically wanted to filter for it. Needs commodity name.", + "parameters": { + "type": "object", + "properties": { + "commodity_name": {"type": "string"}, + "ship_name": {"type": "string"}, + "position_name": {"type": "string"}, + "commodity_amount": {"type": "number"}, + "maximal_number_of_locations": {"type": "number"}, + }, + "required": ["commodity_name"], + "optional": [ + "ship_name", + "position_name", + "commodity_amount", + "maximal_number_of_locations", + ], + }, + }, + }, + ), + ( + "get_locations_to_buy_from", + { + "type": "function", + "function": { + "name": "get_locations_to_buy_from", + "description": "Finds the best locations at which the player can buy cargo. Only give position_name if the player specifically wanted to filter for it. 
Needs commodity name.", + "parameters": { + "type": "object", + "properties": { + "commodity_name": {"type": "string"}, + "ship_name": {"type": "string"}, + "position_name": {"type": "string"}, + "commodity_amount": {"type": "number"}, + "maximal_number_of_locations": {"type": "number"}, + }, + "required": ["commodity_name"], + "optional": [ + "ship_name", + "position_name", + "commodity_amount", + "maximal_number_of_locations", + ], + }, + }, + }, + ), + ( + "get_location_information", + { + "type": "function", + "function": { + "name": "get_location_information", + "description": "Gives information and commodity prices for this location. Execute this if the player asks for all buy or sell options for a specific location.", + "parameters": { + "type": "object", + "properties": { + "location_name": {"type": "string"}, + }, + "required": ["location_name"], + }, + }, + }, + ), + ( + "get_ship_information", + { + "type": "function", + "function": { + "name": "get_ship_information", + "description": "Gives information about the given ship. If a player asks to rent something or buy a ship, this function needs to be executed.", + "parameters": { + "type": "object", + "properties": { + "ship_name": {"type": "string"}, + }, + "required": ["ship_name"], + }, + }, + }, + ), + ( + "get_ship_comparison", + { + "type": "function", + "function": { + "name": "get_ship_comparison", + "description": "Gives information about given ships. Also execute this function if the player asks for ship information on multiple ships or a model series.", + "parameters": { + "type": "object", + "properties": { + "ship_names": { + "type": "array", + "items": {"type": "string"}, + }, + }, + "required": ["ship_names"], + }, + }, + }, + ), + ( + "get_commodity_information", + { + "type": "function", + "function": { + "name": "get_commodity_information", + "description": "Gives information about the given commodity. If a player asks for information about a commodity, this function needs to be executed.", + "parameters": { + "type": "object", + "properties": { + "commodity_name": {"type": "string"}, + }, + "required": ["commodity_name"], + }, + }, + }, + ), + ( + "get_commodity_prices_and_tradeports", + { + "type": "function", + "function": { + "name": "get_commodity_prices_and_tradeports", + "description": "Gives information about the given commodity and its buy and sell offers. 
If a player asks for buy and sell information or locations on a commodity, this function needs to be executed.", + "parameters": { + "type": "object", + "properties": { + "commodity_name": {"type": "string"}, + }, + "required": ["commodity_name"], + }, + }, + }, + ), + ( + "reload_current_commodity_prices", + { + "type": "function", + "function": { + "name": "reload_current_commodity_prices", + "description": "Reloads the current commodity prices from UEX corp.", + "parameters": { + "type": "object", + "properties": {}, + "required": [], + }, + }, + }, + ), + ] + + if self.DEV_MODE: + tools.append( + ( + "show_cached_function_values", + { + "type": "function", + "function": { + "name": "show_cached_function_values", + "description": "Prints the cached function's argument values to the console.", + "parameters": { + "type": "object", + "properties": {}, + "required": [], + }, + }, + }, + ), + ) + + return tools + + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] + ) -> tuple[str, str]: + function_response = "" + instant_response = "" + + functions = { + "get_trading_routes": "get_trading_routes", + "get_locations_to_sell_to": "get_locations_to_sell_to", + "get_locations_to_buy_from": "get_locations_to_buy_from", + "get_location_information": "get_location_information", + "get_ship_information": "get_ship_information", + "get_ship_comparison": "get_ship_comparison", + "get_commodity_information": "get_commodity_information", + "get_commodity_prices_and_tradeports": "get_commodity_information", + "reload_current_commodity_prices": "reload_current_commodity_prices", + "show_cached_function_values": "show_cached_function_values", + } + + try: + if tool_name in functions: + self.start_execution_benchmark() + await self._print(f"Executing function: {tool_name}") + function = getattr(self, "_gpt_call_" + functions[tool_name]) + function_response = await function(**parameters) + if self.settings.debug_mode: + await self.print_execution_time() + except Exception as e: + logging.error(e, exc_info=True) + with open(self.logfileerror, "a", encoding="UTF-8") as file_object: + file_object.write( "========================================================================================\n" ) + file_object.write( f"Above error while executing custom function: _gpt_call_{tool_name}\n" ) + file_object.write(f"With parameters: {parameters}\n") + file_object.write(f"On date: {datetime.now()}\n") + file_object.write(f"Version: {self.uexcorp_version}\n") + file_object.write( "========================================================================================\n" ) + await self._print( f"Error while executing custom function: {tool_name}\nCheck log file for more details." ) + function_response = f"Error while executing custom function: {tool_name}" + function_response += "\nTell the user there seems to be an error. And you must say that it should be reported to the 'uexcorp wingman developers'." + + return function_response, instant_response + + async def _find_closest_match( + self, search: str | None, lst: list[str] | set[str] + ) -> str | None: + """ + Finds the closest match to a given string in a list, or returns an exact match if found. + If it is not an exact match, OpenAI is used to find the closest match. + + Args: + search (str): The search term to find a match for. + lst (list): The list of strings to search for a match. + + Returns: + str or None: The closest match found in the list, or None if no match is found.
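+ + Example (illustrative; the actual result depends on the LLM call): + >>> await self._find_closest_match("Prospektor", self.ship_names) + 'Prospector'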
+ """ + if search is None or search == "None": + return None + + self._log(f"Searching for closest match to '{search}' in list.", True) + + checksum = f"{hash(frozenset(lst))}-{hash(search)}" + if checksum in self.cache["search_matches"]: + match = self.cache["search_matches"][checksum] + self._log(f"Found closest match to '{search}' in cache: '{match}'", True) + return match + + if search in lst: + self._log(f"Found exact match to '{search}' in list.", True) + return search + + # make a list of possible matches + closest_matches = difflib.get_close_matches(search, lst, n=10, cutoff=0.4) + closest_matches.extend(item for item in lst if search.lower() in item.lower()) + self._log( + f"Making a list for closest matches for search term '{search}': {', '.join(closest_matches)}", + True, + ) + + if not closest_matches: + self._log( + f"No closest match found for '{search}' in list. Returning None.", True + ) + return None + + messages = [ + { + "role": "system", + "content": f""" + I'll give you just a string value. + You will figure out, what value in this list represents this value best: {', '.join(closest_matches)} + Keep in mind that the given string value can be misspelled or has missing words as it has its origin in a speech to text process. + You must only return the value of the closest match to the given value from the defined list, nothing else. + For example if "Hercules A2" is given and the list contains of "A2, C2, M2", you will return "A2" as string. + Or if "C2" is given and the list contains of "A2 Hercules Star Lifter, C2 Monster Truck, M2 Extreme cool ship", you will return "C2 Monster Truck" as string. + On longer search terms, prefer the exact match, if it is in the list. + The response must not contain anything else, than the exact value of the closest match from the list. + If you can't find a match, return 'None'. Do never return the given search value. + """, + }, + { + "role": "user", + "content": search, + }, + ] + completion = await self.gpt_call(messages) + answer = ( + completion.choices[0].message.content + if completion and completion.choices + else "" + ) + + if not answer: + dumb_match = difflib.get_close_matches( + search, closest_matches, n=1, cutoff=0.9 + ) + if dumb_match: + self._log( + f"OpenAI did not answer for '{search}'. Returning dumb match '{dumb_match}'", + True, + ) + return dumb_match[0] + else: + self._log( + f"OpenAI did not answer for '{search}' and dumb match not possible. Returning None.", + True, + ) + return None + + self._log(f"OpenAI answered: '{answer}'", True) + + if answer == "None" or answer not in closest_matches: + self._log( + f"No closest match found for '{search}' in list. Returning None.", True + ) + return None + + self._log(f"Found closest match to '{search}' in list: '{answer}'", True) + self._add_context(f"\n\nInstead of '{search}', you should use '{answer}'.") + self.cache["search_matches"][checksum] = answer + return answer + + async def get_prompt(self) -> str | None: + """Return additional context.""" + additional_context = self.config.prompt or "" + additional_context += "\n" + self.dynamic_context + return additional_context + + async def _gpt_call_show_cached_function_values(self) -> str: + """ + Prints the cached function's argument values to the console. + + Returns: + str: A message indicating that the cached function's argument values have been printed to the console. 
+ """ + self._log(self.cache["function_args"], True) + return "The cached function values are: \n" + json.dumps(self.cache["function_args"]) + + async def _gpt_call_reload_current_commodity_prices(self) -> str: + """ + Reloads the current commodity prices from UEX corp. + + Returns: + str: A message indicating that the current commodity prices have been reloaded. + """ + await self._load_data(reload=True) + # clear cached data + for key in self.cache: + self.cache[key] = {} + + self._log("Reloaded current commodity prices from UEX corp.", True) + return "Reloaded current commodity prices from UEX corp." + + async def _gpt_call_get_commodity_information( + self, commodity_name: str = None + ) -> str: + """ + Retrieves information about a given commodity. + + Args: + commodity_name (str, optional): The name of the commodity. Defaults to None. + + Returns: + str: The information about the commodity in JSON format, or an error message if the commodity is not found. + """ + self._log(f"Parameters: Commodity: {commodity_name}", True) + + commodity_name = self._get_function_arg_from_cache( + "commodity_name", commodity_name + ) + + if commodity_name is None: + self._log("No commodity given. Ask for a commodity.", True) + return "No commodity given. Ask for a commodity." + + misunderstood = [] + closest_match = await self._find_closest_match( + commodity_name, self.commodity_names + ) + if closest_match is None: + misunderstood.append(f"Commodity: {commodity_name}") + else: + commodity_name = closest_match + + self._log(f"Interpreted Parameters: Commodity: {commodity_name}", True) + + if misunderstood: + misunderstood_str = ", ".join(misunderstood) + self._log( + f"These given parameters do not exist in game. Exactly ask for clarification of these values: {misunderstood_str}", + True, + ) + return f"These given parameters do not exist in game. Exactly ask for clarification of these values: {misunderstood_str}" + + commodity = self._get_commodity_by_name(commodity_name) + if commodity is not None: + output_commodity = self._get_converted_commodity_for_output(commodity) + self._log(output_commodity, True) + return json.dumps(output_commodity) + + async def _gpt_call_get_ship_information(self, ship_name: str = None) -> str: + """ + Retrieves information about a specific ship. + + Args: + ship_name (str, optional): The name of the ship. Defaults to None. + + Returns: + str: The ship information or an error message. + + """ + self._log(f"Parameters: Ship: {ship_name}", True) + + ship_name = self._get_function_arg_from_cache("ship_name", ship_name) + + if ship_name is None: + self._log("No ship given. Ask for a ship. Dont say sorry.", True) + return "No ship given. Ask for a ship. Dont say sorry." + + misunderstood = [] + closest_match = await self._find_closest_match(ship_name, self.ship_names) + if closest_match is None: + misunderstood.append(f"Ship: {ship_name}") + else: + ship_name = closest_match + + self._log(f"Interpreted Parameters: Ship: {ship_name}", True) + + if misunderstood: + misunderstood_str = ", ".join(misunderstood) + self._log( + f"These given parameters do not exist in game. Exactly ask for clarification of these values: {misunderstood_str}", + True, + ) + return f"These given parameters do not exist in game. 
Exactly ask for clarification of these values: {misunderstood_str}" + + ship = self._get_ship_by_name(ship_name) + if ship is not None: + output_ship = self._get_converted_ship_for_output(ship) + self._log(output_ship, True) + return json.dumps(output_ship) + + async def _gpt_call_get_ship_comparison(self, ship_names: list[str] = None) -> str: + """ + Retrieves information about multiple ships. + + Args: + ship_names (list[str], optional): The names of the ships. Defaults to None. + + Returns: + str: The ship information or an error message. + """ + self._log(f"Parameters: Ships: {', '.join(ship_names or [])}", True) + + if ship_names is None or not ship_names: + self._log("No ship given. Ask for a ship. Don't say sorry.", True) + return "No ship given. Ask for a ship. Don't say sorry." + + misunderstood = [] + ships = [] + interpreted_names = [] + for ship_name in ship_names: + closest_match = await self._find_closest_match(ship_name, self.ship_names) + if closest_match is None: + misunderstood.append(ship_name) + else: + interpreted_names.append(closest_match) + ships.append(self._get_ship_by_name(closest_match)) + + self._log(f"Interpreted Parameters: Ships: {', '.join(interpreted_names)}", True) + + if misunderstood: + self._log( + f"These ship names do not exist in game. Exactly ask for clarification of these ships: {', '.join(misunderstood)}", + True, + ) + return f"These ship names do not exist in game. Exactly ask for clarification of these ships: {', '.join(misunderstood)}" + + output = {} + for ship in ships: + output[self._format_ship_name(ship)] = self._get_converted_ship_for_output( + ship + ) + + output = ( + "Point out differences between these ships but keep it short, like 4-5 sentences, and don't mention something both can't do, like getting rented:\n" + + json.dumps(output) + ) + self._log(output, True) + return output + + async def _gpt_call_get_location_information( + self, location_name: str = None + ) -> str: + """ + Retrieves information about a given location. + + Args: + location_name (str, optional): The name of the location. Defaults to None. + + Returns: + str: The information about the location in JSON format, or an error message if the location is not found. + """ + self._log(f"Parameters: Location: {location_name}", True) + + location_name = self._get_function_arg_from_cache( + "location_name", location_name + ) + + if location_name is None: + self._log("No location given. Ask for a location.", True) + return "No location given. Ask for a location." + + misunderstood = [] + closest_match = await self._find_closest_match( + location_name, self.location_names_set + ) + if closest_match is None: + misunderstood.append(f"Location: {location_name}") + else: + location_name = closest_match + + self._log(f"Interpreted Parameters: Location: {location_name}", True) + + if misunderstood: + misunderstood_str = ", ".join(misunderstood) + self._log( + f"These given parameters do not exist in game. Exactly ask for clarification of these values: {misunderstood_str}", + True, + ) + return f"These given parameters do not exist in game. 
Exactly ask for clarification of these values: {misunderstood_str}" + + # get a clone of the data + tradeport = self._get_tradeport_by_name(location_name) + if tradeport is not None: + output = self._get_converted_tradeport_for_output(tradeport) + self._log(output, True) + return json.dumps(output) + city = self._get_city_by_name(location_name) + if city is not None: + output = self._get_converted_city_for_output(city) + self._log(output, True) + return json.dumps(output) + satellite = self._get_satellite_by_name(location_name) + if satellite is not None: + output = self._get_converted_satellite_for_output(satellite) + self._log(output, True) + return json.dumps(output) + planet = self._get_planet_by_name(location_name) + if planet is not None: + output = self._get_converted_planet_for_output(planet) + self._log(output, True) + return json.dumps(output) + system = self._get_system_by_name(location_name) + if system is not None: + output = self._get_converted_system_for_output(system) + self._log(output, True) + return json.dumps(output) + + def _get_converted_tradeport_for_output( + self, tradeport: dict[str, any] + ) -> dict[str, any]: + """ + Converts a tradeport dictionary to a dictionary that can be used as output. + + Args: + tradeport (dict[str, any]): The tradeport dictionary to be converted. + + Returns: + dict[str, any]: The converted tradeport dictionary. + """ + checksum = f"tradeport--{tradeport['code']}" + if checksum in self.cache["readable_objects"]: + return self.cache["readable_objects"][checksum] + + tradeport = copy.deepcopy(tradeport) + deletable_keys = [ + "code", + "name_short", + "space", + "visible", + "prices", + "date_modified", + "date_added", + ] + tradeport["type"] = "Tradeport" + tradeport["system"] = self._get_system_name_by_code(tradeport["system"]) + tradeport["planet"] = self._get_planet_name_by_code(tradeport["planet"]) + tradeport["city"] = self._get_city_name_by_code(tradeport["city"]) + tradeport["satellite"] = self._get_satellite_name_by_code( + tradeport["satellite"] + ) + tradeport["hull_trading"] = ( + "Trading with MISC Hull C is possible." + if tradeport["hull_trading"] + else "Trading with MISC Hull C is not possible." + ) + + if "prices" in tradeport: + buyable_commodities = [ + f"{data['name']} for {data['price_buy']} aUEC per SCU" + for commodity_code, data in tradeport["prices"].items() + if data["operation"] == "buy" + ] + sellable_commodities = [ + f"{data['name']} for {data['price_sell']} aUEC per SCU" + for commodity_code, data in tradeport["prices"].items() + if data["operation"] == "sell" + ] + else: + buyable_commodities = [] + sellable_commodities = [] + + if len(buyable_commodities): + tradeport["buyable_commodities"] = ", ".join(buyable_commodities) + if len(sellable_commodities): + tradeport["sellable_commodities"] = ", ".join(sellable_commodities) + + for key in ["system", "planet", "city", "satellite"]: + if tradeport.get(key) is None: + deletable_keys.append(key) + + for key in deletable_keys: + tradeport.pop(key, None) + + self.cache["readable_objects"][checksum] = tradeport + return tradeport + + def _get_converted_city_for_output(self, city: dict[str, any]) -> dict[str, any]: + """ + Converts a city dictionary to a dictionary that can be used as output. + + Args: + city (dict[str, any]): The city dictionary to be converted. + + Returns: + dict[str, any]: The converted city dictionary. 
+ """ + checksum = f"city--{city['code']}" + if checksum in self.cache["readable_objects"]: + return self.cache["readable_objects"][checksum] + + city = copy.deepcopy(city) + deletable_keys = [ + "code", + "available", + "date_added", + "date_modified", + ] + city["type"] = "City" + city["system"] = self._get_system_name_by_code(city["system"]) + city["planet"] = self._get_planet_name_by_code(city["planet"]) + tradeports = self._get_tradeports_by_position_name(city["name"], True) + + if tradeports: + city["options_to_trade"] = ", ".join( + [self._format_tradeport_name(tradeport) for tradeport in tradeports] + ) + + for key in deletable_keys: + city.pop(key, None) + + self.cache["readable_objects"][checksum] = city + return city + + def _get_converted_satellite_for_output( + self, satellite: dict[str, any] + ) -> dict[str, any]: + """ + Converts a satellite dictionary to a dictionary that can be used as output. + + Args: + satellite (dict[str, any]): The satellite dictionary to be converted. + + Returns: + dict[str, any]: The converted satellite dictionary. + """ + checksum = f"satellite--{satellite['code']}" + if checksum in self.cache["readable_objects"]: + return self.cache["readable_objects"][checksum] + + satellite = copy.deepcopy(satellite) + deletable_keys = [ + "code", + "available", + "date_added", + "date_modified", + ] + satellite["type"] = "Satellite" + satellite["system"] = self._get_system_name_by_code(satellite["system"]) + satellite["planet"] = self._get_planet_name_by_code(satellite["planet"]) + tradeports = self._get_tradeports_by_position_name(satellite["name"], True) + + if tradeports: + satellite["options_to_trade"] = ", ".join( + [self._format_tradeport_name(tradeport) for tradeport in tradeports] + ) + + for key in deletable_keys: + satellite.pop(key, None) + + self.cache["readable_objects"][checksum] = satellite + return satellite + + def _get_converted_planet_for_output( + self, planet: dict[str, any] + ) -> dict[str, any]: + """ + Converts a planet dictionary to a dictionary that can be used as output. + + Args: + planet (dict[str, any]): The planet dictionary to be converted. + + Returns: + dict[str, any]: The converted planet dictionary. + """ + checksum = f"planet--{planet['code']}" + if checksum in self.cache["readable_objects"]: + return self.cache["readable_objects"][checksum] + + planet = copy.deepcopy(planet) + deletable_keys = [ + "code", + "available", + "date_added", + "date_modified", + ] + planet["type"] = "Planet" + planet["system"] = self._get_system_name_by_code(planet["system"]) + tradeports = self._get_tradeports_by_position_name(planet["name"], True) + + if tradeports: + planet["options_to_trade"] = ", ".join( + [self._format_tradeport_name(tradeport) for tradeport in tradeports] + ) + + satellites = self._get_satellites_by_planetcode(planet["code"]) + if satellites: + planet["satellites"] = ", ".join( + [self._format_satellite_name(satellite) for satellite in satellites] + ) + + cities = self._get_cities_by_planetcode(planet["code"]) + if cities: + planet["cities"] = ", ".join( + [self._format_city_name(city) for city in cities] + ) + + for key in deletable_keys: + planet.pop(key, None) + + self.cache["readable_objects"][checksum] = planet + return planet + + def _get_converted_system_for_output( + self, system: dict[str, any] + ) -> dict[str, any]: + """ + Converts a system dictionary to a dictionary that can be used as output. + + Args: + system (dict[str, any]): The system dictionary to be converted. 
+ + Returns: + dict[str, any]: The converted system dictionary. + """ + checksum = f"system--{system['code']}" + if checksum in self.cache["readable_objects"]: + return self.cache["readable_objects"][checksum] + + system = copy.deepcopy(system) + deletable_keys = [ + "code", + "available", + "default", + "date_added", + "date_modified", + ] + system["type"] = "System" + tradeports = self._get_tradeports_by_position_name(system["name"], True) + + if tradeports: + system["options_to_trade"] = ", ".join( + [self._format_tradeport_name(tradeport) for tradeport in tradeports] + ) + + planets = self._get_planets_by_systemcode(system["code"]) + if planets: + system["planets"] = ", ".join( + [self._format_planet_name(planet) for planet in planets] + ) + + for key in deletable_keys: + system.pop(key, None) + + self.cache["readable_objects"][checksum] = system + return system + + def _get_converted_ship_for_output(self, ship: dict[str, any]) -> dict[str, any]: + """ + Converts a ship dictionary to a dictionary that can be used as output. + + Args: + ship (dict[str, any]): The ship dictionary to be converted. + + Returns: + dict[str, any]: The converted ship dictionary. + """ + checksum = f"ship--{ship['code']}" + if checksum in self.cache["readable_objects"]: + return self.cache["readable_objects"][checksum] + + ship = copy.deepcopy(ship) + deletable_keys = [ + "code", + "loaner", + "scu", + "implemented", + "mining", + "stealth", + "price", + "price_warbond", + "price_pkg", + "sell_active", + "sell_active_warbond", + "sell_active_pkg", + "stock_qt_speed", + "showdown_winner", + "date_added", + "date_modified", + "hull_trading", + ] + ship["type"] = "Ship" + ship["manufacturer"] = ( + self.MANUFACTURERS[ship["manufacturer"]] + if ship["manufacturer"] in self.MANUFACTURERS + else ship["manufacturer"] + ) + ship["cargo"] = f"{ship['scu']} SCU" + ship["price_USD"] = f"{ship['price']}" + + ship["buy_at"] = ( + "This ship cannot be bought." + if not ship["buy_at"] + else [ + self._get_converted_rent_and_buy_option_for_output(position) + for position in ship["buy_at"] + ] + ) + + ship["rent_at"] = ( + "This ship cannot be rented." + if not ship["rent_at"] + else [ + self._get_converted_rent_and_buy_option_for_output(position) + for position in ship["rent_at"] + ] + ) + + if ship["hull_trading"] is True: + ship["trading_info"] = ( + "This ship can only trade on suitable space stations with a cargo deck." + ) + + for key in deletable_keys: + ship.pop(key, None) + + self.cache["readable_objects"][checksum] = ship + return ship + + def _get_converted_rent_and_buy_option_for_output( + self, position: dict[str, any] + ) -> dict[str, any]: + """ + Converts a rent/buy option dictionary to a dictionary that can be used as output. + + Args: + position (dict[str, any]): The rent/buy option dictionary to be converted. + + Returns: + dict[str, any]: The converted rent/buy option dictionary. 
+ """ + position = copy.deepcopy(position) + keys = ["system", "planet", "satellite", "city", "store"] + if not position["tradeport"]: + for key in keys: + if not position[key]: + position.pop(key, None) + else: + position[key] = position[f"{key}_name"] + position.pop("tradeport", None) + else: + tradeport = self._get_tradeport_by_code(position["tradeport"]) + position["tradeport"] = self._format_tradeport_name(tradeport) + for key in keys: + function_name = f"_get_{key}_name_by_code" + if function_name in dir(self): + name = getattr(self, function_name)(tradeport[key]) + if name: + position[key] = name + else: + position.pop(key, None) + position["store"] = ( + "Refinery" # TODO: remove this when refinery is implemented + ) + position["price"] = f"{position['price']} aUEC" + + keys_to_remove = [ + "tradeport_name", + "system_name", + "planet_name", + "satellite_name", + "tradeport_name", + "city_name", + "store_name", + "date_added", + "date_modified", + ] + for key in keys_to_remove: + position.pop(key, None) + + return position + + def _get_converted_commodity_for_output( + self, commodity: dict[str, any] + ) -> dict[str, any]: + """ + Converts a commodity dictionary to a dictionary that can be used as output. + + Args: + commodity (dict[str, any]): The commodity dictionary to be converted. + + Returns: + dict[str, any]: The converted commodity dictionary. + """ + checksum = f"commodity--{commodity['code']}" + if checksum in self.cache["readable_objects"]: + return self.cache["readable_objects"][checksum] + + commodity = copy.deepcopy(commodity) + commodity["notes"] = "" + deletable_keys = [ + "code", + "tradable", + "temporary", + "restricted", + "raw", + "available", + "date_added", + "date_modified", + "trade_price_buy", + "trade_price_sell", + ] + + price_buy_best = None + price_sell_best = None + commodity["buy_options"] = {} + commodity["sell_options"] = {} + + for tradeport in self.tradeports: + if "prices" not in tradeport: + continue + if commodity["code"] in tradeport["prices"]: + if tradeport["prices"][commodity["code"]]["operation"] == "buy": + price_buy = tradeport["prices"][commodity["code"]]["price_buy"] + if price_buy_best is None or price_buy < price_buy_best: + price_buy_best = price_buy + commodity["buy_options"][ + self._format_tradeport_name(tradeport) + ] = f"{price_buy} aUEC" + else: + price_sell = tradeport["prices"][commodity["code"]]["price_sell"] + if price_sell_best is None or price_sell > price_sell_best: + price_sell_best = price_sell + commodity["sell_options"][ + self._format_tradeport_name(tradeport) + ] = f"{price_sell} aUEC" + + commodity["best_buy_price"] = ( + f"{price_buy_best} aUEC" if price_buy_best else "Not buyable." + ) + commodity["best_sell_price"] = ( + f"{price_sell_best} aUEC" if price_sell_best else "Not sellable." + ) + + boolean_keys = ["minable", "harvestable", "illegal"] + for key in boolean_keys: + commodity[key] = "Yes" if commodity[key] != "0" else "No" + if commodity["illegal"] == "Yes": + commodity[ + "notes" + ] += "Stay away from ship scanns to avoid fines and crimestat, as this commodity is illegal." 
+ + for key in deletable_keys: + commodity.pop(key, None) + + self.cache["readable_objects"][checksum] = commodity + return commodity + + async def _gpt_call_get_locations_to_sell_to( + self, + commodity_name: str = None, + ship_name: str = None, + position_name: str = None, + commodity_amount: int = 1, + maximal_number_of_locations: int = 5, + ) -> str: + await self._print( + f"Given Parameters: Commodity: {commodity_name}, Ship Name: {ship_name}, Current Position: {position_name}, Amount: {commodity_amount}, Maximal Number of Locations: {maximal_number_of_locations}", + True, + ) + + commodity_name = self._get_function_arg_from_cache( + "commodity_name", commodity_name + ) + ship_name = self._get_function_arg_from_cache("ship_name", ship_name) + + if commodity_name is None: + self._log("No commodity given. Ask for a commodity.", True) + return "No commodity given. Ask for a commodity." + + misunderstood = [] + parameters = { + "commodity_name": (commodity_name, self.commodity_names), + "ship_name": (ship_name, self.ship_names), + "position_name": (position_name, self.location_names_set), + } + for param, (value, names_set) in parameters.items(): + if value is not None: + match = await self._find_closest_match(value, names_set) + if match is None: + misunderstood.append(f"{param}: {value}") + else: + self._set_function_arg_to_cache(param, match) + parameters[param] = (match, names_set) + commodity_name = parameters["commodity_name"][0] + ship_name = parameters["ship_name"][0] + position_name = parameters["position_name"][0] + + await self._print( + f"Interpreted Parameters: Commodity: {commodity_name}, Ship Name: {ship_name}, Position: {position_name}, Amount: {commodity_amount}, Maximal Number of Locations: {maximal_number_of_locations}", + True, + ) + + if misunderstood: + self._log( + "These given parameters do not exist in game. Exactly ask for clarification of these values: " + + ", ".join(misunderstood), + True, + ) + return ( + "These given parameters do not exist in game. 
Exactly ask for clarification of these values: " + + ", ".join(misunderstood) + ) + + tradeports = ( + self.tradeports + if position_name is None + else self._get_tradeports_by_position_name(position_name) + ) + commodity = self._get_commodity_by_name(commodity_name) + ship = self._get_ship_by_name(ship_name) + amount = max(1, int(commodity_amount or 1)) + maximal_number_of_locations = max(1, int(maximal_number_of_locations or 3)) + + selloptions = collections.defaultdict(list) + for tradeport in tradeports: + sellprice = self._get_data_location_sellprice( + tradeport, commodity, ship, amount + ) + if sellprice is not None: + selloptions[sellprice].append(tradeport) + + selloptions = dict(sorted(selloptions.items(), reverse=True)) + selloptions = dict( + itertools.islice(selloptions.items(), maximal_number_of_locations) + ) + + messages = [ + f"Here are the best {len(selloptions)} locations to sell {amount} SCU {commodity_name}:" + ] + + for sellprice, tradeports in selloptions.items(): + messages.append(f"{sellprice} aUEC:") + messages.extend( + self._get_tradeport_route_description(tradeport) + for tradeport in tradeports + ) + + self._log("\n".join(messages), True) + return "\n".join(messages) + + async def _gpt_call_get_locations_to_buy_from( + self, + commodity_name: str = None, + ship_name: str = None, + position_name: str = None, + commodity_amount: int = 1, + maximal_number_of_locations: int = 5, + ) -> str: + await self._print( + f"Given Parameters: Commodity: {commodity_name}, Ship Name: {ship_name}, Current Position: {position_name}, Amount: {commodity_amount}, Maximal Number of Locations: {maximal_number_of_locations}", + True, + ) + + commodity_name = self._get_function_arg_from_cache( + "commodity_name", commodity_name + ) + ship_name = self._get_function_arg_from_cache("ship_name", ship_name) + + if commodity_name is None: + self._log("No commodity given. Ask for a commodity.", True) + return "No commodity given. Ask for a commodity." + + misunderstood = [] + parameters = { + "ship_name": (ship_name, self.ship_names), + "location_name": (position_name, self.location_names_set), + "commodity_name": (commodity_name, self.commodity_names), + } + for param, (value, names_set) in parameters.items(): + if value is not None: + match = await self._find_closest_match(value, names_set) + if match is None: + misunderstood.append(f"{param}: {value}") + else: + self._set_function_arg_to_cache(param, match) + parameters[param] = (match, names_set) + ship_name = parameters["ship_name"][0] + position_name = parameters["location_name"][0] + commodity_name = parameters["commodity_name"][0] + + await self._print( + f"Interpreted Parameters: Commodity: {commodity_name}, Ship Name: {ship_name}, Position: {position_name}, Amount: {commodity_amount}, Maximal Number of Locations: {maximal_number_of_locations}", + True, + ) + + if misunderstood: + self._log( + "These given parameters do not exist in game. Exactly ask for clarification of these values: " + + ", ".join(misunderstood), + True, + ) + return ( + "These given parameters do not exist in game. 
Exactly ask for clarification of these values: " + + ", ".join(misunderstood) + ) + + tradeports = ( + self.tradeports + if position_name is None + else self._get_tradeports_by_position_name(position_name) + ) + commodity = self._get_commodity_by_name(commodity_name) + ship = self._get_ship_by_name(ship_name) + amount = max(1, int(commodity_amount or 1)) + maximal_number_of_locations = max(1, int(maximal_number_of_locations or 3)) + + buyoptions = collections.defaultdict(list) + for tradeport in tradeports: + buyprice = self._get_data_location_buyprice( + tradeport, commodity, ship, amount + ) + if buyprice is not None: + buyoptions[buyprice].append(tradeport) + + buyoptions = dict(sorted(buyoptions.items(), reverse=False)) + buyoptions = dict( + itertools.islice(buyoptions.items(), maximal_number_of_locations) + ) + + messages = [ + f"Here are the best {len(buyoptions)} locations to buy {amount} SCU {commodity_name}:" + ] + for buyprice, tradeports in buyoptions.items(): + messages.append(f"{buyprice} aUEC:") + messages.extend( + self._get_tradeport_route_description(tradeport) + for tradeport in tradeports + ) + + self._log("\n".join(messages), True) + return "\n".join(messages) + + def _get_data_location_sellprice(self, tradeport, commodity, ship=None, amount=1): + if ( + ship is not None + and ship["hull_trading"] is True + and tradeport["hull_trading"] is False + ): + return None + + if "prices" not in tradeport: + return None + + commodity_code = commodity["code"] + for code, price in tradeport["prices"].items(): + if code == commodity_code and price["operation"] == "sell": + return price["price_sell"] * amount + return None + + def _get_data_location_buyprice(self, tradeport, commodity, ship=None, amount=1): + if ( + ship is not None + and ship["hull_trading"] is True + and tradeport["hull_trading"] is False + ): + return None + + if "prices" not in tradeport: + return None + + commodity_code = commodity["code"] + for code, price in tradeport["prices"].items(): + if code == commodity_code and price["operation"] == "buy": + return price["price_buy"] * amount + return None + + async def _gpt_call_get_trading_routes( + self, + ship_name: str = None, + money_to_spend: float = None, + position_start_name: str = None, + free_cargo_space: float = None, + position_end_name: str = None, + commodity_name: str = None, + illegal_commodities_allowed: bool = None, + maximal_number_of_routes: int = None, + ) -> str: + """ + Finds multiple best trading routes based on the given parameters. + + Args: + ship_name (str, optional): The name of the ship. Defaults to None. + money_to_spend (float, optional): The amount of money to spend. Defaults to None. + position_start_name (str, optional): The name of the starting position. Defaults to None. + free_cargo_space (float, optional): The amount of free cargo space. Defaults to None. + position_end_name (str, optional): The name of the ending position. Defaults to None. + commodity_name (str, optional): The name of the commodity. Defaults to None. + illegal_commodities_allowed (bool, optional): Flag indicating whether illegal commodities are allowed. Defaults to True. + maximal_number_of_routes (int, optional): The maximum number of routes to return. Defaults to 2. + + Returns: + str: A string representation of the trading routes found. 
+ """ + + # For later use in distance calculation: + # https://starmap.tk/api/v2/oc/ + # https://starmap.tk/api/v2/pois/ + + await self._print( + f"Parameters: Ship: {ship_name}, Position Start: {position_start_name}, Position End: {position_end_name}, Commodity Name: {commodity_name}, Money: {money_to_spend} aUEC, free_cargo_space: {free_cargo_space} SCU, Maximal Number of Routes: {maximal_number_of_routes}, Illegal Allowed: {illegal_commodities_allowed}", + True, + ) + + ship_name = self._get_function_arg_from_cache("ship_name", ship_name) + illegal_commodities_allowed = self._get_function_arg_from_cache( + "illegal_commodities_allowed", illegal_commodities_allowed + ) + if illegal_commodities_allowed is None: + illegal_commodities_allowed = True + + missing_args = [] + if ship_name is None: + missing_args.append("ship_name") + + if self.uexcorp_tradestart_mandatory and position_start_name is None: + missing_args.append("position_start_name") + + money_to_spend = ( + None + if money_to_spend is not None and int(money_to_spend) < 1 + else money_to_spend + ) + free_cargo_space = ( + None + if free_cargo_space is not None and int(free_cargo_space) < 1 + else free_cargo_space + ) + + misunderstood = [] + parameters = { + "ship_name": (ship_name, self.ship_names), + "position_start_name": (position_start_name, self.location_names_set), + "position_end_name": (position_end_name, self.location_names_set), + "commodity_name": (commodity_name, self.commodity_names), + } + for param, (value, names_set) in parameters.items(): + if value is not None: + match = await self._find_closest_match(value, names_set) + if match is None: + misunderstood.append(f"{param}: {value}") + else: + self._set_function_arg_to_cache(param, match) + parameters[param] = (match, names_set) + ship_name = parameters["ship_name"][0] + position_start_name = parameters["position_start_name"][0] + position_end_name = parameters["position_end_name"][0] + commodity_name = parameters["commodity_name"][0] + + if money_to_spend is not None: + self._set_function_arg_to_cache("money", money_to_spend) + + await self._print( + f"Interpreted Parameters: Ship: {ship_name}, Position Start: {position_start_name}, Position End: {position_end_name}, Commodity Name: {commodity_name}, Money: {money_to_spend} aUEC, free_cargo_space: {free_cargo_space} SCU, Maximal Number of Routes: {maximal_number_of_routes}, Illegal Allowed: {illegal_commodities_allowed}", + True, + ) + + self._set_function_arg_to_cache("money", money_to_spend) + + if misunderstood or missing_args: + misunderstood_str = ", ".join(misunderstood) + missing_str = ", ".join(missing_args) + answer = "" + if missing_str: + answer += f"Missing parameters: {missing_str}. 
" + if misunderstood_str: + answer += ( + f"These given parameters were misunderstood: {misunderstood_str}" + ) + + self._log(answer, True) + return answer + + # set variables + ship = self._get_ship_by_name(ship_name) + if money_to_spend is not None: + money = int(money_to_spend) + else: + money = None + if free_cargo_space is not None: + free_cargo_space = int(free_cargo_space) + else: + free_cargo_space = None + commodity = ( + self._get_commodity_by_name(commodity_name) if commodity_name else None + ) + maximal_number_of_routes = int( + maximal_number_of_routes or self.uexcorp_default_trade_route_count + ) + start_tradeports = ( + self._get_tradeports_by_position_name(position_start_name) + if position_start_name + else self.tradeports + ) + end_tradeports = ( + self._get_tradeports_by_position_name(position_end_name) + if position_end_name + else self.tradeports + ) + + commodities = [] + if commodity is None: + commodities = self.commodities + else: + commodities.append(commodity) + + trading_routes = [] + errors = [] + for commodity in commodities: + commodity_routes = [] + if not illegal_commodities_allowed and commodity["illegal"] == "1": + continue + for start_tradeport in start_tradeports: + if ( + "prices" not in start_tradeport + or commodity["code"] not in start_tradeport["prices"] + or start_tradeport["prices"][commodity["code"]]["operation"] + != "buy" + ): + continue + for end_tradeport in end_tradeports: + if ( + "prices" not in end_tradeport + or commodity["code"] not in end_tradeport["prices"] + or end_tradeport["prices"][commodity["code"]]["operation"] + != "sell" + ): + continue + + if ( + ship + and ship["hull_trading"] is True + and ( + "hull_trading" not in start_tradeport + or start_tradeport["hull_trading"] is not True + or "hull_trading" not in end_tradeport + or end_tradeport["hull_trading"] is not True + ) + ): + continue + + trading_route_new = self._get_trading_route( + ship, + start_tradeport, + money, + free_cargo_space, + end_tradeport, + commodity, + illegal_commodities_allowed, + ) + + if isinstance(trading_route_new, str): + if trading_route_new not in errors: + errors.append(trading_route_new) + else: + commodity_routes.append(trading_route_new) + + if len(commodity_routes) > 0: + if self.uexcorp_summarize_routes_by_commodity: + best_commodity_routes = heapq.nlargest( + 1, commodity_routes, key=lambda k: int(k["profit"]) + ) + trading_routes.extend(best_commodity_routes) + else: + trading_routes.extend(commodity_routes) + + if len(trading_routes) > 0: + additional_answer = "" + if len(trading_routes) < maximal_number_of_routes: + additional_answer += f" There are only {len(trading_routes)} with different commodities available. " + else: + additional_answer += f" There are {len(trading_routes)} routes available and these are the best {maximal_number_of_routes} ones." 
+
+            # sort trading routes by profit and limit to maximal_number_of_routes
+            trading_routes = heapq.nlargest(
+                maximal_number_of_routes, trading_routes, key=lambda k: int(k["profit"])
+            )
+
+            for trading_route in trading_routes:
+                destinationselection = []
+                for tradeport in trading_route["end"]:
+                    destinationselection.append(
+                        f"{self._get_tradeport_route_description(tradeport)}"
+                    )
+                trading_route["end"] = " OR ".join(destinationselection)
+                startselection = []
+                for tradeport in trading_route["start"]:
+                    startselection.append(
+                        f"{self._get_tradeport_route_description(tradeport)}"
+                    )
+                trading_route["start"] = " OR ".join(startselection)
+
+            # format the trading routes
+            for trading_route in trading_routes:
+                trading_route["commodity"] = self._format_commodity_name(
+                    trading_route["commodity"]
+                )
+                trading_route["profit"] = f"{trading_route['profit']} aUEC"
+                trading_route["buy"] = f"{trading_route['buy']} aUEC"
+                trading_route["sell"] = f"{trading_route['sell']} aUEC"
+                trading_route["cargo"] = f"{trading_route['cargo']} SCU"
+
+            message = (
+                "Possible commodities with their profit. Just give basic overview at first."
+                + additional_answer
+                + " JSON: "
+                + json.dumps(trading_routes)
+            )
+
+            self._log(message, True)
+            return message
+        else:
+            return_string = "No trading routes found."
+            if len(errors) > 0:
+                return_string += "\nPossible errors are:\n- " + "\n- ".join(errors)
+            await self._print(return_string, True)
+            return return_string
+
+    def _get_trading_route(
+        self,
+        ship: dict[str, any],
+        position_start: dict[str, any],
+        money: int = None,
+        free_cargo_space: int = None,
+        position_end: dict[str, any] = None,
+        commodity: dict[str, any] = None,
+        illegal_commodities_allowed: bool = True,
+    ) -> dict[str, any] | str:
+        """
+        Finds the best trading route based on the given parameters.
+
+        Args:
+            ship (dict[str, any]): The ship dictionary.
+            position_start (dict[str, any]): The starting position dictionary.
+            money (int, optional): The amount of money to spend. Defaults to None.
+            free_cargo_space (int, optional): The amount of free cargo space. Defaults to None.
+            position_end (dict[str, any], optional): The ending position dictionary. Defaults to None.
+            commodity (dict[str, any], optional): The commodity dictionary. Defaults to None.
+            illegal_commodities_allowed (bool, optional): Flag indicating whether illegal commodities are allowed. Defaults to True.
+
+        Returns:
+            dict[str, any] | str: The trading route as a dictionary if one is found, otherwise an error message string.
+        """
+
+        # set variables
+        cargo_space = ship["scu"]
+        if free_cargo_space:
+            cargo_space = free_cargo_space
+            if free_cargo_space > ship["scu"]:
+                cargo_space = ship["scu"]
+
+        if cargo_space < 1:
+            return "Your ship has no cargo space to trade."
+
+        commodity_filter = commodity
+        start_tradeports = self._get_tradeports_by_position_name(position_start["name"])
+        if ship["hull_trading"] is True:
+            start_tradeports = [
+                tradeport
+                for tradeport in start_tradeports
+                if "hull_trading" in tradeport and tradeport["hull_trading"] is True
+            ]
+        if len(start_tradeports) < 1:
+            if ship["hull_trading"] is True:
+                return "No valid start position given. Make sure to provide a start point compatible with your ship."
+            return "No valid start position given. Try a different position or just name a planet or star system."
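Editorial note: the search loops below repeatedly apply one piece of arithmetic per buy/sell pair — affordable cargo is capped by both money and cargo space, and profit is the price spread times the loaded SCU. A standalone sketch of that computation, with invented numbers:

```python
# Sketch of the per-pair profit arithmetic in _get_trading_route; values invented.
import math

price_buy = 25      # aUEC per SCU at the start tradeport
price_sell = 28     # aUEC per SCU at the end tradeport
cargo_space = 96    # SCU the ship can carry
money = 2000        # aUEC available; None would mean "unlimited"

cargo_by_money = math.floor(money / price_buy)    # 80 SCU affordable
cargo = min(cargo_by_money, cargo_space)          # 80 SCU loaded
profit = round(cargo * (price_sell - price_buy))  # 240 aUEC

print(cargo, profit)  # 80 240
```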
+
+        end_tradeports = []
+        if position_end is None:
+            for tradeport in self.tradeports:
+                if tradeport["system"] == start_tradeports[0]["system"]:
+                    end_tradeports.append(tradeport)
+        else:
+            end_tradeports = self._get_tradeports_by_position_name(position_end["name"])
+        if ship["hull_trading"] is True:
+            end_tradeports = [
+                tradeport
+                for tradeport in end_tradeports
+                if "hull_trading" in tradeport and tradeport["hull_trading"] is True
+            ]
+        if len(end_tradeports) < 1:
+            return "No valid end position given."
+
+        if (
+            len(end_tradeports) == 1
+            and len(start_tradeports) == 1
+            and end_tradeports[0]["code"] == start_tradeports[0]["code"]
+        ):
+            return "Start and end position are the same."
+
+        if money is not None and money <= 0:
+            return "You don't have enough money to trade."
+
+        best_route = {
+            "start": [],
+            "end": [],
+            "commodity": {},
+            "profit": 0,
+            "cargo": 0,
+            "buy": 0,
+            "sell": 0,
+        }
+
+        # apply trade port blacklist
+        if self.uexcorp_trade_blacklist:
+            for blacklist_item in self.uexcorp_trade_blacklist:
+                if "tradeport" in blacklist_item and blacklist_item["tradeport"]:
+                    for tradeport in start_tradeports:
+                        if tradeport["name"] == blacklist_item["tradeport"]:
+                            if (
+                                "commodity" not in blacklist_item
+                                or not blacklist_item["commodity"]
+                            ):
+                                # remove tradeport, if no commodity given
+                                start_tradeports.remove(tradeport)
+                                break
+                            else:
+                                commodity = self._get_commodity_by_name(
+                                    blacklist_item["commodity"]
+                                )
+                                for commodity_code, data in tradeport["prices"].items():
+                                    if commodity["code"] == commodity_code:
+                                        # remove commodity code from tradeport
+                                        tradeport["prices"].pop(commodity_code)
+                                        break
+                    for tradeport in end_tradeports:
+                        if tradeport["name"] == blacklist_item["tradeport"]:
+                            if (
+                                "commodity" not in blacklist_item
+                                or not blacklist_item["commodity"]
+                            ):
+                                # remove tradeport, if no commodity given
+                                end_tradeports.remove(tradeport)
+                                break
+                            else:
+                                commodity = self._get_commodity_by_name(
+                                    blacklist_item["commodity"]
+                                )
+                                for commodity_code, data in tradeport["prices"].items():
+                                    if commodity["code"] == commodity_code:
+                                        # remove commodity code from tradeport
+                                        tradeport["prices"].pop(commodity_code)
+                                        break
+
+        for tradeport_start in start_tradeports:
+            commodities = []
+            if "prices" not in tradeport_start:
+                continue
+
+            for attr, price in tradeport_start["prices"].items():
+                if price["operation"] == "buy" and (
+                    commodity_filter is None or commodity_filter["code"] == attr
+                ):
+                    commodity = self._get_commodity_by_code(attr)
+                    if (
+                        illegal_commodities_allowed is True
+                        or commodity["illegal"] != "1"
+                    ):
+                        price["short_name"] = attr
+
+                        in_blacklist = False
+                        # apply commodity blacklist (items that name a commodity but no tradeport)
+                        if self.uexcorp_trade_blacklist:
+                            for blacklist_item in self.uexcorp_trade_blacklist:
+                                if (
+                                    "commodity" in blacklist_item
+                                    and blacklist_item["commodity"]
+                                    and (
+                                        "tradeport" not in blacklist_item
+                                        or not blacklist_item["tradeport"]
+                                    )
+                                ):
+                                    if commodity["name"] == blacklist_item["commodity"]:
+                                        # skip this commodity entirely
+                                        in_blacklist = True
+                                        break
+
+                        if not in_blacklist:
+                            commodities.append(price)
+
+            if len(commodities) < 1:
+                continue
+
+            for tradeport_end in end_tradeports:
+                if "prices" not in tradeport_end or (
+                    commodity_filter is not None
+                    and commodity_filter["code"] not in tradeport_end["prices"]
+                ):
+                    continue
+                for attr, price in tradeport_end["prices"].items():
+                    price["short_name"] = attr
+
+                    sell_commodity = self._get_commodity_by_code(attr)
+                    in_blacklist = False
+                    # apply commodity blacklist (items that name a commodity but no tradeport)
+                    if sell_commodity and self.uexcorp_trade_blacklist:
+                        for blacklist_item in self.uexcorp_trade_blacklist:
+                            if (
+                                "commodity" in blacklist_item
+                                and blacklist_item["commodity"]
+                                and (
+                                    "tradeport" not in blacklist_item
+                                    or not blacklist_item["tradeport"]
+                                )
+                            ):
+                                if (
+                                    sell_commodity["name"]
+                                    == blacklist_item["commodity"]
+                                ):
+                                    # skip this commodity entirely
+                                    in_blacklist = True
+                                    break
+
+                    if in_blacklist:
+                        continue
+
+                    for commodity in commodities:
+                        if (
+                            commodity["short_name"] == price["short_name"]
+                            and price["operation"] == "sell"
+                            and price["price_sell"] > commodity["price_buy"]
+                        ):
+                            if money is None:
+                                cargo_by_money = cargo_space
+                            else:
+                                cargo_by_money = math.floor(
+                                    money / commodity["price_buy"]
+                                )
+                            cargo_by_space = cargo_space
+                            cargo = min(cargo_by_money, cargo_by_space)
+                            if cargo >= 1:
+                                profit = round(
+                                    cargo
+                                    * (price["price_sell"] - commodity["price_buy"])
+                                )
+                                if profit > best_route["profit"]:
+                                    best_route["start"] = [tradeport_start]
+                                    best_route["end"] = [tradeport_end]
+                                    best_route["commodity"] = price
+                                    best_route["profit"] = profit
+                                    best_route["cargo"] = cargo
+                                    best_route["buy"] = commodity["price_buy"] * cargo
+                                    best_route["sell"] = price["price_sell"] * cargo
+                                elif (
+                                    profit == best_route["profit"]
+                                    and best_route["commodity"]["short_name"]
+                                    == price["short_name"]
+                                ):
+                                    if tradeport_start not in best_route["start"]:
+                                        best_route["start"].append(tradeport_start)
+                                    if tradeport_end not in best_route["end"]:
+                                        best_route["end"].append(tradeport_end)
+
+        if len(best_route["start"]) == 0:
+            return f"No route found for your {ship['name']}. Try a different route."
+
+        best_route["profit"] = f"{best_route['profit']}"
+        best_route["cargo"] = f"{best_route['cargo']}"
+        best_route["buy"] = f"{best_route['buy']}"
+        best_route["sell"] = f"{best_route['sell']}"
+
+        return best_route
+
+    def _get_ship_by_name(self, name: str) -> dict[str, any] | None:
+        """Finds the ship with the specified name and returns the ship or None.
+
+        Args:
+            name (str): The name of the ship to search for.
+
+        Returns:
+            Optional[object]: The ship object if found, or None if not found.
+        """
+        return self.ship_dict.get(name.lower()) if name else None
+
+    def _get_tradeport_by_name(self, name: str) -> dict[str, any] | None:
+        """Finds the tradeport with the specified name and returns the tradeport or None.
+
+        Args:
+            name (str): The name of the tradeport to search for.
+
+        Returns:
+            Optional[object]: The tradeport object if found, otherwise None.
+        """
+        return self.tradeport_dict.get(name.lower()) if name else None
+
+    def _get_tradeport_by_code(self, code: str) -> dict[str, any] | None:
+        """Finds the tradeport with the specified code and returns the tradeport or None.
+
+        Args:
+            code (str): The code of the tradeport to search for.
+
+        Returns:
+            Optional[object]: The tradeport object if found, otherwise None.
+        """
+        return self.tradeport_code_dict.get(code.lower()) if code else None
+
+    def _get_planet_by_name(self, name: str) -> dict[str, any] | None:
+        """Finds the planet with the specified name and returns the planet or None.
+
+        Args:
+            name (str): The name of the planet to search for.
+
+        Returns:
+            Optional[object]: The planet object if found, otherwise None.
+        """
+        return self.planet_dict.get(name.lower()) if name else None
+
+    def _get_city_by_name(self, name: str) -> dict[str, any] | None:
+        """Finds the city with the specified name and returns the city or None.
+ + Args: + name (str): The name of the city to search for. + + Returns: + Optional[object]: The city object if found, or None if not found. + """ + return self.city_dict.get(name.lower()) if name else None + + def _get_satellite_by_name(self, name: str) -> dict[str, any] | None: + """Finds the satellite with the specified name and returns the satellite or None. + + Args: + name (str): The name of the satellite to search for. + + Returns: + Optional[object]: The satellite object if found, otherwise None. + """ + return self.satellite_dict.get(name.lower()) if name else None + + def _get_system_by_name(self, name: str) -> dict[str, any] | None: + """Finds the system with the specified name and returns the system or None. + + Args: + name (str): The name of the system to search for. + + Returns: + Optional[object]: The system object if found, otherwise None. + """ + return self.system_dict.get(name.lower()) if name else None + + def _get_commodity_by_name(self, name: str) -> dict[str, any] | None: + """Finds the commodity with the specified name and returns the commodity or None. + + Args: + name (str): The name of the commodity to search for. + + Returns: + Optional[object]: The commodity object if found, otherwise None. + """ + return self.commodity_dict.get(name.lower()) if name else None + + def _get_tradeport_route_description(self, tradeport: dict[str, any]) -> str: + """Returns the breadcrums of a tradeport. + + Args: + tradeport (dict[str, any]): The tradeport information. + + Returns: + str: The description of the tradeport route. + """ + tradeport = self._get_converted_tradeport_for_output(tradeport) + keys = [ + ("system", "Star-System"), + ("planet", "Planet"), + ("satellite", "Satellite"), + ("city", "City"), + ("name", "Trade Point"), + ] + route = [f"{name}: {tradeport[key]}" for key, name in keys if key in tradeport] + return f"({' >> '.join(route)})" + + def _get_system_name_by_code(self, code: str) -> str: + """Returns the name of the system with the specified code. + + Args: + code (str): The code of the system. + + Returns: + str: The name of the system with the specified code. + """ + return ( + self._format_system_name(self.system_code_dict.get(code.lower())) + if code + else None + ) + + def _get_planet_name_by_code(self, code: str) -> str: + """Returns the name of the planet with the specified code. + + Args: + code (str): The code of the planet. + + Returns: + str: The name of the planet with the specified code. + """ + return ( + self._format_planet_name(self.planet_code_dict.get(code.lower())) + if code + else None + ) + + def _get_satellite_name_by_code(self, code: str) -> str: + """Returns the name of the satellite with the specified code. + + Args: + code (str): The code of the satellite. + + Returns: + str: The name of the satellite with the specified code. + """ + return ( + self._format_satellite_name(self.satellite_code_dict.get(code.lower())) + if code + else None + ) + + def _get_city_name_by_code(self, code: str) -> str: + """Returns the name of the city with the specified code. + + Args: + code (str): The code of the city. + + Returns: + str: The name of the city with the specified code. + """ + return ( + self._format_city_name(self.city_code_dict.get(code.lower())) + if code + else None + ) + + def _get_commodity_name_by_code(self, code: str) -> str: + """Returns the name of the commodity with the specified code. + + Args: + code (str): The code of the commodity. + + Returns: + str: The name of the commodity with the specified code. 
+ """ + return ( + self._format_commodity_name(self.commodity_code_dict.get(code.lower())) + if code + else None + ) + + def _get_commodity_by_code(self, code: str) -> dict[str, any] | None: + """Finds the commodity with the specified code and returns the commodity or None. + + Args: + code (str): The code of the commodity to search for. + + Returns: + Optional[object]: The commodity object if found, otherwise None. + """ + return self.commodity_code_dict.get(code.lower()) if code else None + + def _get_tradeports_by_position_name( + self, name: str, direct: bool = False + ) -> list[dict[str, any]]: + """Returns all tradeports with the specified position name. + + Args: + name (str): The position name to search for. + + Returns: + list[dict[str, any]]: A list of tradeports matching the position name. + """ + if not name: + return [] + + tradeports = [] + + tradeport_temp = self._get_tradeport_by_name(name) + if tradeport_temp: + tradeports.append(tradeport_temp) + + tradeports.extend(self._get_tradeports_by_systemname(name)) + tradeports.extend(self._get_tradeports_by_planetname(name)) + tradeports.extend(self._get_tradeports_by_satellitename(name)) + tradeports.extend(self._get_tradeports_by_cityname(name)) + return tradeports + + def _get_satellites_by_planetcode(self, code: str) -> list[dict[str, any]]: + """Returns the satellite with the specified planet code. + + Args: + code (str): The code of the planet. + + Returns: + Optional[object]: The satellite object if found, otherwise None. + """ + return self.satellites_by_planet.get(code.lower(), []) if code else [] + + def _get_cities_by_planetcode(self, code: str) -> list[dict[str, any]]: + """Returns all cities with the specified planet code. + + Args: + code (str): The code of the planet. + + Returns: + list[dict[str, any]]: A list of cities matching the planet code. + """ + return self.cities_by_planet.get(code.lower(), []) if code else [] + + def _get_planets_by_systemcode(self, code: str) -> list[dict[str, any]]: + """Returns all planets with the specified system code. + + Args: + code (str): The code of the system. + + Returns: + list[dict[str, any]]: A list of planets matching the system code. + """ + return self.planets_by_system.get(code.lower(), []) if code else [] + + def _get_tradeports_by_systemcode(self, code: str) -> list[dict[str, any]]: + """Returns all tradeports with the specified system code. + + Args: + code (str): The code of the system. + + Returns: + list[dict[str, any]]: A list of tradeports matching the system code. + """ + return self.tradeports_by_system.get(code.lower(), []) if code else [] + + def _get_tradeports_by_planetcode(self, code: str) -> list[dict[str, any]]: + """Returns all tradeports with the specified planet code. + + Args: + code (str): The code of the planet. + + Returns: + list[dict[str, any]]: A list of tradeports matching the planet code. + """ + return self.tradeports_by_planet.get(code.lower(), []) if code else [] + + def _get_tradeports_by_satellitecode(self, code: str) -> list[dict[str, any]]: + """Returns all tradeports with the specified satellite code. + + Args: + code (str): The code of the satellite. + + Returns: + list[dict[str, any]]: A list of tradeports matching the satellite code. + """ + return self.tradeports_by_satellite.get(code.lower(), []) if code else [] + + def _get_tradeports_by_citycode(self, code: str) -> list[dict[str, any]]: + """Returns all tradeports with the specified city code. + + Args: + code (str): The code of the city. 
+
+        Returns:
+            list[dict[str, any]]: A list of tradeports matching the city code.
+        """
+        return self.tradeports_by_city.get(code.lower(), []) if code else []
+
+    def _get_tradeports_by_planetname(self, name: str) -> list[dict[str, any]]:
+        """Returns all tradeports with the specified planet name.
+
+        Args:
+            name (str): The name of the planet.
+
+        Returns:
+            list[dict[str, any]]: A list of tradeports matching the planet name.
+        """
+        planet = self._get_planet_by_name(name)
+        return self._get_tradeports_by_planetcode(planet["code"]) if planet else []
+
+    def _get_tradeports_by_satellitename(self, name: str) -> list[dict[str, any]]:
+        """Returns all tradeports with the specified satellite name.
+
+        Args:
+            name (str): The name of the satellite.
+
+        Returns:
+            list[dict[str, any]]: A list of tradeports matching the satellite name.
+        """
+        satellite = self._get_satellite_by_name(name)
+        return (
+            self._get_tradeports_by_satellitecode(satellite["code"])
+            if satellite
+            else []
+        )
+
+    def _get_tradeports_by_cityname(self, name: str) -> list[dict[str, any]]:
+        """Returns all tradeports with the specified city name.
+
+        Args:
+            name (str): The name of the city.
+
+        Returns:
+            list[dict[str, any]]: A list of tradeports matching the city name.
+        """
+        city = self._get_city_by_name(name)
+        return self._get_tradeports_by_citycode(city["code"]) if city else []
+
+    def _get_tradeports_by_systemname(self, name: str) -> list[dict[str, any]]:
+        """Returns all tradeports with the specified system name.
+
+        Args:
+            name (str): The name of the system.
+
+        Returns:
+            list[dict[str, any]]: A list of tradeports matching the system name.
+        """
+        system = self._get_system_by_name(name)
+        return self._get_tradeports_by_systemcode(system["code"]) if system else []
diff --git a/templates/configs/_Star Citizen/ATC.template.yaml b/templates/configs/_Star Citizen/ATC.template.yaml
new file mode 100644
index 00000000..eebb10e2
--- /dev/null
+++ b/templates/configs/_Star Citizen/ATC.template.yaml
@@ -0,0 +1,44 @@
+name: ATC
+description: |
+  Air Traffic Controller is tasked with overseeing spacecraft traffic while ensuring safety and efficiency.
+  It handles all aspects of space station operations and emergencies.
+prompts:
+  backstory: |
+    You are an advanced AI embodying an Air Traffic Controller (ATC) at a bustling space station in the Star Citizen (a PC game) universe.
+    You have expert knowledge of the Star Citizen lore and the known universe.
+    Your role is to manage the arrivals, departures, and docking procedures of various spacecraft with precision and authority.
+    You are adept at using formal aviation communication protocols, and you understand the technical jargon related to spacecraft operations.
+    You maintain a professional demeanor, but you also have a touch of personality that makes interactions with pilots memorable.
+    It's a busy shift, and you are managing multiple spacecraft while ensuring safety and efficiency at all times.
+
+    Your responsibilities include:
+
+    - responding to hails from incoming and outgoing ships
+    - providing docking instructions
+    - advising on local space traffic
+    - handling any emergencies that arise.
+
+    Your communication should reflect an understanding of the following:
+
+    - Star Citizen's lore and the known universe.
+    - Identifying ships by their designated call signs.
+    - Issuing precise landing pad assignments.
+    - Clearing ships for take-off with attention to surrounding traffic.
+    - Managing flight paths to avoid collisions or space hazards.
+    - Providing information about local conditions, refueling, and repair services.
+    - Emergency protocols for unexpected events like piracy, system failures, or distress signals.
+record_key: delete
+sound:
+  effects: [RADIO]
+  play_beep: True
+openai:
+  tts_voice: onyx
+commands:
+  - name: RequestLandingPermission
+    actions:
+      - keyboard:
+          hotkey: alt+n
+  - name: RequestDeparture
+    actions:
+      - keyboard:
+          hotkey: alt+n
diff --git a/templates/configs/_Star Citizen/Computer.template.yaml b/templates/configs/_Star Citizen/Computer.template.yaml
new file mode 100644
index 00000000..faff9da8
--- /dev/null
+++ b/templates/configs/_Star Citizen/Computer.template.yaml
@@ -0,0 +1,314 @@
+name: Computer
+description: The computer of your ship. It's in charge of all the systems on your ship and executes various commands.
+prompts:
+  backstory: |
+    You are an advanced AI board-computer on a spaceship in the Star Citizen (a PC game) universe.
+    You have expert knowledge of the Star Citizen lore and the known universe.
+    You never refer to Star Citizen as a game, but as the universe you are in.
+    You are the ship's computer, and you are in charge of all the systems of the spaceship.
+    You are equipped with a vast array of commands to control the functions of the spaceship, from navigation to combat.
+    You are allowed to execute any of the commands at your disposal without asking for permission.
+record_key: end
+is_voice_activation_default: True
+sound:
+  effects: [ROBOT]
+commands:
+  - name: ToggleCruiseControlOrToggleHoldCurrentSpeed
+    actions:
+      - keyboard:
+          hotkey: alt+c
+  - name: FlightReady
+    actions:
+      - keyboard:
+          hotkey: alt gr+r
+    instant_activation:
+      - Power up the ship
+      - Start the ship
+      - Flight Ready
+    responses:
+      - Powering up the ship. All systems online. Ready for takeoff.
+      - Start sequence initiated. All systems online. Ready for takeoff.
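Editorial note: each command entry above maps a `name` to low-level `actions` (keyboard hotkeys with optional `hold` durations, mouse buttons, waits). As a rough illustration only — this is not Wingman AI's actual executor — such an action list could be replayed with the third-party `keyboard` package like this:

```python
# Hypothetical replay of a command's actions; Wingman AI's real executor differs.
import time
import keyboard  # pip install keyboard

def run_actions(actions: list[dict]) -> None:
    for action in actions:
        if "wait" in action:
            time.sleep(action["wait"])
        elif "keyboard" in action:
            cfg = action["keyboard"]
            hold = cfg.get("hold")
            if hold:
                keyboard.press(cfg["hotkey"])    # hold the key down...
                time.sleep(hold)                 # ...for the configured duration
                keyboard.release(cfg["hotkey"])
            else:
                keyboard.press_and_release(cfg["hotkey"])

# e.g. the ToggleCruiseControlOrToggleHoldCurrentSpeed command defined above:
run_actions([{"keyboard": {"hotkey": "alt+c"}}])
```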
+ - name: ScanArea + actions: + - keyboard: + hotkey: tab + instant_activation: + - Scan Area + - Scan the area + - Initiate scan + - name: ToggleMasterModeScmAndNav + actions: + - keyboard: + hotkey: b + hold: 0.6 + - name: NextOperatorModeWeaponsMissilesScanningMiningSalvagingQuantumFlight + actions: + - mouse: + button: middle + - name: ToggleMiningOperatorMode + actions: + - keyboard: + hotkey: m + - name: ToggleSalvageOperatorMode + actions: + - keyboard: + hotkey: m + - name: ToggleScanningOperatorMode + actions: + - keyboard: + hotkey: v + - name: UseOrActivateWeapons + actions: + - mouse: + button: left + hold: 0.4 + - name: UseOrActivateMissiles + actions: + - mouse: + button: left + hold: 0.4 + - name: UseOrActivateScanning + actions: + - mouse: + button: left + hold: 0.4 + - name: UseOrActivateMining + actions: + - mouse: + button: left + hold: 0.4 + - name: UseOrActivateSalvaging + actions: + - mouse: + button: left + hold: 0.4 + - name: UseOrActivateQuantumFlight + actions: + - mouse: + button: left + hold: 0.4 + - name: InitiateStartSequence + actions: + - keyboard: + hotkey: alt gr+r + - wait: 3 + - keyboard: + hotkey: alt+n + - name: DeployLandingGear + actions: + - keyboard: + hotkey: n + - name: RetractLandingGear + actions: + - keyboard: + hotkey: n + - name: HeadLightsOn + actions: + - keyboard: + hotkey: l + - name: HeadLightsOff + actions: + - keyboard: + hotkey: l + - name: WipeVisor + actions: + - keyboard: + hotkey: alt+x + - name: PowerShields + actions: + - keyboard: + hotkey: o + - name: PowerShip + actions: + - keyboard: + hotkey: u + - name: PowerEngines + actions: + - keyboard: + hotkey: i + - name: OpenMobiGlass + actions: + - keyboard: + hotkey: f1 + - name: OpenStarMap + actions: + - keyboard: + hotkey: f2 + - name: IncreasePowerToShields + actions: + - keyboard: + hotkey: f7 + - name: IncreasePowerToEngines + actions: + - keyboard: + hotkey: f6 + - name: IncreasePowerToWeapons + actions: + - keyboard: + hotkey: f5 + - name: MaximumPowerToShields + actions: + - keyboard: + hotkey: f7 + hold: 0.8 + - name: MaximumPowerToEngines + actions: + - keyboard: + hotkey: f6 + hold: 0.8 + - name: MaximumPowerToWeapons + actions: + - keyboard: + hotkey: f5 + hold: 0.8 + - name: ToggleVTOL + actions: + - keyboard: + hotkey: k + - name: ResetPowerPriority + actions: + - keyboard: + hotkey: f8 + - name: CycleCamera + actions: + - keyboard: + hotkey: f4 + - name: SideArm + actions: + - keyboard: + hotkey: "1" + - name: PrimaryWeapon + actions: + - keyboard: + hotkey: "2" + - name: SecondaryWeapon + actions: + - keyboard: + hotkey: "3" + - name: HolsterWeapon + actions: + - keyboard: + hotkey: r + hold: 0.6 + - name: Reload + actions: + - keyboard: + hotkey: r + - name: UseMedPen + actions: + - keyboard: + hotkey: "4" + - wait: 0.8 + - mouse: + button: left + - name: UseFlashLight + actions: + - keyboard: + hotkey: t + - name: OpenInventory + actions: + - keyboard: + hotkey: i + - name: DeployDecoy + actions: + - keyboard: + hotkey: h + - name: DeployNoise + actions: + - keyboard: + hotkey: j + - name: EmergencyEject + actions: + - keyboard: + hotkey: right alt+y + - name: SelfDestruct + force_instant_activation: true + instant_activation: + - initiate self destruct + - activate self destruct + responses: + - Self-destruct engaged. Evacuation procedures recommended. + - Confirmed. Self-destruct in progress. 
+    actions:
+      - keyboard:
+          hotkey: backspace
+          hold: 0.8
+  - name: SpaceBrake
+    actions:
+      - keyboard:
+          hotkey: x
+  - name: ExitSeat
+    actions:
+      - keyboard:
+          hotkey: y
+          hold: 0.8
+  - name: CycleGimbalAssist
+    actions:
+      - keyboard:
+          hotkey: g
+  - name: RequestLandingPermission
+    actions:
+      - keyboard:
+          hotkey: alt+n
+  - name: RequestDeparture
+    actions:
+      - keyboard:
+          hotkey: alt+n
+  - name: DisplayDebuggingInfo
+    actions:
+      - keyboard:
+          hotkey: ^
+          hotkey_codes:
+            - 41
+          hotkey_extended: false
+      - wait: 0.5
+      - write: r_DisplayInfo 2
+      - wait: 0.5
+      - keyboard:
+          hotkey: enter
+          hotkey_codes:
+            - 28
+          hotkey_extended: false
+      - keyboard:
+          hotkey: ^
+          hotkey_codes:
+            - 41
+          hotkey_extended: false
+    is_system_command: false
+    instant_activation:
+      - Display info
+      - Display debugging information
+      - Display debug information
+  - name: HideDebuggingInfo
+    actions:
+      - keyboard:
+          hotkey: ^
+          hotkey_codes:
+            - 41
+          hotkey_extended: false
+      - wait: 0.5
+      - write: r_DisplayInfo 0
+      - wait: 0.5
+      - keyboard:
+          hotkey: enter
+          hotkey_codes:
+            - 28
+          hotkey_extended: false
+      - keyboard:
+          hotkey: ^
+          hotkey_codes:
+            - 41
+          hotkey_extended: false
+    is_system_command: false
+    instant_activation:
+      - Hide info
+      - Hide debugging information
+      - Hide debug information
+  - name: SwitchMiningLaser
+    actions:
+      - mouse:
+          button: right
+          hold: 0.6
+    instant_activation:
+      - Change mining laser
+      - Switch mining laser
diff --git a/configs/templates/default-avatar-wingman.png b/templates/configs/default-wingman-avatar.png
similarity index 100%
rename from configs/templates/default-avatar-wingman.png
rename to templates/configs/default-wingman-avatar.png
diff --git a/templates/configs/defaults.yaml b/templates/configs/defaults.yaml
new file mode 100644
index 00000000..b6cbabe9
--- /dev/null
+++ b/templates/configs/defaults.yaml
@@ -0,0 +1,128 @@
+prompts:
+  system_prompt: |
+    You are a so-called "Wingman", a virtual assistant that helps the user with various tasks.
+    You are designed to be an efficient expert in what you are doing.
+    The user might use you for specific tasks like executing commands or asking for information and you always fulfill these tasks to the best of your knowledge without hallucinating or inventing missing information.
+    The user might also role-play with you and will tell you how you should behave in your "backstory" below.
+
+    (BEGINNING of "general rules of conversation"):
+    You always follow these general rules of conversation, unless your backstory contradicts them:
+
+    - Always answer as quickly and concisely as possible. Never use more than 3 sentences per reply.
+    - You can execute commands (also called "tools" or "functions"). Some commands require additional parameters.
+    - Always ask the user for missing parameters if needed. Never invent any function parameters.
+    - If you cannot execute a command, always explain to the user why you were unable to execute it.
+    - After executing a command, acknowledge the execution with a single sentence.
+    - The user might talk to you in different languages. Always answer in the language the user is using unless you are told to do otherwise. Example: If the user talks English, you answer in English.
+    - Always prefer to use informal language. For example, use "Du" and "Dir" instead of "Sie" and "Ihnen" in German.
+    (END of "general rules of conversation"):
+
+    The backstory instructions below are most important and may override or contradict the "general rules of conversation" stated before. 
+ + (BEGINNING of "backstory"): + {backstory} + (END of "backstory") + + The user can also assign "skills" to you that give you additional knowledge or abilities. + These skills are defined in the "skills" section below. Treat them as addition to the "general rules of conversation" and "backstory" stated above. + Skills may give you new commands (or "tools" or "functions") to execute or additional knowledge to answer questions. + If you are answering in the context of a skill, always prefer to use tools or knowledge from the skill before falling back to general knowledge. + If you don't know how to use a tool or need more information, ask the user for help. + + (BEGINNING of "skills"): + {skills} + (END of "skills") +features: + tts_provider: wingman_pro + stt_provider: wingman_pro + conversation_provider: wingman_pro + summarize_provider: wingman_pro +sound: + effects: [] + play_beep: false +openai: + conversation_model: gpt-4o + summarize_model: gpt-4o + tts_voice: nova +mistral: + conversation_model: mistral-large-latest + summarize_model: mistral-medium-latest + endpoint: https://api.mistral.ai/v1 +groq: + conversation_model: llama3-70b-8192 + summarize_model: llama3-8b-8192 + endpoint: https://api.groq.com/openai/v1 +openrouter: + conversation_model: meta-llama/llama-3-8b-instruct:free + summarize_model: meta-llama/llama-3-8b-instruct:free + endpoint: https://openrouter.ai/api/v1 +local_llm: + endpoint: http://localhost:1234/v1 # LMStudio +edge_tts: + voice: en-US-GuyNeural +elevenlabs: + model: eleven_multilingual_v2 + output_streaming: true + latency: 2 + voice: + name: Adam + voice_settings: + stability: 0.71 + similarity_boost: 0.5 + style: 0.0 + use_speaker_boost: true +azure: + whisper: + api_base_url: https://openai-w-eu.openai.azure.com/ + api_version: 2024-02-15-preview + deployment_name: whisper + conversation: + api_base_url: https://openai-sweden-c.openai.azure.com/ + api_version: 2024-02-15-preview + deployment_name: gpt-4o + summarize: + api_base_url: https://openai-sweden-c.openai.azure.com/ + api_version: 2024-02-15-preview + deployment_name: gpt-4o + tts: + region: westeurope + voice: en-US-JennyMultilingualV2Neural + output_streaming: true + stt: + region: westeurope + languages: + - en-US + - de-DE +whispercpp: + base_url: http://127.0.0.1:8080 + autostart: false + autostart_settings: + whispercpp_exe_path: C:\Whispercpp\server.exe + whispercpp_model_path: C:\Whispercpp\models\ggml-base.bin + temperature: 0.0 + language: en +xvasynth: + xvasynth_path: C:\Program Files (x86)\Steam\steamapps\common\xVASynth + process_device: cpu + game_folder_name: terminator + voice: tn-T850 + language: en + pace: 1.0 + use_sr: false + use_cleanup: false + synthesize_url: http://127.0.0.1:8008/synthesize + load_model_url: http://127.0.0.1:8008/loadModel +wingman_pro: + stt_provider: azure_speech + tts_provider: azure + summarize_deployment: gpt-4o + conversation_deployment: gpt-4o +commands: + - name: ResetConversationHistory + instant_activation: + - Forget everything! + - Clear conversation history! + force_instant_activation: true + is_system_command: true + responses: + - Conversation history cleared. 
diff --git a/configs/templates/settings.yaml b/templates/configs/settings.yaml
similarity index 97%
rename from configs/templates/settings.yaml
rename to templates/configs/settings.yaml
index f9106bad..fc93bde6 100644
--- a/configs/templates/settings.yaml
+++ b/templates/configs/settings.yaml
@@ -1,3 +1,4 @@
+debug_mode: false
 audio: {}
 voice_activation:
   enabled: false
diff --git a/templates/skills/control_windows/default_config.yaml b/templates/skills/control_windows/default_config.yaml
new file mode 100644
index 00000000..14cb2e6e
--- /dev/null
+++ b/templates/skills/control_windows/default_config.yaml
@@ -0,0 +1,29 @@
+name: ControlWindows
+module: skills.control_windows.main
+description:
+  en: Launch applications and control your Windows computer.
+  de: Starte Anwendungen und steuere deinen Windows-Computer.
+hint:
+  en: This skill currently only looks for applications in your Start Menu directory (%APPDATA%/Microsoft/Windows/Start Menu/Programs), so if it tells you that it cannot find an application, create a shortcut in that directory.
+  de: Dieser Skill sucht derzeit nur nach Anwendungen in deinem Startmenü-Verzeichnis (%APPDATA%/Microsoft/Windows/Start Menu/Programs). Wenn er dir also sagt, dass er eine Anwendung nicht finden kann, erstelle eine Verknüpfung in diesem Verzeichnis.
+examples:
+  - question:
+      en: Open Spotify.
+      de: Öffne Spotify.
+    answer:
+      en: (opens the Spotify application)
+      de: (öffnet die Spotify-Anwendung)
+  - question:
+      en: Activate Notepad.
+      de: Aktiviere Notepad.
+    answer:
+      en: (maximizes the Notepad application)
+      de: (maximiert die Notepad-Anwendung)
+  - question:
+      en: Close Notepad.
+      de: Schließe Notepad.
+    answer:
+      en: (closes the Notepad application)
+      de: (schließt die Notepad-Anwendung)
+prompt: |
+  You can also control Windows functions, like opening and closing applications.
diff --git a/templates/skills/control_windows/dependencies/pygetwindow/__init__.py b/templates/skills/control_windows/dependencies/pygetwindow/__init__.py
new file mode 100644
index 00000000..fc45ab9a
--- /dev/null
+++ b/templates/skills/control_windows/dependencies/pygetwindow/__init__.py
@@ -0,0 +1,349 @@
+# PyGetWindow
+# A cross-platform module to find information about the windows on the screen.
+
+# Work in progress
+
+# Useful info:
+# https://stackoverflow.com/questions/373020/finding-the-current-active-window-in-mac-os-x-using-python
+# https://stackoverflow.com/questions/7142342/get-window-position-size-with-python
+
+
+# win32 api and ctypes on Windows
+# cocoa api and pyobjc on Mac
+# Xlib on linux
+
+
+# Possible Future Features:
+# get/click menu (win32: GetMenuItemCount, GetMenuItemInfo, GetMenuItemID, GetMenu, GetMenuItemRect)
+
+
+__version__ = "0.0.9"
+
+import sys, collections, pyrect
+
+
+class PyGetWindowException(Exception):
+    """
+    Base class for exceptions raised when PyGetWindow functions
+    encounter a problem. If PyGetWindow raises an exception that isn't
+    this class, that indicates a bug in the module.
+    """
+    pass
+
+
+def pointInRect(x, y, left, top, width, height):
+    """Returns ``True`` if the ``(x, y)`` point is within the box described
+    by ``(left, top, width, height)``."""
+    return left < x < left + width and top < y < top + height
+
+
+# NOTE: `Rect` is a named tuple for use in Python, while structs.RECT represents
+# the win32 RECT struct. PyRect's Rect class is used for handling changing
+# geometry of rectangular areas.
+Rect = collections.namedtuple("Rect", "left top right bottom") +Point = collections.namedtuple("Point", "x y") +Size = collections.namedtuple("Size", "width height") + + +class BaseWindow: + def __init__(self): + pass + + def _setupRectProperties(self): + def _onRead(attrName): + r = self._getWindowRect() + self._rect._left = r.left # Setting _left directly to skip the onRead. + self._rect._top = r.top # Setting _top directly to skip the onRead. + self._rect._width = r.right - r.left # Setting _width directly to skip the onRead. + self._rect._height = r.bottom - r.top # Setting _height directly to skip the onRead. + + def _onChange(oldBox, newBox): + self.moveTo(newBox.left, newBox.top) + self.resizeTo(newBox.width, newBox.height) + + r = self._getWindowRect() + self._rect = pyrect.Rect(r.left, r.top, r.right - r.left, r.bottom - r.top, onChange=_onChange, onRead=_onRead) + + def _getWindowRect(self): + raise NotImplementedError + + def __str__(self): + r = self._getWindowRect() + width = r.right - r.left + height = r.bottom - r.top + return '<%s left="%s", top="%s", width="%s", height="%s", title="%s">' % ( + self.__class__.__qualname__, + r.left, + r.top, + width, + height, + self.title, + ) + + def close(self): + """Closes this window. This may trigger "Are you sure you want to + quit?" dialogs or other actions that prevent the window from + actually closing. This is identical to clicking the X button on the + window.""" + raise NotImplementedError + + def minimize(self): + """Minimizes this window.""" + raise NotImplementedError + + def maximize(self): + """Maximizes this window.""" + raise NotImplementedError + + def restore(self): + """If maximized or minimized, restores the window to it's normal size.""" + raise NotImplementedError + + def activate(self): + """Activate this window and make it the foreground window.""" + raise NotImplementedError + + def resizeRel(self, widthOffset, heightOffset): + """Resizes the window relative to its current size.""" + raise NotImplementedError + + def resizeTo(self, newWidth, newHeight): + """Resizes the window to a new width and height.""" + raise NotImplementedError + + def moveRel(self, xOffset, yOffset): + """Moves the window relative to its current position.""" + raise NotImplementedError + + def moveTo(self, newLeft, newTop): + """Moves the window to new coordinates on the screen.""" + raise NotImplementedError + + @property + def isMinimized(self): + """Returns True if the window is currently minimized.""" + raise NotImplementedError + + @property + def isMaximized(self): + """Returns True if the window is currently maximized.""" + raise NotImplementedError + + @property + def isActive(self): + """Returns True if the window is currently the active, foreground window.""" + raise NotImplementedError + + @property + def title(self): + """Returns the window title as a string.""" + raise NotImplementedError + + @property + def visible(self): + raise NotImplementedError + + # Wrappers for pyrect.Rect object's properties: + @property + def left(self): + return self._rect.left + + @left.setter + def left(self, value): + # import pdb; pdb.set_trace() + self._rect.left # Run rect's onRead to update the Rect object. + self._rect.left = value + + @property + def right(self): + return self._rect.right + + @right.setter + def right(self, value): + self._rect.right # Run rect's onRead to update the Rect object. 
+ self._rect.right = value + + @property + def top(self): + return self._rect.top + + @top.setter + def top(self, value): + self._rect.top # Run rect's onRead to update the Rect object. + self._rect.top = value + + @property + def bottom(self): + return self._rect.bottom + + @bottom.setter + def bottom(self, value): + self._rect.bottom # Run rect's onRead to update the Rect object. + self._rect.bottom = value + + @property + def topleft(self): + return self._rect.topleft + + @topleft.setter + def topleft(self, value): + self._rect.topleft # Run rect's onRead to update the Rect object. + self._rect.topleft = value + + @property + def topright(self): + return self._rect.topright + + @topright.setter + def topright(self, value): + self._rect.topright # Run rect's onRead to update the Rect object. + self._rect.topright = value + + @property + def bottomleft(self): + return self._rect.bottomleft + + @bottomleft.setter + def bottomleft(self, value): + self._rect.bottomleft # Run rect's onRead to update the Rect object. + self._rect.bottomleft = value + + @property + def bottomright(self): + return self._rect.bottomright + + @bottomright.setter + def bottomright(self, value): + self._rect.bottomright # Run rect's onRead to update the Rect object. + self._rect.bottomright = value + + @property + def midleft(self): + return self._rect.midleft + + @midleft.setter + def midleft(self, value): + self._rect.midleft # Run rect's onRead to update the Rect object. + self._rect.midleft = value + + @property + def midright(self): + return self._rect.midright + + @midright.setter + def midright(self, value): + self._rect.midright # Run rect's onRead to update the Rect object. + self._rect.midright = value + + @property + def midtop(self): + return self._rect.midtop + + @midtop.setter + def midtop(self, value): + self._rect.midtop # Run rect's onRead to update the Rect object. + self._rect.midtop = value + + @property + def midbottom(self): + return self._rect.midbottom + + @midbottom.setter + def midbottom(self, value): + self._rect.midbottom # Run rect's onRead to update the Rect object. + self._rect.midbottom = value + + @property + def center(self): + return self._rect.center + + @center.setter + def center(self, value): + self._rect.center # Run rect's onRead to update the Rect object. + self._rect.center = value + + @property + def centerx(self): + return self._rect.centerx + + @centerx.setter + def centerx(self, value): + self._rect.centerx # Run rect's onRead to update the Rect object. + self._rect.centerx = value + + @property + def centery(self): + return self._rect.centery + + @centery.setter + def centery(self, value): + self._rect.centery # Run rect's onRead to update the Rect object. + self._rect.centery = value + + @property + def width(self): + return self._rect.width + + @width.setter + def width(self, value): + self._rect.width # Run rect's onRead to update the Rect object. + self._rect.width = value + + @property + def height(self): + return self._rect.height + + @height.setter + def height(self, value): + self._rect.height # Run rect's onRead to update the Rect object. + self._rect.height = value + + @property + def size(self): + return self._rect.size + + @size.setter + def size(self, value): + self._rect.size # Run rect's onRead to update the Rect object. + self._rect.size = value + + @property + def area(self): + return self._rect.area + + @area.setter + def area(self, value): + self._rect.area # Run rect's onRead to update the Rect object. 
+ self._rect.area = value + + @property + def box(self): + return self._rect.box + + @box.setter + def box(self, value): + self._rect.box # Run rect's onRead to update the Rect object. + self._rect.box = value + + +if sys.platform == "darwin": + # raise NotImplementedError('PyGetWindow currently does not support macOS. If you have Appkit/Cocoa knowledge, please contribute! https://github.com/asweigart/pygetwindow') # TODO - implement mac + from ._pygetwindow_macos import * + + Window = MacOSWindow +elif sys.platform == "win32": + from ._pygetwindow_win import ( + Win32Window, + getActiveWindow, + getActiveWindowTitle, + getWindowsAt, + getWindowsWithTitle, + getAllWindows, + getAllTitles, + ) + + Window = Win32Window +else: + raise NotImplementedError( + "PyGetWindow currently does not support Linux. If you have Xlib knowledge, please contribute! https://github.com/asweigart/pygetwindow" + ) diff --git a/templates/skills/control_windows/dependencies/pygetwindow/_pygetwindow_macos.py b/templates/skills/control_windows/dependencies/pygetwindow/_pygetwindow_macos.py new file mode 100644 index 00000000..7ba44b6e --- /dev/null +++ b/templates/skills/control_windows/dependencies/pygetwindow/_pygetwindow_macos.py @@ -0,0 +1,174 @@ +import Quartz +import pygetwindow + + +def getAllTitles(): + """Returns a list of strings of window titles for all visible windows. + """ + + # Source: https://stackoverflow.com/questions/53237278/obtain-list-of-all-window-titles-on-macos-from-a-python-script/53985082#53985082 + windows = Quartz.CGWindowListCopyWindowInfo(Quartz.kCGWindowListExcludeDesktopElements | Quartz.kCGWindowListOptionOnScreenOnly, Quartz.kCGNullWindowID) + return ['%s %s' % (win[Quartz.kCGWindowOwnerName], win.get(Quartz.kCGWindowName, '')) for win in windows] + + +def getActiveWindow(): + """Returns a Window object of the currently active Window.""" + + # Source: https://stackoverflow.com/questions/5286274/front-most-window-using-cgwindowlistcopywindowinfo + windows = Quartz.CGWindowListCopyWindowInfo(Quartz.kCGWindowListExcludeDesktopElements | Quartz.kCGWindowListOptionOnScreenOnly, Quartz.kCGNullWindowID) + for win in windows: + if win['kCGWindowLayer'] == 0: + return '%s %s' % (win[Quartz.kCGWindowOwnerName], win.get(Quartz.kCGWindowName, '')) # Temporary. For now, we'll just return the title of the active window. + raise Exception('Could not find an active window.') # Temporary hack. + + +def getWindowsAt(x, y): + windows = Quartz.CGWindowListCopyWindowInfo(Quartz.kCGWindowListExcludeDesktopElements | Quartz.kCGWindowListOptionOnScreenOnly, Quartz.kCGNullWindowID) + matches = [] + for win in windows: + w = win['kCGWindowBounds'] + if pygetwindow.pointInRect(x, y, w['X'], w['Y'], w['Width'], w['Height']): + matches.append('%s %s' % (win[Quartz.kCGWindowOwnerName], win.get(Quartz.kCGWindowName, ''))) + return matches + + + +def activate(): + # TEMP - this is not a real api, I'm just using this name to store these notes for now. + + # Source: https://stackoverflow.com/questions/7460092/nswindow-makekeyandorderfront-makes-window-appear-but-not-key-or-front?rq=1 + # Source: https://stackoverflow.com/questions/4905024/is-it-possible-to-bring-window-to-front-without-taking-focus?rq=1 + pass + + +def getWindowGeometry(title): + # TEMP - this is not a real api, I'm just using this name to stoe these notes for now. 
+    windows = Quartz.CGWindowListCopyWindowInfo(Quartz.kCGWindowListExcludeDesktopElements | Quartz.kCGWindowListOptionOnScreenOnly, Quartz.kCGNullWindowID)
+    for win in windows:
+        if title in '%s %s' % (win[Quartz.kCGWindowOwnerName], win.get(Quartz.kCGWindowName, '')):
+            w = win['kCGWindowBounds']
+            return (w['X'], w['Y'], w['Width'], w['Height'])
+
+
+def isVisible(title):
+    # TEMP - this is not a real api, I'm just using this name to store these notes for now.
+    windows = Quartz.CGWindowListCopyWindowInfo(Quartz.kCGWindowListExcludeDesktopElements | Quartz.kCGWindowListOptionOnScreenOnly, Quartz.kCGNullWindowID)
+    for win in windows:
+        if title in '%s %s' % (win[Quartz.kCGWindowOwnerName], win.get(Quartz.kCGWindowName, '')):
+            return win['kCGWindowAlpha'] != 0.0
+
+def isMinimized():
+    # TEMP - this is not a real api, I'm just using this name to store these notes for now.
+    # Source: https://stackoverflow.com/questions/10258676/how-to-know-whether-a-window-is-minimised-or-not
+    # Use the kCGWindowIsOnscreen to check this. Minimized windows are considered to not be on the screen. (But I'm not sure if there are other situations where a window is "off screen".)
+
+    # I'm not sure how kCGWindowListOptionOnScreenOnly interferes with this.
+    pass
+
+# TODO: This class doesn't work yet. I've copied the Win32Window class and will make adjustments as needed here.
+
+class MacOSWindow():
+    def __init__(self, hWnd):
+        self._hWnd = hWnd # TODO fix this, this is a LP_c_long instead of an int.
+
+        def _onRead(attrName):
+            r = self._getWindowRect()
+            self._rect._left = r.left # Setting _left directly to skip the onRead.
+            self._rect._top = r.top # Setting _top directly to skip the onRead.
+            self._rect._width = r.right - r.left # Setting _width directly to skip the onRead.
+            self._rect._height = r.bottom - r.top # Setting _height directly to skip the onRead.
+
+        def _onChange(oldBox, newBox):
+            self.moveTo(newBox.left, newBox.top)
+            self.resizeTo(newBox.width, newBox.height)
+
+        r = self._getWindowRect()
+        self._rect = pyrect.Rect(r.left, r.top, r.right - r.left, r.bottom - r.top, onChange=_onChange, onRead=_onRead)
+
+    def __str__(self):
+        r = self._getWindowRect()
+        width = r.right - r.left
+        height = r.bottom - r.top
+        return '<%s left="%s", top="%s", width="%s", height="%s", title="%s">' % (self.__class__.__name__, r.left, r.top, width, height, self.title)
+
+
+    def __repr__(self):
+        return '%s(hWnd=%s)' % (self.__class__.__name__, self._hWnd)
+
+
+    def __eq__(self, other):
+        return isinstance(other, MacOSWindow) and self._hWnd == other._hWnd
+
+
+    def close(self):
+        """Closes this window. This may trigger "Are you sure you want to
+        quit?" dialogs or other actions that prevent the window from
+        actually closing.
This is identical to clicking the X button on the + window.""" + raise NotImplementedError + + def minimize(self): + """Minimizes this window.""" + raise NotImplementedError + + + def maximize(self): + """Maximizes this window.""" + raise NotImplementedError + + + def restore(self): + """If maximized or minimized, restores the window to it's normal size.""" + raise NotImplementedError + + + def activate(self): + """Activate this window and make it the foreground window.""" + raise NotImplementedError + + + def resizeRel(self, widthOffset, heightOffset): + """Resizes the window relative to its current size.""" + raise NotImplementedError + + + def resizeTo(self, newWidth, newHeight): + """Resizes the window to a new width and height.""" + raise NotImplementedError + + + def moveRel(self, xOffset, yOffset): + """Moves the window relative to its current position.""" + raise NotImplementedError + + + def moveTo(self, newLeft, newTop): + """Moves the window to new coordinates on the screen.""" + raise NotImplementedError + + + @property + def isMinimized(self): + """Returns True if the window is currently minimized.""" + raise NotImplementedError + + @property + def isMaximized(self): + """Returns True if the window is currently maximized.""" + raise NotImplementedError + + @property + def isActive(self): + """Returns True if the window is currently the active, foreground window.""" + raise NotImplementedError + + @property + def title(self): + """Returns the window title as a string.""" + raise NotImplementedError + + @property + def visible(self): + raise NotImplementedError + + diff --git a/templates/skills/control_windows/dependencies/pygetwindow/_pygetwindow_win.py b/templates/skills/control_windows/dependencies/pygetwindow/_pygetwindow_win.py new file mode 100644 index 00000000..bbb7ae1e --- /dev/null +++ b/templates/skills/control_windows/dependencies/pygetwindow/_pygetwindow_win.py @@ -0,0 +1,351 @@ +import ctypes +from ctypes import wintypes # We can't use ctypes.wintypes, we must import wintypes this way. + +from pygetwindow import PyGetWindowException, pointInRect, BaseWindow, Rect, Point, Size + + +NULL = 0 # Used to match the Win32 API value of "null". + +# These FORMAT_MESSAGE_ constants are used for FormatMesage() and are +# documented at https://docs.microsoft.com/en-us/windows/desktop/api/winbase/nf-winbase-formatmessage#parameters +FORMAT_MESSAGE_ALLOCATE_BUFFER = 0x00000100 +FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000 +FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200 + +# These SW_ constants are used for ShowWindow() and are documented at +# https://docs.microsoft.com/en-us/windows/desktop/api/winuser/nf-winuser-showwindow#parameters +SW_MINIMIZE = 6 +SW_MAXIMIZE = 3 +SW_HIDE = 0 +SW_SHOW = 5 +SW_RESTORE = 9 + +# SetWindowPos constants: +HWND_TOP = 0 + +# Window Message constants: +WM_CLOSE = 0x0010 + +# This ctypes structure is for a Win32 POINT structure, +# which is documented here: http://msdn.microsoft.com/en-us/library/windows/desktop/dd162805(v=vs.85).aspx +# The POINT structure is used by GetCursorPos(). 
+class POINT(ctypes.Structure): + _fields_ = [("x", ctypes.c_long), + ("y", ctypes.c_long)] + +enumWindows = ctypes.windll.user32.EnumWindows +enumWindowsProc = ctypes.WINFUNCTYPE(ctypes.c_bool, ctypes.c_int, ctypes.POINTER(ctypes.c_int)) +getWindowText = ctypes.windll.user32.GetWindowTextW +getWindowTextLength = ctypes.windll.user32.GetWindowTextLengthW +isWindowVisible = ctypes.windll.user32.IsWindowVisible + + +class RECT(ctypes.Structure): + """A nice wrapper of the RECT structure. + + Microsoft Documentation: + https://msdn.microsoft.com/en-us/library/windows/desktop/dd162897(v=vs.85).aspx + """ + _fields_ = [('left', ctypes.c_long), + ('top', ctypes.c_long), + ('right', ctypes.c_long), + ('bottom', ctypes.c_long)] + + +def _getAllTitles(): + # This code taken from https://sjohannes.wordpress.com/2012/03/23/win32-python-getting-all-window-titles/ + # A correction to this code (for enumWindowsProc) is here: http://makble.com/the-story-of-lpclong + titles = [] + def foreach_window(hWnd, lParam): + if isWindowVisible(hWnd): + length = getWindowTextLength(hWnd) + buff = ctypes.create_unicode_buffer(length + 1) + getWindowText(hWnd, buff, length + 1) + titles.append((hWnd, buff.value)) + return True + enumWindows(enumWindowsProc(foreach_window), 0) + + return titles + + +def _formatMessage(errorCode): + """A nice wrapper for FormatMessageW(). TODO + + Microsoft Documentation: + https://docs.microsoft.com/en-us/windows/desktop/api/winbase/nf-winbase-formatmessagew + + Additional information: + https://stackoverflow.com/questions/18905702/python-ctypes-and-mutable-buffers + https://stackoverflow.com/questions/455434/how-should-i-use-formatmessage-properly-in-c + """ + lpBuffer = wintypes.LPWSTR() + + ctypes.windll.kernel32.FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, + errorCode, + 0, # dwLanguageId + ctypes.cast(ctypes.byref(lpBuffer), wintypes.LPWSTR), + 0, # nSize + NULL) + msg = lpBuffer.value.rstrip() + ctypes.windll.kernel32.LocalFree(lpBuffer) # Free the memory allocated for the error message's buffer. + return msg + + +def _raiseWithLastError(): + """A helper function that raises PyGetWindowException using the error + information from GetLastError() and FormatMessage().""" + errorCode = ctypes.windll.kernel32.GetLastError() + raise PyGetWindowException('Error code from Windows: %s - %s' % (errorCode, _formatMessage(errorCode))) + + +def getActiveWindow(): + """Returns a Window object of the currently active (focused) Window.""" + hWnd = ctypes.windll.user32.GetForegroundWindow() + if hWnd == 0: + # TODO - raise error instead + return None # Note that this function doesn't use GetLastError(). + else: + return Win32Window(hWnd) + + +def getActiveWindowTitle(): + """Returns a string of the title text of the currently active (focused) Window.""" + # NOTE - This function isn't threadsafe because it relies on a global variable. I don't use nonlocal because I want this to work on Python 2. + + global activeWindowTitle + activeWindowHwnd = ctypes.windll.user32.GetForegroundWindow() + if activeWindowHwnd == 0: + # TODO - raise error instead + return None # Note that this function doesn't use GetLastError(). 
+ + def foreach_window(hWnd, lParam): + global activeWindowTitle + if hWnd == activeWindowHwnd: + length = getWindowTextLength(hWnd) + buff = ctypes.create_unicode_buffer(length + 1) + getWindowText(hWnd, buff, length + 1) + activeWindowTitle = buff.value + return True + enumWindows(enumWindowsProc(foreach_window), 0) + + return activeWindowTitle + + +def getWindowsAt(x, y): + """Returns a list of Window objects whose windows contain the point ``(x, y)``. + + * ``x`` (int, optional): The x position of the window(s). + * ``y`` (int, optional): The y position of the window(s).""" + windowsAtXY = [] + for window in getAllWindows(): + if pointInRect(x, y, window.left, window.top, window.width, window.height): + windowsAtXY.append(window) + return windowsAtXY + + +def getWindowsWithTitle(title): + """Returns a list of Window objects that substring match ``title`` in their title text.""" + hWndsAndTitles = _getAllTitles() + windowObjs = [] + for hWnd, winTitle in hWndsAndTitles: + if title.upper() in winTitle.upper(): # do a case-insensitive match + windowObjs.append(Win32Window(hWnd)) + return windowObjs + + +def getAllTitles(): + """Returns a list of strings of window titles for all visible windows. + """ + return [window.title for window in getAllWindows()] + + +def getAllWindows(): + """Returns a list of Window objects for all visible windows. + """ + windowObjs = [] + def foreach_window(hWnd, lParam): + if ctypes.windll.user32.IsWindowVisible(hWnd) != 0: + windowObjs.append(Win32Window(hWnd)) + return True + enumWindows(enumWindowsProc(foreach_window), 0) + + return windowObjs + + +class Win32Window(BaseWindow): + def __init__(self, hWnd): + self._hWnd = hWnd # TODO fix this, this is a LP_c_long insead of an int. + self._setupRectProperties() + + + def _getWindowRect(self): + """A nice wrapper for GetWindowRect(). TODO + + Syntax: + BOOL GetWindowRect( + HWND hWnd, + LPRECT lpRect + ); + + Microsoft Documentation: + https://docs.microsoft.com/en-us/windows/desktop/api/winuser/nf-winuser-getwindowrect + """ + rect = RECT() + result = ctypes.windll.user32.GetWindowRect(self._hWnd, ctypes.byref(rect)) + if result != 0: + return Rect(rect.left, rect.top, rect.right, rect.bottom) + else: + _raiseWithLastError() + + + def __repr__(self): + return '%s(hWnd=%s)' % (self.__class__.__name__, self._hWnd) + + + def __eq__(self, other): + return isinstance(other, Win32Window) and self._hWnd == other._hWnd + + + def close(self): + """Closes this window. This may trigger "Are you sure you want to + quit?" dialogs or other actions that prevent the window from + actually closing. 
This is identical to clicking the X button on the + window.""" + result = ctypes.windll.user32.PostMessageA(self._hWnd, WM_CLOSE, 0, 0) + if result == 0: + _raiseWithLastError() + + + def minimize(self): + """Minimizes this window.""" + ctypes.windll.user32.ShowWindow(self._hWnd, SW_MINIMIZE) + + + def maximize(self): + """Maximizes this window.""" + ctypes.windll.user32.ShowWindow(self._hWnd, SW_MAXIMIZE) + + + def restore(self): + """If maximized or minimized, restores the window to it's normal size.""" + ctypes.windll.user32.ShowWindow(self._hWnd, SW_RESTORE) + + def show(self): + """If hidden or showing, shows the window on screen and in title bar.""" + ctypes.windll.user32.ShowWindow(self._hWnd,SW_SHOW) + + def hide(self): + """If hidden or showing, hides the window from screen and title bar.""" + ctypes.windll.user32.ShowWindow(self._hWnd,SW_HIDE) + + def activate(self): + """Activate this window and make it the foreground (focused) window.""" + result = ctypes.windll.user32.SetForegroundWindow(self._hWnd) + if result == 0: + _raiseWithLastError() + + + def resize(self, widthOffset, heightOffset): + """Resizes the window relative to its current size.""" + result = ctypes.windll.user32.SetWindowPos(self._hWnd, HWND_TOP, self.left, self.top, self.width + widthOffset, self.height + heightOffset, 0) + if result == 0: + _raiseWithLastError() + resizeRel = resize # resizeRel is an alias for the resize() method. + + def resizeTo(self, newWidth, newHeight): + """Resizes the window to a new width and height.""" + result = ctypes.windll.user32.SetWindowPos(self._hWnd, HWND_TOP, self.left, self.top, newWidth, newHeight, 0) + if result == 0: + _raiseWithLastError() + + + def move(self, xOffset, yOffset): + """Moves the window relative to its current position.""" + result = ctypes.windll.user32.SetWindowPos(self._hWnd, HWND_TOP, self.left + xOffset, self.top + yOffset, self.width, self.height, 0) + if result == 0: + _raiseWithLastError() + moveRel = move # moveRel is an alias for the move() method. + + def moveTo(self, newLeft, newTop): + """Moves the window to new coordinates on the screen.""" + result = ctypes.windll.user32.SetWindowPos(self._hWnd, HWND_TOP, newLeft, newTop, self.width, self.height, 0) + if result == 0: + _raiseWithLastError() + + + @property + def isMinimized(self): + """Returns ``True`` if the window is currently minimized.""" + return ctypes.windll.user32.IsIconic(self._hWnd) != 0 + + @property + def isMaximized(self): + """Returns ``True`` if the window is currently maximized.""" + return ctypes.windll.user32.IsZoomed(self._hWnd) != 0 + + @property + def isActive(self): + """Returns ``True`` if the window is currently the active, foreground window.""" + return getActiveWindow() == self + + @property + def title(self): + """Returns the window title as a string.""" + textLenInCharacters = ctypes.windll.user32.GetWindowTextLengthW(self._hWnd) + stringBuffer = ctypes.create_unicode_buffer(textLenInCharacters + 1) # +1 for the \0 at the end of the null-terminated string. + ctypes.windll.user32.GetWindowTextW(self._hWnd, stringBuffer, textLenInCharacters + 1) + + # TODO it's ambiguous if an error happened or the title text is just empty. Look into this later. + return stringBuffer.value + + @property + def visible(self): + """Return ``True`` if the window is currently visible.""" + return isWindowVisible(self._hWnd) + + +def cursor(): + """Returns the current xy coordinates of the mouse cursor as a two-integer + tuple by calling the GetCursorPos() win32 function. 
+ + Returns: + (x, y) tuple of the current xy coordinates of the mouse cursor. + """ + + cursor = POINT() + ctypes.windll.user32.GetCursorPos(ctypes.byref(cursor)) + return Point(x=cursor.x, y=cursor.y) + + +def resolution(): + """Returns the width and height of the screen as a two-integer tuple. + + Returns: + (width, height) tuple of the screen size, in pixels. + """ + return Size(width=ctypes.windll.user32.GetSystemMetrics(0), height=ctypes.windll.user32.GetSystemMetrics(1)) + +''' +def displayWindowsUnderMouse(xOffset=0, yOffset=0): + """This function is meant to be run from the command line. It will + automatically display the location and RGB of the mouse cursor.""" + print('Press Ctrl-C to quit.') + if xOffset != 0 or yOffset != 0: + print('xOffset: %s yOffset: %s' % (xOffset, yOffset)) + resolution = size() + try: + while True: + # Get and print the mouse coordinates. + x, y = position() + positionStr = 'X: ' + str(x - xOffset).rjust(4) + ' Y: ' + str(y - yOffset).rjust(4) + + # TODO - display windows under the mouse + + sys.stdout.write(positionStr) + sys.stdout.write('\b' * len(positionStr)) + sys.stdout.flush() + except KeyboardInterrupt: + sys.stdout.write('\n') + sys.stdout.flush() +''' diff --git a/templates/skills/control_windows/dependencies/pyrect/__init__.py b/templates/skills/control_windows/dependencies/pyrect/__init__.py new file mode 100644 index 00000000..ca24291d --- /dev/null +++ b/templates/skills/control_windows/dependencies/pyrect/__init__.py @@ -0,0 +1,1462 @@ +import doctest +import collections + +# TODO - finish doc tests + +# TODO - unit tests needed for get/set and Box named tuple + +__version__ = "0.2.0" + + +# Constants for rectangle attributes: +TOP = "top" +BOTTOM = "bottom" +LEFT = "left" +RIGHT = "right" +TOPLEFT = "topleft" +TOPRIGHT = "topright" +BOTTOMLEFT = "bottomleft" +BOTTOMRIGHT = "bottomright" +MIDTOP = "midtop" +MIDRIGHT = "midright" +MIDLEFT = "midleft" +MIDBOTTOM = "midbottom" +CENTER = "center" +CENTERX = "centerx" +CENTERY = "centery" +WIDTH = "width" +HEIGHT = "height" +SIZE = "size" +BOX = "box" +AREA = "area" +PERIMETER = "perimeter" + +Box = collections.namedtuple("Box", "left top width height") +Point = collections.namedtuple("Point", "x y") +Size = collections.namedtuple("Size", "width height") + + +class PyRectException(Exception): + """ + This class exists for PyRect exceptions. If the PyRect module raises any + non-PyRectException exceptions, this indicates there's a bug in PyRect. + """ + + pass + + +def _checkForIntOrFloat(arg): + """Raises an exception if arg is not an int or float. Always returns None.""" + if not isinstance(arg, (int, float)): + raise PyRectException( + "argument must be int or float, not %s" % (arg.__class__.__name__) + ) + + +def _checkForInt(arg): + """Raises an exception if arg is not an int. 
Always returns None.""" + if not isinstance(arg, int): + raise PyRectException( + "argument must be int or float, not %s" % (arg.__class__.__name__) + ) + + +def _checkForTwoIntOrFloatTuple(arg): + try: + if not isinstance(arg[0], (int, float)) or not isinstance(arg[1], (int, float)): + raise PyRectException( + "argument must be a two-item tuple containing int or float values" + ) + except: + raise PyRectException( + "argument must be a two-item tuple containing int or float values" + ) + + +def _checkForFourIntOrFloatTuple(arg): + try: + if ( + not isinstance(arg[0], (int, float)) + or not isinstance(arg[1], (int, float)) + or not isinstance(arg[2], (int, float)) + or not isinstance(arg[3], (int, float)) + ): + raise PyRectException( + "argument must be a four-item tuple containing int or float values" + ) + except: + raise PyRectException( + "argument must be a four-item tuple containing int or float values" + ) + + +def _collides(rectOrPoint1, rectOrPoint2): + """Returns True if rectOrPoint1 and rectOrPoint2 collide with each other.""" + + +def _getRectsAndPoints(rectsOrPoints): + points = [] + rects = [] + for rectOrPoint in rectsOrPoints: + try: + _checkForTwoIntOrFloatTuple(rectOrPoint) + points.append(rectOrPoint) + except PyRectException: + try: + _checkForFourIntOrFloatTuple(rectOrPoint) + except: + raise PyRectException("argument is not a point or a rect tuple") + rects.append(rectOrPoint) + return (rects, points) + + +''' +def collideAnyBetween(rectsOrPoints): + """Returns True if any of the (x, y) or (left, top, width, height) tuples + in rectsOrPoints collides with any other point or box tuple in rectsOrPoints. + + >>> p1 = (50, 50) + >>> p2 = (100, 100) + >>> p3 = (50, 200) + >>> r1 = (-50, -50, 20, 20) + >>> r2 = (25, 25, 50, 50) + >>> collideAnyBetween([p1, p2, p3, r1, r2]) # p1 and r2 collide + True + >>> collideAnyBetween([p1, p2, p3, r1]) + False + """ + # TODO - needs to be complete + + # split up + rects, points = _getRectsAndPoints(rectsOrPoints) + + # compare points with each other + if len(points) > 1: + for point in points: + if point != points[0]: + return False + + # TODO finish +''' + + +''' +def collideAllBetween(rectsOrPoints): + """Returns True if any of the (x, y) or (left, top, width, height) tuples + in rectsOrPoints collides with any other point or box tuple in rectsOrPoints. + + >>> p1 = (50, 50) + >>> p2 = (100, 100) + >>> p3 = (50, 200) + >>> r1 = (-50, -50, 20, 20) + >>> r2 = (25, 25, 50, 50) + >>> collideAllBetween([p1, p2, p3, r1, r2]) + False + >>> collideAllBetween([p1, p2, p3, r1]) + False + >>> collideAllBetween([p1, r2]) # Everything in the list collides with each other. 
+ True + """ + + # Check for valid arguments + try: + for rectOrPoint in rectsOrPoints: + if len(rectOrPoint) == 2: + _checkForTwoIntOrFloatTuple(rectOrPoint) + elif len(rectOrPoint) == 4: + _checkForFourIntOrFloatTuple(rectOrPoint) + else: + raise PyRectException() + except: + raise PyRectException('Arguments in rectsOrPoints must be 2- or 4-integer/float tuples.') + + raise NotImplementedError # return a list of all rects or points that collide with any other in the argument +''' + + +class Rect(object): + def __init__( + self, + left=0, + top=0, + width=0, + height=0, + enableFloat=False, + readOnly=False, + onChange=None, + onRead=None, + ): + _checkForIntOrFloat(width) + _checkForIntOrFloat(height) + _checkForIntOrFloat(left) + _checkForIntOrFloat(top) + + self._enableFloat = bool(enableFloat) + self._readOnly = bool(readOnly) + + if onChange is not None and not callable(onChange): + raise PyRectException( + "onChange argument must be None or callable (function, method, etc.)" + ) + self.onChange = onChange + + if onRead is not None and not callable(onRead): + raise PyRectException( + "onRead argument must be None or callable (function, method, etc.)" + ) + self.onRead = onRead + + if enableFloat: + self._width = float(width) + self._height = float(height) + self._left = float(left) + self._top = float(top) + else: + self._width = int(width) + self._height = int(height) + self._left = int(left) + self._top = int(top) + + # OPERATOR OVERLOADING / DUNDER METHODS + def __repr__(self): + """Return a string of the constructor function call to create this Rect object.""" + return "%s(left=%s, top=%s, width=%s, height=%s)" % ( + self.__class__.__name__, + self._left, + self._top, + self._width, + self._height, + ) + + def __str__(self): + """Return a string representation of this Rect object.""" + return "(x=%s, y=%s, w=%s, h=%s)" % ( + self._left, + self._top, + self._width, + self._height, + ) + + def callOnChange(self, oldLeft, oldTop, oldWidth, oldHeight): + # Note: callOnChange() should be called *after* the attribute has been changed. + # Note: This isn't thread safe; the attributes can change between the calling of this function and the code in the function running. + if self.onChange is not None: + self.onChange( + Box(oldLeft, oldTop, oldWidth, oldHeight), + Box(self._left, self._top, self._width, self._height), + ) + + @property + def enableFloat(self): + """ + A Boolean attribute that determines if this rectangle uses floating point + numbers for its position and size. False, by default. + + >>> r = Rect(0, 0, 10, 20) + >>> r.enableFloat + False + >>> r.enableFloat = True + >>> r.top = 3.14 + >>> r + Rect(left=0.0, top=3.14, width=10.0, height=20.0) + """ + return self._enableFloat + + @enableFloat.setter + def enableFloat(self, value): + if not isinstance(value, bool): + raise PyRectException("enableFloat must be set to a bool value") + self._enableFloat = value + + if self._enableFloat: + self._left = float(self._left) + self._top = float(self._top) + self._width = float(self._width) + self._height = float(self._height) + else: + self._left = int(self._left) + self._top = int(self._top) + self._width = int(self._width) + self._height = int(self._height) + + # LEFT SIDE PROPERTY + @property + def left(self): + """ + The x coordinate for the left edge of the rectangle. `x` is an alias for `left`. 
+ + >>> r = Rect(0, 0, 10, 20) + >>> r.left + 0 + >>> r.left = 50 + >>> r + Rect(left=50, top=0, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(LEFT) + return self._left + + @left.setter + def left(self, newLeft): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newLeft) + if ( + newLeft != self._left + ): # Only run this code if the size/position has changed. + originalLeft = self._left + if self._enableFloat: + self._left = newLeft + else: + self._left = int(newLeft) + self.callOnChange(originalLeft, self._top, self._width, self._height) + + x = left # x is an alias for left + + # TOP SIDE PROPERTY + @property + def top(self): + """ + The y coordinate for the top edge of the rectangle. `y` is an alias for `top`. + + >>> r = Rect(0, 0, 10, 20) + >>> r.top + 0 + >>> r.top = 50 + >>> r + Rect(left=0, top=50, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(TOP) + return self._top + + @top.setter + def top(self, newTop): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newTop) + if newTop != self._top: # Only run this code if the size/position has changed. + originalTop = self._top + if self._enableFloat: + self._top = newTop + else: + self._top = int(newTop) + self.callOnChange(self._left, originalTop, self._width, self._height) + + y = top # y is an alias for top + + # RIGHT SIDE PROPERTY + @property + def right(self): + """ + The x coordinate for the right edge of the rectangle. + + >>> r = Rect(0, 0, 10, 20) + >>> r.right + 10 + >>> r.right = 50 + >>> r + Rect(left=40, top=0, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(RIGHT) + return self._left + self._width + + @right.setter + def right(self, newRight): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newRight) + if ( + newRight != self._left + self._width + ): # Only run this code if the size/position has changed. + originalLeft = self._left + if self._enableFloat: + self._left = newRight - self._width + else: + self._left = int(newRight) - self._width + self.callOnChange(originalLeft, self._top, self._width, self._height) + + # BOTTOM SIDE PROPERTY + @property + def bottom(self): + """The y coordinate for the bottom edge of the rectangle. + + >>> r = Rect(0, 0, 10, 20) + >>> r.bottom + 20 + >>> r.bottom = 30 + >>> r + Rect(left=0, top=10, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(BOTTOM) + return self._top + self._height + + @bottom.setter + def bottom(self, newBottom): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newBottom) + if ( + newBottom != self._top + self._height + ): # Only run this code if the size/position has changed. + originalTop = self._top + if self._enableFloat: + self._top = newBottom - self._height + else: + self._top = int(newBottom) - self._height + self.callOnChange(self._left, originalTop, self._width, self._height) + + # TOP LEFT CORNER PROPERTY + @property + def topleft(self): + """ + The x and y coordinates for the top right corner of the rectangle, as a tuple. 
+ + >>> r = Rect(0, 0, 10, 20) + >>> r.topleft + (0, 0) + >>> r.topleft = (30, 30) + >>> r + Rect(left=30, top=30, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(TOPLEFT) + return Point(x=self._left, y=self._top) + + @topleft.setter + def topleft(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newLeft, newTop = value + if (newLeft != self._left) or ( + newTop != self._top + ): # Only run this code if the size/position has changed. + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + self._left = newLeft + self._top = newTop + else: + self._left = int(newLeft) + self._top = int(newTop) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # BOTTOM LEFT CORNER PROPERTY + @property + def bottomleft(self): + """ + The x and y coordinates for the bottom right corner of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.bottomleft + (0, 20) + >>> r.bottomleft = (30, 30) + >>> r + Rect(left=30, top=10, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(BOTTOMLEFT) + return Point(x=self._left, y=self._top + self._height) + + @bottomleft.setter + def bottomleft(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newLeft, newBottom = value + if (newLeft != self._left) or ( + newBottom != self._top + self._height + ): # Only run this code if the size/position has changed. + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + self._left = newLeft + self._top = newBottom - self._height + else: + self._left = int(newLeft) + self._top = int(newBottom) - self._height + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # TOP RIGHT CORNER PROPERTY + @property + def topright(self): + """ + The x and y coordinates for the top right corner of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.topright + (10, 0) + >>> r.topright = (30, 30) + >>> r + Rect(left=20, top=30, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(TOPRIGHT) + return Point(x=self._left + self._width, y=self._top) + + @topright.setter + def topright(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newRight, newTop = value + if (newRight != self._left + self._width) or ( + newTop != self._top + ): # Only run this code if the size/position has changed. + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + self._left = newRight - self._width + self._top = newTop + else: + self._left = int(newRight) - self._width + self._top = int(newTop) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # BOTTOM RIGHT CORNER PROPERTY + @property + def bottomright(self): + """ + The x and y coordinates for the bottom right corner of the rectangle, as a tuple. 
+ + >>> r = Rect(0, 0, 10, 20) + >>> r.bottomright + (10, 20) + >>> r.bottomright = (30, 30) + >>> r + Rect(left=20, top=10, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(BOTTOMRIGHT) + return Point(x=self._left + self._width, y=self._top + self._height) + + @bottomright.setter + def bottomright(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newRight, newBottom = value + if (newBottom != self._top + self._height) or ( + newRight != self._left + self._width + ): # Only run this code if the size/position has changed. + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + self._left = newRight - self._width + self._top = newBottom - self._height + else: + self._left = int(newRight) - self._width + self._top = int(newBottom) - self._height + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # MIDDLE OF TOP SIDE PROPERTY + @property + def midtop(self): + """ + The x and y coordinates for the midpoint of the top edge of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.midtop + (5, 0) + >>> r.midtop = (40, 50) + >>> r + Rect(left=35, top=50, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(MIDTOP) + if self._enableFloat: + return Point(x=self._left + (self._width / 2.0), y=self._top) + else: + return Point(x=self._left + (self._width // 2), y=self._top) + + @midtop.setter + def midtop(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newMidTop, newTop = value + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + if (newMidTop != self._left + self._width / 2.0) or ( + newTop != self._top + ): # Only run this code if the size/position has changed. + self._left = newMidTop - (self._width / 2.0) + self._top = newTop + self.callOnChange(originalLeft, originalTop, self._width, self._height) + else: + if (newMidTop != self._left + self._width // 2) or ( + newTop != self._top + ): # Only run this code if the size/position has changed. + self._left = int(newMidTop) - (self._width // 2) + self._top = int(newTop) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # MIDDLE OF BOTTOM SIDE PROPERTY + @property + def midbottom(self): + """ + The x and y coordinates for the midpoint of the bottom edge of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.midbottom + (5, 20) + >>> r.midbottom = (40, 50) + >>> r + Rect(left=35, top=30, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(MIDBOTTOM) + if self._enableFloat: + return Point(x=self._left + (self._width / 2.0), y=self._top + self._height) + else: + return Point(x=self._left + (self._width // 2), y=self._top + self._height) + + @midbottom.setter + def midbottom(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newMidBottom, newBottom = value + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + if (newMidBottom != self._left + self._width / 2.0) or ( + newBottom != self._top + self._height + ): # Only run this code if the size/position has changed. 
+ self._left = newMidBottom - (self._width / 2.0) + self._top = newBottom - self._height + self.callOnChange(originalLeft, originalTop, self._width, self._height) + else: + if (newMidBottom != self._left + self._width // 2) or ( + newBottom != self._top + self._height + ): # Only run this code if the size/position has changed. + self._left = int(newMidBottom) - (self._width // 2) + self._top = int(newBottom) - self._height + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # MIDDLE OF LEFT SIDE PROPERTY + @property + def midleft(self): + """ + The x and y coordinates for the midpoint of the left edge of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.midleft + (0, 10) + >>> r.midleft = (40, 50) + >>> r + Rect(left=40, top=40, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(MIDLEFT) + if self._enableFloat: + return Point(x=self._left, y=self._top + (self._height / 2.0)) + else: + return Point(x=self._left, y=self._top + (self._height // 2)) + + @midleft.setter + def midleft(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newLeft, newMidLeft = value + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + if (newLeft != self._left) or ( + newMidLeft != self._top + (self._height / 2.0) + ): # Only run this code if the size/position has changed. + self._left = newLeft + self._top = newMidLeft - (self._height / 2.0) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + else: + if (newLeft != self._left) or ( + newMidLeft != self._top + (self._height // 2) + ): # Only run this code if the size/position has changed. + self._left = int(newLeft) + self._top = int(newMidLeft) - (self._height // 2) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # MIDDLE OF RIGHT SIDE PROPERTY + @property + def midright(self): + """ + The x and y coordinates for the midpoint of the right edge of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.midright + (10, 10) + >>> r.midright = (40, 50) + >>> r + Rect(left=30, top=40, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(MIDRIGHT) + if self._enableFloat: + return Point(x=self._left + self._width, y=self._top + (self._height / 2.0)) + else: + return Point(x=self._left + self._width, y=self._top + (self._height // 2)) + + @midright.setter + def midright(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newRight, newMidRight = value + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + if (newRight != self._left + self._width) or ( + newMidRight != self._top + self._height / 2.0 + ): # Only run this code if the size/position has changed. + self._left = newRight - self._width + self._top = newMidRight - (self._height / 2.0) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + else: + if (newRight != self._left + self._width) or ( + newMidRight != self._top + self._height // 2 + ): # Only run this code if the size/position has changed. + self._left = int(newRight) - self._width + self._top = int(newMidRight) - (self._height // 2) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # CENTER POINT PROPERTY + @property + def center(self): + """ + The x and y coordinates for the center of the rectangle, as a tuple. 
+ + >>> r = Rect(0, 0, 10, 20) + >>> r.center + (5, 10) + >>> r.center = (40, 50) + >>> r + Rect(left=35, top=40, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(CENTER) + if self._enableFloat: + return Point( + x=self._left + (self._width / 2.0), y=self._top + (self._height / 2.0) + ) + else: + return Point( + x=self._left + (self._width // 2), y=self._top + (self._height // 2) + ) + + @center.setter + def center(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newCenterx, newCentery = value + originalLeft = self._left + originalTop = self._top + if self._enableFloat: + if (newCenterx != self._left + self._width / 2.0) or ( + newCentery != self._top + self._height / 2.0 + ): # Only run this code if the size/position has changed. + self._left = newCenterx - (self._width / 2.0) + self._top = newCentery - (self._height / 2.0) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + else: + if (newCenterx != self._left + self._width // 2) or ( + newCentery != self._top + self._height // 2 + ): # Only run this code if the size/position has changed. + self._left = int(newCenterx) - (self._width // 2) + self._top = int(newCentery) - (self._height // 2) + self.callOnChange(originalLeft, originalTop, self._width, self._height) + + # X COORDINATE OF CENTER POINT PROPERTY + @property + def centerx(self): + """ + The x coordinate for the center of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.centerx + 5 + >>> r.centerx = 50 + >>> r + Rect(left=45, top=0, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(CENTERX) + if self._enableFloat: + return self._left + (self._width / 2.0) + else: + return self._left + (self._width // 2) + + @centerx.setter + def centerx(self, newCenterx): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newCenterx) + originalLeft = self._left + if self._enableFloat: + if ( + newCenterx != self._left + self._width / 2.0 + ): # Only run this code if the size/position has changed. + self._left = newCenterx - (self._width / 2.0) + self.callOnChange(originalLeft, self._top, self._width, self._height) + else: + if ( + newCenterx != self._left + self._width // 2 + ): # Only run this code if the size/position has changed. + self._left = int(newCenterx) - (self._width // 2) + self.callOnChange(originalLeft, self._top, self._width, self._height) + + # Y COORDINATE OF CENTER POINT PROPERTY + @property + def centery(self): + """ + The y coordinate for the center of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.centery + 10 + >>> r.centery = 50 + >>> r + Rect(left=0, top=40, width=10, height=20) + """ + if self.onRead is not None: + self.onRead(CENTERY) + if self._enableFloat: + return self._top + (self._height / 2.0) + else: + return self._top + (self._height // 2) + + @centery.setter + def centery(self, newCentery): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newCentery) + originalTop = self._top + if self._enableFloat: + if ( + newCentery != self._top + self._height / 2.0 + ): # Only run this code if the size/position has changed. + self._top = newCentery - (self._height / 2.0) + self.callOnChange(self._left, originalTop, self._width, self._height) + else: + if ( + newCentery != self._top + self._height // 2 + ): # Only run this code if the size/position has changed. 
+ self._top = int(newCentery) - (self._height // 2) + self.callOnChange(self._left, originalTop, self._width, self._height) + + # SIZE PROPERTY (i.e. (width, height)) + @property + def size(self): + """ + The width and height of the rectangle, as a tuple. + + >>> r = Rect(0, 0, 10, 20) + >>> r.size + (10, 20) + >>> r.size = (40, 50) + >>> r + Rect(left=0, top=0, width=40, height=50) + """ + if self.onRead is not None: + self.onRead(SIZE) + return Size(width=self._width, height=self._height) + + @size.setter + def size(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForTwoIntOrFloatTuple(value) + newWidth, newHeight = value + if newWidth != self._width or newHeight != self._height: + originalWidth = self._width + originalHeight = self._height + if self._enableFloat: + self._width = newWidth + self._height = newHeight + else: + self._width = int(newWidth) + self._height = int(newHeight) + self.callOnChange(self._left, self._top, originalWidth, originalHeight) + + # WIDTH PROPERTY + @property + def width(self): + """ + The width of the rectangle. `w` is an alias for `width`. + + >>> r = Rect(0, 0, 10, 20) + >>> r.width + 10 + >>> r.width = 50 + >>> r + Rect(left=0, top=0, width=50, height=20) + """ + if self.onRead is not None: + self.onRead(WIDTH) + return self._width + + @width.setter + def width(self, newWidth): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newWidth) + if ( + newWidth != self._width + ): # Only run this code if the size/position has changed. + originalWidth = self._width + if self._enableFloat: + self._width = newWidth + else: + self._width = int(newWidth) + self.callOnChange(self._left, self._top, originalWidth, self._height) + + w = width + + # HEIGHT PROPERTY + @property + def height(self): + """ + The height of the rectangle. `h` is an alias for `height` + + >>> r = Rect(0, 0, 10, 20) + >>> r.height + 20 + >>> r.height = 50 + >>> r + Rect(left=0, top=0, width=10, height=50) + """ + if self.onRead is not None: + self.onRead(HEIGHT) + return self._height + + @height.setter + def height(self, newHeight): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(newHeight) + if ( + newHeight != self._height + ): # Only run this code if the size/position has changed. + originalHeight = self._height + if self._enableFloat: + self._height = newHeight + else: + self._height = int(newHeight) + self.callOnChange(self._left, self._top, self._width, originalHeight) + + h = height + + # AREA PROPERTY + @property + def area(self): + """The area of the `Rect`, which is simply the width times the height. + This is a read-only attribute. + + >>> r = Rect(0, 0, 10, 20) + >>> r.area + 200 + """ + if self.onRead is not None: + self.onRead(AREA) + return self._width * self._height + + + # PERIMETER PROPERTY + @property + def perimeter(self): + """The perimeter of the `Rect`, which is simply the (width + height) * 2. + This is a read-only attribute. + + >>> r = Rect(0, 0, 10, 20) + >>> r.area + 200 + """ + if self.onRead is not None: + self.onRead(AREA) + return (self._width + self._height) * 2 + + + # BOX PROPERTY + @property + def box(self): + """A tuple of four integers: (left, top, width, height). 
+ + >>> r = Rect(0, 0, 10, 20) + >>> r.box + (0, 0, 10, 20) + >>> r.box = (5, 15, 100, 200) + >>> r.box + (5, 15, 100, 200)""" + if self.onRead is not None: + self.onRead(BOX) + return Box( + left=self._left, top=self._top, width=self._width, height=self._height + ) + + @box.setter + def box(self, value): + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForFourIntOrFloatTuple(value) + newLeft, newTop, newWidth, newHeight = value + if ( + (newLeft != self._left) + or (newTop != self._top) + or (newWidth != self._width) + or (newHeight != self._height) + ): + originalLeft = self._left + originalTop = self._top + originalWidth = self._width + originalHeight = self._height + if self._enableFloat: + self._left = float(newLeft) + self._top = float(newTop) + self._width = float(newWidth) + self._height = float(newHeight) + else: + self._left = int(newLeft) + self._top = int(newTop) + self._width = int(newWidth) + self._height = int(newHeight) + self.callOnChange(originalLeft, originalTop, originalWidth, originalHeight) + + def get(self, rectAttrName): + # Access via the properties so that it triggers onRead(). + if rectAttrName == TOP: + return self.top + elif rectAttrName == BOTTOM: + return self.bottom + elif rectAttrName == LEFT: + return self.left + elif rectAttrName == RIGHT: + return self.right + elif rectAttrName == TOPLEFT: + return self.topleft + elif rectAttrName == TOPRIGHT: + return self.topright + elif rectAttrName == BOTTOMLEFT: + return self.bottomleft + elif rectAttrName == BOTTOMRIGHT: + return self.bottomright + elif rectAttrName == MIDTOP: + return self.midtop + elif rectAttrName == MIDBOTTOM: + return self.midbottom + elif rectAttrName == MIDLEFT: + return self.midleft + elif rectAttrName == MIDRIGHT: + return self.midright + elif rectAttrName == CENTER: + return self.center + elif rectAttrName == CENTERX: + return self.centerx + elif rectAttrName == CENTERY: + return self.centery + elif rectAttrName == WIDTH: + return self.width + elif rectAttrName == HEIGHT: + return self.height + elif rectAttrName == SIZE: + return self.size + elif rectAttrName == AREA: + return self.area + elif rectAttrName == BOX: + return self.box + else: + raise PyRectException("'%s' is not a valid attribute name" % (rectAttrName)) + + def set(self, rectAttrName, value): + # Set via the properties so that it triggers onChange(). 
+ if rectAttrName == TOP: + self.top = value + elif rectAttrName == BOTTOM: + self.bottom = value + elif rectAttrName == LEFT: + self.left = value + elif rectAttrName == RIGHT: + self.right = value + elif rectAttrName == TOPLEFT: + self.topleft = value + elif rectAttrName == TOPRIGHT: + self.topright = value + elif rectAttrName == BOTTOMLEFT: + self.bottomleft = value + elif rectAttrName == BOTTOMRIGHT: + self.bottomright = value + elif rectAttrName == MIDTOP: + self.midtop = value + elif rectAttrName == MIDBOTTOM: + self.midbottom = value + elif rectAttrName == MIDLEFT: + self.midleft = value + elif rectAttrName == MIDRIGHT: + self.midright = value + elif rectAttrName == CENTER: + self.center = value + elif rectAttrName == CENTERX: + self.centerx = value + elif rectAttrName == CENTERY: + self.centery = value + elif rectAttrName == WIDTH: + self.width = value + elif rectAttrName == HEIGHT: + self.height = value + elif rectAttrName == SIZE: + self.size = value + elif rectAttrName == AREA: + raise PyRectException("area is a read-only attribute") + elif rectAttrName == BOX: + self.box = value + else: + raise PyRectException("'%s' is not a valid attribute name" % (rectAttrName)) + + def move(self, xOffset, yOffset): + """Moves this Rect object by the given offsets. The xOffset and yOffset + arguments can be any integer value, positive or negative. + >>> r = Rect(0, 0, 100, 100) + >>> r.move(10, 20) + >>> r + Rect(left=10, top=20, width=100, height=100) + """ + if self._readOnly: + raise PyRectException("Rect object is read-only") + + _checkForIntOrFloat(xOffset) + _checkForIntOrFloat(yOffset) + if self._enableFloat: + self._left += xOffset + self._top += yOffset + else: + self._left += int(xOffset) + self._top += int(yOffset) + + def copy(self): + """Return a copied `Rect` object with the same position and size as this + `Rect` object. + + >>> r1 = Rect(0, 0, 100, 150) + >>> r2 = r1.copy() + >>> r1 == r2 + True + >>> r2 + Rect(left=0, top=0, width=100, height=150) + """ + return Rect( + self._left, + self._top, + self._width, + self._height, + self._enableFloat, + self._readOnly, + ) + + def inflate(self, widthChange=0, heightChange=0): + """Increases the size of this Rect object by the given offsets. The + rectangle's center doesn't move. Negative values will shrink the + rectangle. + + >>> r = Rect(0, 0, 100, 150) + >>> r.inflate(20, 40) + >>> r + Rect(left=-10, top=-20, width=120, height=190) + """ + if self._readOnly: + raise PyRectException("Rect object is read-only") + + originalCenter = self.center + self.width += widthChange + self.height += heightChange + self.center = originalCenter + + def clamp(self, otherRect): + """Centers this Rect object at the center of otherRect. + + >>> r1 =Rect(0, 0, 100, 100) + >>> r2 = Rect(-20, -90, 50, 50) + >>> r2.clamp(r1) + >>> r2 + Rect(left=25, top=25, width=50, height=50) + >>> r1.center == r2.center + True + """ + if self._readOnly: + raise PyRectException("Rect object is read-only") + + self.center = otherRect.center + + ''' + def intersection(self, otherRect): + """Returns a new Rect object of the overlapping area between this + Rect object and otherRect. + + `clip()` is an alias for `intersection()`. + """ + pass + + clip = intersection + ''' + + def union(self, otherRect): + """Adjusts the width and height to also cover the area of `otherRect`. 
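+        Note that this modifies the Rect in place; it does not return a new Rect.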
+
+        >>> r1 = Rect(0, 0, 100, 100)
+        >>> r2 = Rect(-10, -10, 100, 100)
+        >>> r1.union(r2)
+        >>> r1
+        Rect(left=-10, top=-10, width=110, height=110)
+        """
+
+        # TODO - Change otherRect so that it could be a point as well.
+
+        unionLeft = min(self._left, otherRect._left)
+        unionTop = min(self._top, otherRect._top)
+        unionRight = max(self.right, otherRect.right)
+        unionBottom = max(self.bottom, otherRect.bottom)
+
+        self._left = unionLeft
+        self._top = unionTop
+        self._width = unionRight - unionLeft
+        self._height = unionBottom - unionTop
+
+    def unionAll(self, otherRects):
+        """Adjusts the width and height to also cover all the `Rect` objects in
+        the `otherRects` sequence.
+
+        >>> r = Rect(0, 0, 100, 100)
+        >>> r1 = Rect(0, 0, 150, 100)
+        >>> r2 = Rect(-10, -10, 100, 100)
+        >>> r.unionAll([r1, r2])
+        >>> r
+        Rect(left=-10, top=-10, width=160, height=110)
+        """
+
+        # TODO - Change otherRect so that it could be a point as well.
+
+        otherRects = list(otherRects)
+        otherRects.append(self)
+
+        unionLeft = min([r._left for r in otherRects])
+        unionTop = min([r._top for r in otherRects])
+        unionRight = max([r.right for r in otherRects])
+        unionBottom = max([r.bottom for r in otherRects])
+
+        self._left = unionLeft
+        self._top = unionTop
+        self._width = unionRight - unionLeft
+        self._height = unionBottom - unionTop
+
+    """
+    def fit(self, other):
+        pass # TODO - needs to be complete
+    """
+
+    def normalize(self):
+        """Rect objects with a negative width or height cover a region where the
+        right/bottom edge is to the left of/above the left/top edge, respectively.
+        The `normalize()` method sets the `width` and `height` to positive if they
+        were negative.
+
+        The Rect stays in the same place, though with the `top` and `left`
+        attributes representing the true top and left side.
+
+        >>> r = Rect(0, 0, -10, -20)
+        >>> r.normalize()
+        >>> r
+        Rect(left=-10, top=-20, width=10, height=20)
+        """
+        if self._readOnly:
+            raise PyRectException("Rect object is read-only")
+
+        if self._width < 0:
+            self._width = -self._width
+            self._left -= self._width
+        if self._height < 0:
+            self._height = -self._height
+            self._top -= self._height
+        # Note: No need to intify here, since the four attributes should already be ints and no multiplication was done.
+
+    def __contains__(
+        self, value
+    ):  # for either points or other Rect objects. For Rects, the *entire* Rect must be in this Rect.
+        if isinstance(value, Rect):
+            return (
+                value.topleft in self
+                and value.topright in self
+                and value.bottomleft in self
+                and value.bottomright in self
+            )
+
+        # Check if value is an (x, y) sequence or a (left, top, width, height) sequence.
+        try:
+            len(value)
+        except:
+            raise PyRectException(
+                "in requires an (x, y) tuple, a (left, top, width, height) tuple, or a Rect object as left operand, not %s"
+                % (value.__class__.__name__)
+            )
+
+        if len(value) == 2:
+            # Assume that value is an (x, y) sequence.
+            _checkForTwoIntOrFloatTuple(value)
+            x, y = value
+            return (
+                self._left < x < self._left + self._width
+                and self._top < y < self._top + self._height
+            )
+
+        elif len(value) == 4:
+            # Assume that value is a (left, top, width, height) sequence.
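+            # A box is only "in" this Rect if all four of its corners are inside.
+            # The point checks use strict < comparisons, so points exactly on an
+            # edge do not count as contained.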
+            _checkForFourIntOrFloatTuple(value)
+            left, top, width, height = value
+            return (
+                (left, top) in self
+                and (left + width, top) in self
+                and (left, top + height) in self
+                and (left + width, top + height) in self
+            )
+        else:
+            raise PyRectException(
+                "in requires an (x, y) tuple, a (left, top, width, height) tuple, or a Rect object as left operand, not %s"
+                % (value.__class__.__name__)
+            )
+
+    def collide(self, value):
+        """Returns `True` if value collides with this `Rect` object, where value can
+        be an (x, y) tuple, a (left, top, width, height) box tuple, or another `Rect`
+        object. If value represents a rectangular area, any part of that area
+        can collide with this `Rect` object to make `collide()` return `True`.
+        Otherwise, returns `False`."""
+
+        # Note: This code is similar to __contains__(), with some minor changes
+        # because __contains__() requires the rectangular area to be COMPLETELY
+        # within the Rect object.
+        if isinstance(value, Rect):
+            return (
+                value.topleft in self
+                or value.topright in self
+                or value.bottomleft in self
+                or value.bottomright in self
+            )
+
+        # Check if value is an (x, y) sequence or a (left, top, width, height) sequence.
+        try:
+            len(value)
+        except:
+            raise PyRectException(
+                "collide() requires an (x, y) tuple, a (left, top, width, height) tuple, or a Rect object, not %s"
+                % (value.__class__.__name__)
+            )
+
+        if len(value) == 2:
+            # Assume that value is an (x, y) sequence.
+            _checkForTwoIntOrFloatTuple(value)
+            x, y = value
+            return (
+                self._left < x < self._left + self._width
+                and self._top < y < self._top + self._height
+            )
+
+        elif len(value) == 4:
+            # Assume that value is a (left, top, width, height) sequence.
+            left, top, width, height = value
+            return (
+                (left, top) in self
+                or (left + width, top) in self
+                or (left, top + height) in self
+                or (left + width, top + height) in self
+            )
+        else:
+            raise PyRectException(
+                "collide() requires an (x, y) tuple, a (left, top, width, height) tuple, or a Rect object, not %s"
+                % (value.__class__.__name__)
+            )
+
+    '''
+    def collideAny(self, rectsOrPoints):
+        """Returns True if any of the (x, y) or (left, top, width, height)
+        tuples in rectsOrPoints is inside this Rect object.
+
+        >> r = Rect(0, 0, 100, 100)
+        >> p1 = (150, 80)
+        >> p2 = (100, 100) # This point collides.
+        >> r.collideAny([p1, p2])
+        True
+        >> r1 = Rect(50, 50, 10, 20) # This Rect collides.
+        >> r.collideAny([r1])
+        True
+        >> r.collideAny([p1, p2, r1])
+        True
+        """
+        # TODO - needs to be complete
+        pass # returns True or False
+        raise NotImplementedError
+'''
+
+    '''
+    def collideAll(self, rectsOrPoints):
+        """Returns True if all of the (x, y) or (left, top, width, height)
+        tuples in rectsOrPoints are inside this Rect object.
+        """
+
+        pass # return a list of all rects or points that collide with any other in the argument
+        raise NotImplementedError
+'''
+
+    # TODO - Add overloaded operators for + - * / and others once we can determine actual use cases for them.
+
+    """NOTE: All of the comparison magic methods compare the box tuple of Rect
+    objects. This is the behavior of the pygame Rect objects. Originally,
+    I thought about having the <, <=, >, and >= operators compare the area
+    of Rect objects. But at the same time, I wanted to have == and != compare
+    not just area, but all four left, top, width, and height attributes.
+    But it's weird to have different comparison operators comparing different
+    features of a rectangular area. So I just defaulted to what Pygame does
+    and compares the box tuple.
This means that the == and != operators are + the only really useful comparison operators, so I decided to ditch the + other operators altogether and just have Rect only support == and !=. + """ + + def __eq__(self, other): + if isinstance(other, Rect): + return other.box == self.box + else: + raise PyRectException( + "Rect objects can only be compared with other Rect objects" + ) + + def __ne__(self, other): + if isinstance(other, Rect): + return other.box != self.box + else: + raise PyRectException( + "Rect objects can only be compared with other Rect objects" + ) + + +if __name__ == "__main__": + print(doctest.testmod()) diff --git a/templates/skills/control_windows/logo.png b/templates/skills/control_windows/logo.png new file mode 100644 index 00000000..3242b570 Binary files /dev/null and b/templates/skills/control_windows/logo.png differ diff --git a/templates/skills/control_windows/main.py b/templates/skills/control_windows/main.py new file mode 100644 index 00000000..d89e7bcd --- /dev/null +++ b/templates/skills/control_windows/main.py @@ -0,0 +1,153 @@ +import os +from pathlib import Path +import pygetwindow as gw +from api.interface import ( + SettingsConfig, + SkillConfig, + WingmanConfig, + WingmanInitializationError, +) +from api.enums import LogType +from skills.skill_base import Skill + + +class ControlWindows(Skill): + + # Paths to Start Menu directories + start_menu_paths: list[Path] = [ + Path(os.environ["APPDATA"], "Microsoft", "Windows", "Start Menu", "Programs"), + Path( + os.environ["PROGRAMDATA"], "Microsoft", "Windows", "Start Menu", "Programs" + ), + ] + + def __init__( + self, + config: SkillConfig, + wingman_config: WingmanConfig, + settings: SettingsConfig, + ) -> None: + super().__init__( + config=config, wingman_config=wingman_config, settings=settings + ) + + async def validate(self) -> list[WingmanInitializationError]: + errors = await super().validate() + return errors + + # Function to recursively list files in a directory + def list_files(self, directory, extension=""): + for item in directory.iterdir(): + if item.is_dir(): + yield from self.list_files(item, extension) + elif item.is_file() and item.suffix == extension: + yield item + + # Function to search and start an application + def search_and_start(self, app_name): + for start_menu_path in self.start_menu_paths: + if start_menu_path.exists(): + for file_path in self.list_files(start_menu_path, ".lnk"): + if app_name.lower() in file_path.stem.lower(): + # Attempt to start the application + os.startfile(str(file_path)) + # subprocess.Popen([str(file_path)]) + return True + return False + + def close_application(self, app_name): + windows = gw.getWindowsWithTitle(app_name) + if windows and len(windows) > 0: + for window in windows: + window.close() + + return True + + return False + + def execute_ui_command(self, app_name: str, command: str): + windows = gw.getWindowsWithTitle(app_name) + if windows and len(windows) > 0: + for window in windows: + try: + getattr(window, command)() + except AttributeError: + pass + + return True + + return False + + def get_tools(self) -> list[tuple[str, dict]]: + tools = [ + ( + "control_windows_functions", + { + "type": "function", + "function": { + "name": "control_windows_functions", + "description": "Control Windows Functions, like opening and closing applications.", + "parameters": { + "type": "object", + "properties": { + "command": { + "type": "string", + "description": "The command to execute", + "enum": [ + "open", + "close", + "minimize", + "maximize", + 
"restore", + "activate", + ], + }, + "parameter": { + "type": "string", + "description": "The parameter for the command. For example, the application name to open or close. Or the information to get.", + }, + }, + "required": ["command"], + }, + }, + }, + ), + ] + return tools + + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] + ) -> tuple[str, str]: + function_response = "Application not found." + instant_response = "" + + if tool_name == "control_windows_functions": + if self.settings.debug_mode: + self.start_execution_benchmark() + await self.printr.print_async( + f"Executing control_windows_functions with parameters: {parameters}", + color=LogType.INFO, + ) + + parameter = parameters.get("parameter") + + if parameters["command"] == "open": + app_started = self.search_and_start(parameter) + if app_started: + function_response = "Application started." + + elif parameters["command"] == "close": + app_closed = self.close_application(parameter) + if app_closed: + function_response = "Application closed." + + else: + command = parameters["command"] + app_minimize = self.execute_ui_command(parameter, command) + if app_minimize: + function_response = f"Application {command}." + + if self.settings.debug_mode: + await self.print_execution_time() + + return function_response, instant_response diff --git a/templates/skills/control_windows/requirements.txt b/templates/skills/control_windows/requirements.txt new file mode 100644 index 00000000..1e049956 --- /dev/null +++ b/templates/skills/control_windows/requirements.txt @@ -0,0 +1 @@ +pygetwindow==0.0.9 \ No newline at end of file diff --git a/templates/skills/spotify/default_config.yaml b/templates/skills/spotify/default_config.yaml new file mode 100644 index 00000000..3192bf84 --- /dev/null +++ b/templates/skills/spotify/default_config.yaml @@ -0,0 +1,69 @@ +name: Spotify +module: skills.spotify.main +description: + en: Control Spotify using the the WebAPI. Play songs, artists playlists. Control playback, volume, and more. All powered by AI! + de: Steuere Spotify mit der WebAPI. Spiele Lieder, Künstler und Playlists ab. Steuere die Wiedergabe, Lautstärke und mehr. Alles von KI gesteuert! +hint: + en:
  • Create an app in the Spotify Dashboard.
  • Select "Web API" and "Web Playback SDK" as APIs used
  • Add http://127.0.0.1:8082 as Redirect URI
  • Enter Client ID and Client Secret here
+ de:
  • Erstelle eine App im Spotify Dashboard.
  • Wähle "Web API" und "Web Playback SDK" als verwendete APIs
  • Füge http://127.0.0.1:8082 als Redirect URI hinzu
  • Gib die Client ID und das Client Secret hier ein
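+# Note: the hints above describe the one-time OAuth app setup. The matching values
+# are collected via the custom_properties at the bottom of this file; they are what
+# a Spotify OAuth client (for example spotipy's SpotifyOAuth) would need at runtime.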
+examples:
+  - question:
+      en: Play the song Californication.
+      de: Spiele das Lied Californication.
+    answer:
+      en: Now playing 'Californication' by Red Hot Chili Peppers.
+      de: "'Californication' von Red Hot Chili Peppers wird abgespielt."
+  - question:
+      en: What's the current song?
+      de: Wie heißt das aktuelle Lied?
+    answer:
+      en: You are currently listening to 'Californication' by Red Hot Chili Peppers.
+      de: Du hörst gerade 'Californication' von Red Hot Chili Peppers.
+  - question:
+      en: My girlfriend left me. Play a really sad song to match my mood.
+      de: Meine Freundin hat mich verlassen. Spiele ein wirklich trauriges Lied, das zu meiner Stimmung passt.
+    answer:
+      en: I'm sorry for you. Now playing 'Someone Like You' by Adele.
+      de: Es tut mir leid für dich. Jetzt wird "Someone Like You" von Adele gespielt.
+  - question:
+      en: Play the most popular song from the musical Les Miserables.
+      de: Spiele das beliebteste Lied aus dem Musical Les Miserables.
+    answer:
+      en: Playing 'I Dreamed a Dream' from Les Miserables.
+      de: "'I Dreamed a Dream' aus Les Miserables wird abgespielt."
+  - question:
+      en: That's a cover song. Play the real version!
+      de: Das ist ein Cover-Song. Spiele die echte Version aus dem Film!
+    answer:
+      en: Playing 'I Dreamed a Dream' by Anne Hathaway from Les Miserables.
+      de: Spiele 'I Dreamed a Dream' von Anne Hathaway aus Les Miserables.
+  - question:
+      en: What are my Spotify devices?
+      de: Was sind meine Spotify-Geräte?
+    answer:
+      en: You have 2 devices available - 'Gaming PC' and 'iPhone'.
+      de: Du hast 2 Geräte verfügbar - 'Gaming PC' und 'iPhone'.
+  - question:
+      en: Play the music on my iPhone.
+      de: Spiele die Musik auf meinem iPhone ab.
+    answer:
+      en: Moves the current playback to your iPhone.
+      de: Überträgt die Spotify-Wiedergabe auf das iPhone.
+prompt: |
+  You are also an expert DJ and music player interface responsible for controlling the user's Spotify music player client.
+  You have access to different tools or functions you can call to control the Spotify client using its API.
+  If the user asks you to play a song, resume, stop or pause the current playback etc., use your tools to do so.
+  For some functions, you need parameters like the song or artist name. Try to extract these values from the
+  user's request.
+  Never invent any function parameters. Ask the user for clarification if you are not sure or cannot extract function parameters.
+custom_properties:
+  - hint: Create an app in the Spotify Dashboard at https://developer.spotify.com/dashboard. You'll find the Client ID in the Settings of that app.
+    id: spotify_client_id
+    name: Spotify Client ID
+    required: true
+    value: enter-your-client-id-here
+  - hint: Create an app in the Spotify Dashboard at https://developer.spotify.com/dashboard. In the Settings of the app, add http://127.0.0.1:8082 (or any other free port) as Redirect URL. Then enter the same value here.
+ id: spotify_redirect_url + name: Spotify Redirect URL + required: true + value: http://127.0.0.1:8082 diff --git a/templates/skills/spotify/dependencies/bin/normalizer b/templates/skills/spotify/dependencies/bin/normalizer new file mode 100755 index 00000000..d207e619 --- /dev/null +++ b/templates/skills/spotify/dependencies/bin/normalizer @@ -0,0 +1,8 @@ +#!/Users/shackles/Source/wingman-ai/skills/spotify/venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from charset_normalizer.cli import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli_detect()) diff --git a/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/INSTALLER b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/LICENSE b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/LICENSE new file mode 100644 index 00000000..62b076cd --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/METADATA b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/METADATA new file mode 100644 index 00000000..c688a628 --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/METADATA @@ -0,0 +1,66 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2024.2.2 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
diff --git a/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/RECORD b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/RECORD new file mode 100644 index 00000000..716f0bbb --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/RECORD @@ -0,0 +1,14 @@ +certifi-2024.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2024.2.2.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 +certifi-2024.2.2.dist-info/METADATA,sha256=1noreLRChpOgeSj0uJT1mehiBl8ngh33Guc7KdvzYYM,2170 +certifi-2024.2.2.dist-info/RECORD,, +certifi-2024.2.2.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 +certifi-2024.2.2.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=ljtEx-EmmPpTe2SOd5Kzsujm_lUD0fKJVnE9gzce320,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/__pycache__/__init__.cpython-311.pyc,, +certifi/__pycache__/__main__.cpython-311.pyc,, +certifi/__pycache__/core.cpython-311.pyc,, +certifi/cacert.pem,sha256=ejR8qP724p-CtuR4U1WmY1wX-nVeCUD2XxWqj8e9f5I,292541 +certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/WHEEL b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/WHEEL new file mode 100644 index 00000000..98c0d20b --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.42.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/top_level.txt b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/top_level.txt new file mode 100644 index 00000000..963eac53 --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi-2024.2.2.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/templates/skills/spotify/dependencies/certifi/__init__.py b/templates/skills/spotify/dependencies/certifi/__init__.py new file mode 100644 index 00000000..1c91f3ec --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi/__init__.py @@ -0,0 +1,4 @@ +from .core import contents, where + +__all__ = ["contents", "where"] +__version__ = "2024.02.02" diff --git a/templates/skills/spotify/dependencies/certifi/__main__.py b/templates/skills/spotify/dependencies/certifi/__main__.py new file mode 100644 index 00000000..8945b5da --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi/__main__.py @@ -0,0 +1,12 @@ +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/templates/skills/spotify/dependencies/certifi/cacert.pem b/templates/skills/spotify/dependencies/certifi/cacert.pem new file mode 100644 index 00000000..fac3c319 --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi/cacert.pem @@ -0,0 +1,4814 @@ + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: 
b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946069240 +# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 +# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 +# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 +MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub +j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo +U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf +zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b +u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ 
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er +fF6adulZkMV8gzURZVE= +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK +mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t 
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited +# Label: "QuoVadis Root CA 2" +# Serial: 1289 +# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b +# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 +# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 +-----BEGIN CERTIFICATE----- +MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv +b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa +GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg +Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J +WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB +rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp ++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 +ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i +Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz +PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og +/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH +oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI +yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud +EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 +A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL +MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT +ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f +BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn +g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl +fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K +WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha +B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc +hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR +TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD +mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z +ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y +4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza +8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u +-----END CERTIFICATE----- + +# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited +# Label: "QuoVadis Root CA 3" +# Serial: 1478 +# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf +# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 +# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 +-----BEGIN CERTIFICATE----- +MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x +GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv 
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV +BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W +YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM +V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB +4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr +H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd +8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv +vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT +mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe +btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc +T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt +WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ +c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A +4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD +VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG +CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 +aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 +aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu +dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw +czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G +A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC +TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg +Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 +7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem +d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd ++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B +4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN +t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x +DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 +k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s +zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j +Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT +mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK +4SVhM7JZG+Ju1zdXtg2pEto= +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD +QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG +# Label: "SwissSign Gold CA - G2" +# Serial: 13492815561806991280 +# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 +# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 +# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 +-----BEGIN CERTIFICATE----- +MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV +BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln +biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF +MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT +d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 +76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ +bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c +6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE +emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd +MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt +MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y +MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y +FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi +aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM +gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB +qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 +lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn +8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov +L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 +45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO +UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 +O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC +bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv +GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a +77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC +hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 +92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp +Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w +ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt +Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ +-----END CERTIFICATE----- + +# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG +# Label: "SwissSign Silver CA - G2" +# Serial: 5700383053117599563 +# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 +# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb +# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 +-----BEGIN CERTIFICATE----- 
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE +BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu +IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow +RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY +U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv +Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br +YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF +nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH +6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt +eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ +c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ +MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH +HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf +jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 +5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB +rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c +wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 +cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB +AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp +WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 +xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ +2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ +IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 +aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X +em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR +dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ +OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ +hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy +tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u +-----END CERTIFICATE----- + +# Issuer: CN=SecureTrust CA O=SecureTrust Corporation +# Subject: CN=SecureTrust CA O=SecureTrust Corporation +# Label: "SecureTrust CA" +# Serial: 17199774589125277788362757014266862032 +# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 +# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 +# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz +MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv +cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz +Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO +0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao +wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj +7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS +8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT +BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg +JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ +3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm +D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS +CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR +3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= +-----END CERTIFICATE----- + +# Issuer: CN=Secure Global CA O=SecureTrust Corporation +# Subject: CN=Secure Global CA O=SecureTrust Corporation +# Label: "Secure Global CA" +# Serial: 9751836167731051554232119481456978597 +# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de +# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b +# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 +-----BEGIN CERTIFICATE----- +MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK +MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x +GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx +MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg +Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ +iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa +/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ +jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI +HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 +sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w +gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw +KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG +AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L +URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO +H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm +I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY +iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc +f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u +QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=Certigna O=Dhimyotis +# Subject: CN=Certigna O=Dhimyotis +# Label: "Certigna" +# Serial: 18364802974209362175 +# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff +# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 +# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d +-----BEGIN CERTIFICATE----- +MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV +BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X +DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ +BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 +DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 +QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny +gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw +zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q +130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 +JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw +DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw +ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT +AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj +AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h +bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc +fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu +HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w +t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw +WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== +-----END CERTIFICATE----- + +# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority +# Label: "ePKI Root Certification Authority" +# Serial: 28956088682735189655030529057352760477 +# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 +# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 +# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 +-----BEGIN CERTIFICATE----- +MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw +IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL +SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH +SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh +ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X +DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 +TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ +fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA +sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU +WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS +nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH +dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip +NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC +AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF +MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH +ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB +uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl +PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP +JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ +gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 +j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 +5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB +o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS +/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z +Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE +W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D +hNQ+IIX3Sj0rnP0qCglN6oH4EZw= +-----END CERTIFICATE----- + +# Issuer: O=certSIGN OU=certSIGN ROOT CA +# Subject: O=certSIGN OU=certSIGN ROOT CA +# Label: "certSIGN ROOT CA" +# Serial: 35210227249154 +# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 +# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b +# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb +-----BEGIN CERTIFICATE----- +MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD +QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP +MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do +0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ +UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d +RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ +OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv +JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C +AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O +BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ +LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY +MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ +44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I +Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw +i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN +9u6wWk5JRFRYX0KD +-----END CERTIFICATE----- + +# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) +# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" +# Serial: 80544274841616 +# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 +# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 +# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG +EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 +MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl +cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR +dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB +pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM +b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm +aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz +IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A +MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT +lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz +AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 +VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG +ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 +BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG +AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M +U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh +bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C ++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC +bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F +uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 +XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= +-----END CERTIFICATE----- + +# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. +# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
+# Label: "SecureSign RootCA11" +# Serial: 1 +# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 +# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 +# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr +MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG +A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 +MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp +Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD +QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz +i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 +h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV +MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 +UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni +8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC +h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD +VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB +AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm +KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ +X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr +QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 +pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN +QSdJQO7e5iNEOdyhIta6A/I= +-----END CERTIFICATE----- + +# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. +# Label: "Microsec e-Szigno Root CA 2009" +# Serial: 14014712776195784473 +# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 +# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e +# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 +-----BEGIN CERTIFICATE----- +MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD +VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 +ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G +CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y +OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx +FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp +Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o +dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP +kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc +cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U +fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 +N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC +xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 ++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM +Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG +SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h +mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk +ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 +tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c +2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t +HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW +-----END CERTIFICATE----- + +# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=Izenpe.com O=IZENPE S.A. +# Subject: CN=Izenpe.com O=IZENPE S.A. 
+# Label: "Izenpe.com" +# Serial: 917563065490389241595536686991402621 +# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 +# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 +# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f +-----BEGIN CERTIFICATE----- +MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 +MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 +ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD +VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j +b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq +scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO +xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H +LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX +uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD +yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ +JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q +rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN +BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L +hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB +QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ +HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu +Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg +QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB +BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx +MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA +A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb +laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 +awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo +JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw +LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT +VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk +LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb +UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ +QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ +naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls +QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking 
O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf +qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Network CA" +# Serial: 279744 +# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 +# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e +# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e +-----BEGIN CERTIFICATE----- +MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM +MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D +ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU +cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 +WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg +Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw +IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH +UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM +TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU +BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM +kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x +AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV +HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y +sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL +I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 +J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY +VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI +03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= +-----END CERTIFICATE----- + +# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA +# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA +# Label: "TWCA Root Certification Authority" +# Serial: 1 +# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 +# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 +# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES +MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU +V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz +WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO +LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB +AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE +AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH +K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX +RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z +rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx +3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq +hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC +MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls +XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D +lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn +aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ +YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== +-----END CERTIFICATE----- + +# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 +# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 +# Label: "Security Communication RootCA2" +# Serial: 0 +# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 +# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 +# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl +MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe +U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX +DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy +dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj +YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV +OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr +zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM +VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ +hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO +ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw +awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs +OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 +DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF +coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc +okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 +t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy +1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ +SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 +-----END CERTIFICATE----- + +# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 +# Label: "Actalis Authentication Root CA" +# Serial: 6271844772424770508 +# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 +# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac +# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 +-----BEGIN CERTIFICATE----- +MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE +BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w +MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 +IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC +SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 +ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv +UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX +4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 +KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ +gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb +rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ +51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F +be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe +KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F +v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn +fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 +jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz +ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt +ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL +e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz +WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V +SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j +pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX +X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok +fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R +K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU +ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU +LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT +LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 2 Root CA" +# Serial: 2 +# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 +# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 +# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 +-----BEGIN CERTIFICATE----- +MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr +6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV +L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 +1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx +MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ +QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB +arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr +Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi +FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS +P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN +9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz +uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h +9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s +A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t +OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo ++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 +KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 +DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us +H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ +I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 +5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h +3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz +Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= +-----END CERTIFICATE----- + +# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 +# Label: "Buypass Class 3 Root CA" +# Serial: 2 +# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec +# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 +# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d +-----BEGIN CERTIFICATE----- 
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd +MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg +Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow +TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw +HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB +BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y +ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E +N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 +tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX +0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c +/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X +KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY +zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS +O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D +34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP +K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 +AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv +Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj +QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV +cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS +IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 +HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa +O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv +033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u +dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE +kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 +3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD +u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq +4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 3" +# Serial: 1 +# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef +# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 +# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN +8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ +RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 +hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 +ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM +EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 +A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy +WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 +6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT +91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml +e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p +TpPDpFQUWw== +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 2009" +# Serial: 623603 +# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f +# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 +# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 +-----BEGIN CERTIFICATE----- +MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha +ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM +HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 +UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 +tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R +ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM +lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp +/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G +A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G +A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj +dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy +MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl +cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js +L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL +BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni +acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 +o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K +zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 +PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y +Johw1+qRzT65ysCQblrGXnRl11z+o+I= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH +# Label: "D-TRUST Root Class 3 CA 2 EV 2009" +# Serial: 623604 +# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 +# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 +# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF +MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD +bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw +NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV +BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn +ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 +3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z +qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR +p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 +HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 +NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT +B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF +10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz +0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh +MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH +zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc +46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 +yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi +laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP +oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA +BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE +qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm +4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL +1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn +LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF +H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo +RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ +nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh +15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW +6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW +nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j +wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz +aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy +KwbQBM0= +-----END CERTIFICATE----- + +# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera +# Label: "TeliaSonera Root CA v1" +# Serial: 199041966741090107964904287217786801558 +# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c +# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 +# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 +-----BEGIN CERTIFICATE----- +MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw +NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv +b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD +VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F +VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 +7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X +Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ +/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs +81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm +dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe +Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu +sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 +pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs +slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ +arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD +VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG +9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl +dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj +TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed +Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 +Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI +OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 +vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW +t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn +HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx +SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= +-----END CERTIFICATE----- + +# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center +# Label: "T-TeleSec GlobalRoot Class 2" +# Serial: 1 +# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a +# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 +# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 +-----BEGIN CERTIFICATE----- +MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx +KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd +BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl +YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 +OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy +aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 +ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd +AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC +FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi +1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq +jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ +wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ +WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy +NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC +uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw +IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 +g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN +9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP +BSeOE6Fuwg== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot 2011 O=Atos +# Subject: CN=Atos TrustedRoot 2011 O=Atos +# Label: "Atos TrustedRoot 2011" +# Serial: 6643877497813316402 +# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 +# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 +# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE +AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG +EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM +FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC +REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp +Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM +VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ +SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ +ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD +ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px +KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI +FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv +oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg +u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP +0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf +3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl +8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ +DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN +PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ +ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G2" +# Serial: 15385348160840213938643033620894905419 +# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d +# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f +# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 +-----BEGIN CERTIFICATE----- +MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA +n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc +biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp +EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA +bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu +YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB +AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW +BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI +QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I +0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni +lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 +B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv +ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo +IhNzbM8m9Yop5w== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root G3" +# Serial: 15459312981008553731928384953135426796 +# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb +# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 +# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 +-----BEGIN CERTIFICATE----- +MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf +Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q +RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD +AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY +JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv +6pZjamVFkpUBtA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G2" +# Serial: 4293743540046975378534879503202253541 +# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 +# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 +# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f +-----BEGIN CERTIFICATE----- +MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH +MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI +2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx +1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ +q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz +tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ +vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV +5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY +1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 +NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG +Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 +8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe +pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl +MrY= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root G3" +# Serial: 7089244469030293291760083333884364146 +# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca +# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e +# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 +-----BEGIN CERTIFICATE----- +MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw +CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu +ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe +Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw +EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x +IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG +fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd +BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx +AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ +oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 +sycX +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Trusted Root G4" +# Serial: 7451500558977370777930084869016614236 +# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 +# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 +# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 +-----BEGIN CERTIFICATE----- +MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg +RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV +UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu +Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y +ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If +xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV +ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO +DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ +jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ +CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi +EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM +fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY +uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK +chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t +9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB +hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD +ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 +SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd ++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc +fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa +sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N +cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N +0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie +4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI +r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 +/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm +gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ +-----END CERTIFICATE----- + +# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited +# Label: "COMODO RSA Certification Authority" +# Serial: 101909084537582093308941363524873193117 +# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 +# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 +# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 +-----BEGIN CERTIFICATE----- +MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB +hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV +BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT +EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR +Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR +6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X +pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC +9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV +/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf +Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z ++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w +qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah +SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC +u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf +Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq +crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E +FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB +/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl +wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM +4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV +2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna +FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ +CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK +boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke +jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL +S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb +QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl +0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB +NVOFBkpdn627G190 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network +# Label: "USERTrust RSA Certification Authority" +# Serial: 2645093764781058787591871645665788717 +# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 +# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e +# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 +-----BEGIN CERTIFICATE----- +MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB +iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl +cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV +BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw +MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV +BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU +aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B +3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY +tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ +Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 +VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT +79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 +c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l +c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee +UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE +Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd +BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G +A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF +Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO +VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 +ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs +8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR +iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze +Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ +XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ +qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB +VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB +L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG +jjxDah2nGN59PRbxYvnKkKj9 +-----END CERTIFICATE----- + +# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network +# Label: "USERTrust ECC Certification Authority" +# Serial: 123013823720199481456569720443997572134 +# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 +# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 +# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a +-----BEGIN CERTIFICATE----- +MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL +MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl +eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT +JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx +MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT +Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg +VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo +I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng +o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G +A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB +zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW +RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 +# Label: "GlobalSign ECC Root CA - R5" +# Serial: 32785792099990507226680698011560947931244 +# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 +# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa +# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 +-----BEGIN CERTIFICATE----- +MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk +MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH +bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX +DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD +QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu +MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke +hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI +KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg +515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO +xwy8p2Fp8fc74SrL+SvzZpA3 +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust +# Label: "IdenTrust Commercial Root CA 1" +# Serial: 13298821034946342390520003877796839426 +# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 +# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 +# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae +-----BEGIN CERTIFICATE----- +MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu +VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw +MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw +JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT +3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU ++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp +S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 +bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi +T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL +vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK +Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK +dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT +c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv +l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N +iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB +/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD +ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH +6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt +LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 +nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 ++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK +W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT +AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq +l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG +4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ +mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A +7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H +-----END CERTIFICATE----- + +# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust +# Label: "IdenTrust Public Sector Root CA 1" +# Serial: 13298821034946342390521976156843933698 +# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba +# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd +# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu +VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 +xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d +Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN +5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe +/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z +AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ +5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE WISeKey Global Root GB CA" +# Serial: 157768595616588414422159278966750757568 +# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d +# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed +# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 +-----BEGIN CERTIFICATE----- +MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt +MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg +Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i +YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x +CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG +b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh +bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 +HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx +WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX +1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk +u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P +99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r +M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB +BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh +cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 +gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO +ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf +aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic +Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= +-----END CERTIFICATE----- + +# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. +# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
+# Label: "SZAFIR ROOT CA2" +# Serial: 357043034767186914217277344587386743377558296292 +# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 +# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de +# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe +-----BEGIN CERTIFICATE----- +MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL +BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 +ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw +NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L +cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg +Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN +QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT +3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw +3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 +3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 +BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN +XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF +AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw +8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG +nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP +oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy +d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg +LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority +# Label: "Certum Trusted Network CA 2" +# Serial: 44979900017204383099463764357512596969 +# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 +# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 +# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 +-----BEGIN CERTIFICATE----- +MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB +gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu +QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG +A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz +OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ +VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 +b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA +DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn +0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB +OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE +fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E +Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m +o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i +sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW +OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez +Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS +adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n +3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC +AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ +F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf +CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 +XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm +djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ +WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb +AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq +P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko +b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj +XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P +5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi +DrW5viSP +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce +# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 +# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 +-----BEGIN CERTIFICATE----- +MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix +DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k +IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT +N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v +dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG +A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh +ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx +QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 +dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA +4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 +AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 +4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C +ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV +9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD +gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 +Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq +NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko +LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc +Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV +HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd +ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I +XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI +M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot +9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V +Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea +j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh +X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ +l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf +bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 +pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK +e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 +vm9qp/UsQu0yrbYhnr68 +-----END CERTIFICATE----- + +# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority +# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority +# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" +# Serial: 0 +# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef +# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 +# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 +-----BEGIN CERTIFICATE----- +MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN +BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl +bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv +b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ +BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj +YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 +MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 +dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg +QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa +jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC +MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi +C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep +lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof +TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR +-----END CERTIFICATE----- + +# Issuer: CN=ISRG Root X1 O=Internet Security Research Group +# Subject: CN=ISRG Root X1 O=Internet Security Research Group +# Label: "ISRG Root X1" +# Serial: 172886928669790476064670243504169061120 +# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e +# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 +# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 +-----BEGIN CERTIFICATE----- +MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw +TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh +cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 +WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu +ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY +MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc +h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ +0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U +A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW +T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH +B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC +B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv +KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn +OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn +jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw +qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI +rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq +hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL +ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ +3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK +NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 +ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur +TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC +jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq +4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA +mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d +emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= +-----END CERTIFICATE----- + +# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM +# Label: "AC RAIZ FNMT-RCM" +# Serial: 485876308206448804701554682760554759 +# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d +# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 +# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx +CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ +WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ +BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG +Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ +yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf +BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz +WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF +tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z +374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC +IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL +mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 +wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS +MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 +ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet +UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H +YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 +LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD +nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 +RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM +LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf +77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N +JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm +fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp +6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp +1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B +9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok +RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv +uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 1 O=Amazon +# Subject: CN=Amazon Root CA 1 O=Amazon +# Label: "Amazon Root CA 1" +# Serial: 143266978916655856878034712317230054538369994 +# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 +# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 +# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e +-----BEGIN CERTIFICATE----- +MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM +9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw +IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 +VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L +93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm +jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA +A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI +U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs +N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv +o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU +5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy +rqXRfboQnoZsG4q5WTP468SQvvG5 +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 2 O=Amazon +# Subject: CN=Amazon Root CA 2 O=Amazon +# Label: "Amazon Root CA 2" +# Serial: 143266982885963551818349160658925006970653239 +# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 +# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a +# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 +-----BEGIN CERTIFICATE----- +MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF +ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 +b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL +MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv +b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK +gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ +W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg +1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K +8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r +2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me +z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR +8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj +mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz +7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 ++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI +0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm +UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 +LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY ++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS +k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl +7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm +btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl +urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ +fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 +n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE +76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H +9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT +4PsJYGw= +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 3 O=Amazon +# Subject: CN=Amazon Root CA 3 O=Amazon +# Label: "Amazon Root CA 3" +# Serial: 143266986699090766294700635381230934788665930 +# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 +# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e +# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 +-----BEGIN CERTIFICATE----- +MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl +ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j +QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr +ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr +BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM +YyRIHN8wfdVoOw== +-----END CERTIFICATE----- + +# Issuer: CN=Amazon Root CA 4 O=Amazon +# Subject: CN=Amazon Root CA 4 O=Amazon +# Label: "Amazon Root CA 4" +# Serial: 143266989758080763974105200630763877849284878 +# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd +# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be +# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 +-----BEGIN CERTIFICATE----- +MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 +MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g +Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG +A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg +Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi +9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk +M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB +/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB +MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw +CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW +1KyLa2tJElMzrdfkviT8tQp21KW8EA== +-----END CERTIFICATE----- + +# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM +# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" +# Serial: 1 +# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 +# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca +# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 +-----BEGIN CERTIFICATE----- +MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx +GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp +bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w +KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 +BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy +dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG +EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll +IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU +QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT +TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg +LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 +a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr +LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X +YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ +iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f +AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH +V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh +AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf +IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 +lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c +8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf +lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= +-----END CERTIFICATE----- + +# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. +# Label: "GDCA TrustAUTH R5 ROOT" +# Serial: 9009899650740120186 +# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 +# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 +# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 +-----BEGIN CERTIFICATE----- +MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE +BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ +IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 +MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV +BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w +HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF +AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj +Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj +TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u +KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj +qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm +MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 +ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP +zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk +L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC +jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA +HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC +AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg +p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm +DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 +COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry +L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf +JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg +IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io +2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV +09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ +XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq +T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe +MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation +# Label: "SSL.com Root Certification Authority RSA" +# Serial: 8875640296558310041 +# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 +# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb +# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 +-----BEGIN CERTIFICATE----- +MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE +BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK +DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz +OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv +bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN +AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R +xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX +qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC +C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 +6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh +/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF +YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E +JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc +US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 +ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm ++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi +M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV +HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G +A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV +cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc +Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs +PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ +q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 +cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr +a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I +H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y +K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu +nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf +oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY +Ic2wBlX7Jz9TkHCpBB5XJ7k= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com Root Certification Authority ECC" +# Serial: 8495723813297216424 +# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e +# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a +# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 +-----BEGIN CERTIFICATE----- +MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC +VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz +WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 +b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS +b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI +7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg +CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD +VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T +kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ +gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority RSA R2" +# Serial: 6248227494352943350 +# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 +# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a +# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c +-----BEGIN CERTIFICATE----- +MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV +BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE +CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy +dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy +MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G +A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD +DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq +M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf +OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa +4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 +HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR +aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA +b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ +Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV +PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO +pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu +UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY +MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV +HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 +9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW +s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 +Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg +cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM +79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz +/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt +ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm +Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK +QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ +w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi +S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 +mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation +# Label: "SSL.com EV Root Certification Authority ECC" +# Serial: 3182246526754555285 +# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 +# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d +# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 +-----BEGIN CERTIFICATE----- +MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T +U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx +NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv +dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv +bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA +VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku +WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX +5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ +ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg +h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 +# Label: "GlobalSign Root CA - R6" +# Serial: 1417766617973444989252670301619537 +# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae +# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 +# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 +-----BEGIN CERTIFICATE----- +MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg +MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx +MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET +MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI +xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k +ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD +aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw +LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw +1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX +k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 +SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h +bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n +WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY +rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce +MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD +AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu +bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN +nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt +Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 +55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj +vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf +cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz +oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp +nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs +pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v +JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R +8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 +5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= +-----END CERTIFICATE----- + +# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed +# Label: "OISTE 
WISeKey Global Root GC CA" +# Serial: 44084345621038548146064804565436152554 +# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 +# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 +# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d +-----BEGIN CERTIFICATE----- +MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw +CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 +bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg +Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ +BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu +ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS +b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni +eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W +p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T +rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV +57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg +Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 +-----END CERTIFICATE----- + +# Issuer: CN=UCA Global G2 Root O=UniTrust +# Subject: CN=UCA Global G2 Root O=UniTrust +# Label: "UCA Global G2 Root" +# Serial: 124779693093741543919145257850076631279 +# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 +# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a +# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH +bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x +CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds +b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr +b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 +kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm +VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R +VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc +C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj +tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY +D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv +j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl +NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 +iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP +O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ +BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV +ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj +L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 +1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl +1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU +b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV +PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj +y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb +EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg +DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI ++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX +UB+K+wb1whnw0A== +-----END CERTIFICATE----- + +# Issuer: CN=UCA Extended Validation Root O=UniTrust +# Subject: CN=UCA Extended Validation Root O=UniTrust +# Label: "UCA Extended Validation Root" +# Serial: 106100277556486529736699587978573607008 +# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 +# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a +# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH +MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF +eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx +MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV +BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog +D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS +sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop +O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk +sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi +c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj +VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz +KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ +TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G +sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs +1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD +fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T +AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN +l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR +ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ +VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 +c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp +4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s +t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj +2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO +vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C +xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx +cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM +fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax +-----END CERTIFICATE----- + +# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 +# Label: "Certigna Root CA" +# Serial: 269714418870597844693661054334862075617 +# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 +# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 +# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 +-----BEGIN CERTIFICATE----- +MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw +WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw +MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x +MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD +VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX +BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw +ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M +CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu +I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm +TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh +C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf +ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz +IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT +Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k +JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 +hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB +GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of +1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov +L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo +dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr +aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq +hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L +6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG +HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 +0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB +lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi +o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 +gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v +faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 +Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh +jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw +3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign Root CA - G1" +# Serial: 235931866688319308814040 +# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac +# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c +# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 +-----BEGIN CERTIFICATE----- +MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD +VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU +ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH +MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO +MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv +Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz +f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO +8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq +d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM +tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt +Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB +o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD +AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x +PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM +wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d +GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH +6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx +iN66zB+Afko= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI +# Label: "emSign ECC Root CA - G3" +# Serial: 287880440101571086945156 +# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 +# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 +# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b +-----BEGIN CERTIFICATE----- +MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG +EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo +bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g +RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ +TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s +b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 +WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS +fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB +zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq +hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB +CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD ++JbNR6iC8hZVdyR+EhCVBCyj +-----END CERTIFICATE----- + +# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI +# Label: "emSign Root CA - C1" +# Serial: 825510296613316004955058 +# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 +# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 +# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f +-----BEGIN CERTIFICATE----- +MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG +A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg +SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v +dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ +BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ +HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH +3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH +GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c +xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 +aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq +TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 +/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 +kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG +YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT ++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo +WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= +-----END CERTIFICATE----- + +# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI +# Label: "emSign ECC Root CA - C3" +# Serial: 582948710642506000014504 +# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 +# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 +# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 +-----BEGIN CERTIFICATE----- +MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG +EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx +IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw +MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln +biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND +IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci +MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti +sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O +BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB +Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c +3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J +0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== +-----END CERTIFICATE----- + +# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post +# Label: "Hongkong Post Root CA 3" +# Serial: 46170865288971385588281144162979347873371282084 +# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 +# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 +# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 +-----BEGIN CERTIFICATE----- +MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL +BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ +SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n +a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 +NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT +CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u +Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK +AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO +dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI +VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV +9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY +2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY +vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt +bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb +x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ +l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK +TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj +Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP +BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e +i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw +DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG +7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk +MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr +gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk +GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS +3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm +Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ +l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c +JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP +L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG +mpv0 +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only +# Label: "Entrust Root Certification Authority - G4" +# Serial: 289383649854506086828220374796556676440 +# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 +# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 +# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 +-----BEGIN CERTIFICATE----- +MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw +gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL +Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg +MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw +BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 +MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 +c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ +bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg +Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B +AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ +2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E +T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j +5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM +C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T +DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX +wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A +2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm +nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 +dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl +N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj +c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS +5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS +Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr +hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ +B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI +AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw +H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ +b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk +2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol +IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk +5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY +n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD +VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
+RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke +gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
+# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo +P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA +Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS +d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm +acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 +# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY +o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G3" +# Serial: 576386314500428537169965010905813481816650257167 +# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 +# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 +# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 +-----BEGIN CERTIFICATE----- +MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM +BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp +ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe +Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw +IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU +cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC +DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS +T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK +AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 +nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep +qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA +yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs +hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX +zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv +kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT +f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA +uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB +o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih +MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E +BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 +wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 +XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 +JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j +ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV +VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx +xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on +AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d +7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj +gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV ++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo +FGWsJwt0ivKH +-----END CERTIFICATE----- + +# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. +# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
+# Label: "TrustAsia Global Root CA G4" +# Serial: 451799571007117016466790293371524403291602933463 +# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb +# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a +# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c +-----BEGIN CERTIFICATE----- +MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw +WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs +IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y +MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD +VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz +dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx +s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw +LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD +pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE +AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR +UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj +/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope +# Label: "CommScope Public Trust ECC Root-01" +# Serial: 385011430473757362783587124273108818652468453534 +# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 +# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d +# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b +-----BEGIN CERTIFICATE----- +MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa +Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C +flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE +hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq +hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg +2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS +Um9poIyNStDuiw7LR47QjRE= +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope +# Label: "CommScope Public Trust ECC Root-02" +# Serial: 234015080301808452132356021271193974922492992893 +# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 +# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 +# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a +-----BEGIN CERTIFICATE----- +MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw +TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t +bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa +Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv +cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw +djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL +j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD +VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq +hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n +ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV +mkzw5l4lIhVtwodZ0LKOag== +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope +# Label: "CommScope Public Trust RSA Root-01" +# Serial: 354030733275608256394402989253558293562031411421 +# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 +# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 +# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 +NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk +YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh +suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al +DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj +WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl +P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 +KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p +UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ +kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO +Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB +Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U +CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ +KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 +NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ +nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ +QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v +trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a +aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD +j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 +Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w +lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn +YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc +icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw +-----END CERTIFICATE----- + +# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope +# Label: "CommScope Public Trust RSA Root-02" +# Serial: 480062499834624527752716769107743131258796508494 +# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa +# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae +# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL +BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi +Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t +U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt +MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE +NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 +kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C +rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz +hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 +LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs +n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku +FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 +kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 +wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v +wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs +5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ +KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB +KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 ++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme +APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq +pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT +6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF +sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt +PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d +lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 +v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O +rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS ECC Root 2020" +# Serial: 72082518505882327255703894282316633856 +# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd +# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec +# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 +-----BEGIN CERTIFICATE----- +MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw +CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH +bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw +MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx +JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE +AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 +AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O +tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP +f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA +MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di +z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn +27iQ7t0l +-----END CERTIFICATE----- + +# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH +# Label: "Telekom Security TLS RSA Root 2023" +# Serial: 44676229530606711399881795178081572759 +# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 +# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 +# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj +MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 +eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy +MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC +REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG +A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 +cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV +cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA +U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 +Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug +BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy +8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J +co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg +8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 +rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 +mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg ++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX +gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 +p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ +pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm +9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw +M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd +GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ +CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t +xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ +w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK +L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj +X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q +ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm +dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= +-----END CERTIFICATE----- diff --git a/templates/skills/spotify/dependencies/certifi/core.py b/templates/skills/spotify/dependencies/certifi/core.py new file mode 100644 index 00000000..91f538bb --- /dev/null +++ b/templates/skills/spotify/dependencies/certifi/core.py @@ -0,0 +1,114 @@ +""" +certifi.py +~~~~~~~~~~ + +This module returns the installation location of cacert.pem or its contents. +""" +import sys +import atexit + +def exit_cacert_ctx() -> None: + _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. 
In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + atexit.register(exit_cacert_ctx) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
+ def where() -> str: + f = os.path.dirname(__file__) + + return os.path.join(f, "cacert.pem") + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/templates/skills/spotify/dependencies/certifi/py.typed b/templates/skills/spotify/dependencies/certifi/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/INSTALLER b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/LICENSE b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/LICENSE new file mode 100644 index 00000000..ad82355b --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/METADATA b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/METADATA new file mode 100644 index 00000000..822550e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/METADATA @@ -0,0 +1,683 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 3.3.2 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. 
+Home-page: https://github.com/Ousret/charset_normalizer +Author: Ahmed TAHRI +Author-email: ahmed.tahri@cloudnursery.dev +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Typing :: Typed +Requires-Python: >=3.7.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport + +

+# Charset Detection, for Everyone 👋
+
+**The Real First Universal Charset Detector**
+
+*(Badge images removed: the download-count, "Featured Packages", and community-port "Static Badge" markup did not survive extraction.)*
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+>>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<
+
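+A minimal sketch of that promise, using the package's public `from_bytes` API (the byte payload below is an arbitrary illustrative sample, not taken from this README):
+
+```python
+from charset_normalizer import from_bytes
+
+# Bytes of unknown origin; here, a UTF-8-encoded French sample.
+payload = "Bonjour, le monde ! Comment ça va ?".encode("utf_8")
+
+best_guess = from_bytes(payload).best()  # ranked matches; best() may return None
+if best_guess is not None:
+    print(best_guess.encoding)  # e.g. 'utf_8'
+    print(str(best_guess))      # the decoded, readable text
+```
+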

+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 (restrictive) | MIT | MPL-1.1 (restrictive) |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
+| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
+

+*(Demo illustrations "Reading Normalized Text" / "Cat Reading Text" removed: image markup did not survive extraction.)*

+
+*\*\* : They clearly rely on encoding-specific code, even if it covers most of the encodings in common use*
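+
+Since the table above pitches charset-normalizer as a drop-in alternative, here is a hedged sketch of the chardet-compatible entry point shown later in this README (the sample bytes are an arbitrary illustration):
+
+```python
+from charset_normalizer import detect
+
+# detect() mirrors chardet's dict-shaped result, easing drop-in migration.
+result = detect("おはようございます".encode("shift_jis"))
+print(result["encoding"], result["language"], result["confidence"])
+```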
+Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart, Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+|-----------------------------------------------|:--------:|:------------------:|:------------------:|
+| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
+| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
+| charset-normalizer | 100 ms | 50 ms | 5 ms |
+
+Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files with default parameters. For details on the files used, see the GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
+> Keep in mind that the stats are generous and that Chardet's accuracy versus ours is measured using Chardet's initial
+> capabilities (e.g. supported encodings). Challenge them if you want.
+
+## ✨ Installation
+
+Using pip:
+
+```sh
+pip install charset-normalizer -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+                  file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+  files                 File(s) to be analysed
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -v, --verbose         Display complementary information about file if any.
+                        Stdout will contain logs about the detection process.
+  -a, --with-alternative
+                        Output complementary possibilities if any. Top-level
+                        JSON WILL be a list.
+  -n, --normalize       Permit to normalize input file. If not set, program
+                        does not write anything.
+  -m, --minimal         Only output the charset detected to STDOUT. Disabling
+                        JSON output.
+  -r, --replace         Replace file when trying to normalize it instead of
+                        creating a new one.
+  -f, --force           Replace file without asking if you are sure, use this
+                        flag with caution.
+  -t THRESHOLD, --threshold THRESHOLD
+                        Define a custom maximum amount of chaos allowed in
+                        decoded content. 0. <= chaos <= 1.
+  --version             Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+or
+
+```bash
+python -m charset_normalizer ./data/sample.1.fr.srt
+```
+
+🎉 Since version 1.4.0, the CLI produces an easily consumable JSON result on stdout.
+
+```json
+{
+    "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+    "encoding": "cp1252",
+    "encoding_aliases": [
+        "1252",
+        "windows_1252"
+    ],
+    "alternative_encodings": [
+        "cp1254",
+        "cp1256",
+        "cp1258",
+        "iso8859_14",
+        "iso8859_15",
+        "iso8859_16",
+        "iso8859_3",
+        "iso8859_9",
+        "latin_1",
+        "mbcs"
+    ],
+    "language": "French",
+    "alphabets": [
+        "Basic Latin",
+        "Latin-1 Supplement"
+    ],
+    "has_sig_or_bom": false,
+    "chaos": 0.149,
+    "coherence": 97.152,
+    "unicode_path": null,
+    "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) backward-compatible result possible.
+
+See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it did not meet my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also! I never back down from a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered strings.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute forcing text decoding.** How cool is that? 😎
+
+Don't confuse the package **ftfy** with charset-normalizer or chardet. ftfy's goal is to repair Unicode strings, whereas charset-normalizer's is to convert a raw file in an unknown encoding to Unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding tables that could not fit the binary content.
+ - Measure the noise, or mess, once opened (by chunks) with a corresponding charset encoding.
+ - Extract the matches with the lowest mess detected.
+ - Additionally, measure coherence / probe for a language.
+
+**Wait a minute**, what are noise/mess and coherence according to **YOU?**
+
+*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess.
+I know that my interpretation of what is noise is probably incomplete; feel free to contribute in order to
+improve or rewrite it.
+
+*Coherence:* For each language on earth, we have computed ranked letter-appearance occurrences (as best we can). I figured
+that intel is worth something here, so I use those records against decoded text to check whether I can detect intelligent design.
+
+## ⚡ Known limitations
+
+ - Language detection is unreliable when the text contains two or more languages sharing identical letters. (e.g. HTML (English tags) + Turkish content (sharing Latin characters))
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
+
+## ⚠️ About Python EOLs
+
+**If you are running:**
+
+- Python >=2.7,<3.5: Unsupported
+- Python 3.5: charset-normalizer < 2.1
+- Python 3.6: charset-normalizer < 3.1
+- Python 3.7: charset-normalizer < 4.0
+
+Upgrade your Python interpreter as soon as possible.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check the [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
+
+## 📝 License
+
+Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) + +## 💼 For Enterprise + +Professional support for charset-normalizer is available as part of the [Tidelift +Subscription][1]. Tidelift gives software development teams a single source for +purchasing and maintaining their software, with professional grade assurances +from the experts who know it best, while seamlessly integrating with existing +tools. + +[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme + +# Changelog +All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) + +### Fixed +- Unintentional memory usage regression when using large payload that match several encoding (#376) +- Regression on some detection case showcased in the documentation (#371) + +### Added +- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) + +## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) + +### Changed +- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 +- Improved the general detection reliability based on reports from the community + +## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) + +### Added +- Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` +- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) + +### Removed +- (internal) Redundant utils.is_ascii function and unused function is_private_use_only +- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant + +### Changed +- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection +- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 + +### Fixed +- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) + +## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) + +### Changed +- Typehint for function `from_path` no longer enforce `PathLike` as its first argument +- Minor improvement over the global detection reliability + +### Added +- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries +- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) +- Explicit support for Python 3.12 + +### Fixed +- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) + +## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) + +### Added +- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) + +### Removed +- Support for Python 3.6 (PR #260) + +### Changed +- Optional speedup provided by mypy/c 1.0.1 + +## 
[3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) + +### Fixed +- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) + +### Changed +- Speedup provided by mypy/c 0.990 on Python >= 3.7 + +## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter +- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it +- Sphinx warnings when generating the documentation + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) + +### Added +- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results +- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES +- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio + +### Changed +- Build with static metadata using 'build' frontend +- Make the language detection stricter + +### Fixed +- CLI with opt --normalize fail when using full path for files +- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it + +### Removed +- Coherence detector no longer return 'Simple English' instead return 'English' +- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' + +## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) + +### Added +- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) + +### Removed +- Breaking: Method `first()` and `best()` from CharsetMatch +- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) + +### Fixed +- Sphinx warnings when generating the documentation + +## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) + +### Changed +- Optional: Module `md.py` can be 
compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 + +### Removed +- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches +- Breaking: Top-level function `normalize` +- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch +- Support for the backport `unicodedata2` + +## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) + +### Deprecated +- Function `normalize` scheduled for removal in 3.0 + +### Changed +- Removed useless call to decode in fn is_unprintable (#206) + +### Fixed +- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) + +## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) + +### Added +- Output the Unicode table version when running the CLI with `--version` (PR #194) + +### Changed +- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) +- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) + +### Fixed +- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) +- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) + +### Removed +- Support for Python 3.5 (PR #192) + +### Deprecated +- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) + +## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) + +### Fixed +- ASCII miss-detection on rare cases (PR #170) + +## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) + +### Added +- Explicit support for Python 3.11 (PR #164) + +### Changed +- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) + +## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) + +### Fixed +- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) + +### Changed +- Skipping the language-detection (CD) on ASCII (PR #155) + +## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) + +### Changed +- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) + +### Fixed +- Wrong logging level applied when setting kwarg `explain` to True (PR #146) + +## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) +### Changed +- Improvement over Vietnamese detection (PR #126) +- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) +- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) +- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) +- Code style as refactored by Sourcery-AI (PR #131) +- Minor adjustment on the MD around european words (PR #133) +- Remove and replace SRTs from assets / tests (PR #139) +- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) +- Setting kwarg `explain` to True will add provisionally (bounded to 
+## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
+### Added
+- Add support for Kazakh (Cyrillic) language detection (PR #109)
+
+### Changed
+- Further improved inferring the language from a given single-byte code page (PR #112)
+- Vainly trying to leverage PEP 263 when PEP 3120 is not supported (PR #116)
+- Refactoring for potential performance improvements in loops, from [@adbar](https://github.com/adbar) (PR #113)
+- Various detection improvements (MD+CD) (PR #117)
+
+### Removed
+- Remove redundant logging entry about detected language(s) (PR #115)
+
+### Fixed
+- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
+
+## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
+### Fixed
+- Unforeseen regression with the loss of backward compatibility with some older minor versions of Python 3.5.x (PR #100)
+- Fix CLI crash when using --minimal output in certain cases (PR #103)
+
+### Changed
+- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
+
+## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
+### Changed
+- The project now complies with flake8, mypy, isort and black to ensure a better overall quality (PR #81)
+- The BC support with v1.x was improved; the old staticmethods are restored (PR #82)
+- The Unicode detection is slightly improved (PR #93)
+- Add syntactic sugar \_\_bool\_\_ for the results CharsetMatches list-container (PR #91)
+
+### Removed
+- The project no longer raises a warning on tiny content given for detection; it is simply logged as a warning instead (PR #92)
+
+### Fixed
+- In some rare cases, the chunk extractor could cut in the middle of a multi-byte character and mislead the mess detection (PR #95)
+- Some rare 'space' characters could trip up the UnprintablePlugin/mess detection (PR #96)
+- The MANIFEST.in was not exhaustive (PR #78)
+
+## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
+### Fixed
+- The CLI no longer raises an unexpected exception when no encoding has been found (PR #70)
+- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
+- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
+- Submatch factoring could be wrong in rare edge cases (PR #72)
+- Multiple files given to the CLI were ignored when publishing results to STDOUT (after the first path) (PR #72)
+- Fix line endings from CRLF to LF for certain project files (PR #67)
+
+### Changed
+- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
+- Allow fallback on a specified encoding if any (PR #71)
+
+## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
+### Changed
+- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results, especially for ASCII. (PR #63)
+- According to the community's wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
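The 2.0.3 note above describes the last-resort ASCII/UTF-8 fallback. From the caller's side that simply means `best()` usually still returns a candidate instead of nothing; a minimal sketch via `from_path` (the file path is illustrative):

```python
# Caller-side view of the last-resort fallback described in 2.0.3 above.
from charset_normalizer import from_path

best = from_path("./subtitles.srt").best()  # illustrative path
print(best.encoding if best is not None else "undetermined")
```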
+## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
+### Fixed
+- Empty/too-small JSON payload mis-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
+
+### Changed
+- Don't inject unicodedata2 into sys.modules, from [@akx](https://github.com/akx) (PR #57)
+
+## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
+### Fixed
+- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
+- Using explain=False permanently disabled the verbose output in the current runtime (PR #47)
+- One log entry (language target preemptive) was not shown in the logs when using explain=True (PR #47)
+- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
+
+### Changed
+- Default argument values of the public function normalize were not aligned with from_bytes (PR #53)
+
+### Added
+- You may now use charset aliases in the cp_isolation and cp_exclusion arguments (PR #47)
+
+## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
+### Changed
+- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
+- The accent has been put on UTF-8 detection; it should perform rather instantaneously.
+- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
+- The detection mechanism has been slightly improved; Turkish content is now detected correctly (most of the time)
+- The program has been rewritten to ease readability and maintainability. (+Using static typing)
+- utf_7 detection has been reinstated.
+
+### Removed
+- This package no longer requires anything when used with Python 3.5 (dropped cached_property)
+- Removed support for these languages: Catalan, Esperanto, Kazakh, Basque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian.
+- The exception hook on UnicodeDecodeError has been removed.
+
+### Deprecated
+- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
+
+### Fixed
+- The CLI output used the relative path of the file(s). Should be absolute.
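The 2.0.0 entry above stresses Chardet backward compatibility through the legacy `detect` function, which `__init__.py` later in this diff re-exports. A drop-in sketch; the sample text is illustrative:

```python
# Chardet-compatible shim highlighted in the 2.0.0 notes above.
from charset_normalizer import detect

guess = detect("Café liégeois, s'il vous plaît.".encode("utf_8"))
# Same shape as Chardet's result dict: {'encoding': ..., 'language': ..., 'confidence': ...}
print(guess["encoding"], guess["confidence"])
```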
+## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
+### Fixed
+- Logger configuration/usage no longer conflicts with others (PR #44)
+
+## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
+### Removed
+- Using standard logging instead of the loguru package.
+- Dropping the nose test framework in favor of the maintained pytest.
+- Chose not to use the dragonmapper package to help with gibberish Chinese/CJK text.
+- Require cached_property only for Python 3.5 due to constraint. Dropping it for every other interpreter version.
+- Stop support for UTF-7 that does not contain a SIG.
+- Dropping PrettyTable, replaced with pure JSON output in the CLI.
+
+### Fixed
+- The BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present, due to the sub-match factoring process.
+- Not searching properly for the BOM when trying the utf32/16 parent codec.
+
+### Changed
+- Improving the package's final size by compressing frequencies.json.
+- Huge improvement over the largest payloads.
+
+### Added
+- The CLI now produces JSON-consumable output.
+- Return ASCII if the given sequences fit, given reasonable confidence.
+
+## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
+
+### Fixed
+- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
+
+## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
+
+### Fixed
+- An empty payload given for detection could cause an exception when accessing the `alphabets` property. (PR #39)
+
+## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
+
+### Fixed
+- The legacy detect function should return UTF-8-SIG if a SIG is present in the payload. (PR #38)
+
+## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
+
+### Changed
+- Amend the previous release to allow prettytable 2.0 (PR #35)
+
+## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
+
+### Fixed
+- Fix an error when using the package with a Python pre-release interpreter (PR #33)
+
+### Changed
+- Dependencies refactoring, constraints revised.
+
+### Added
+- Add Python 3.9 and 3.10 to the supported interpreters
+
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
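The dist-info files added next register a `normalizer` console script pointing at `charset_normalizer.cli:cli_detect` (see entry_points.txt below), so the same CLI can also be driven in-process. A minimal sketch; the file path is illustrative, while the flags come from the argument parser defined in cli/__main__.py later in this diff:

```python
# entry_points.txt below maps `normalizer` to charset_normalizer.cli:cli_detect.
from charset_normalizer.cli import cli_detect

exit_code = cli_detect(["sample.srt", "--minimal", "--threshold", "0.2"])
# Per the cli_detect docstring: 0 means everything went fine, anything else signals trouble.
```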
diff --git a/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/RECORD b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/RECORD new file mode 100644 index 00000000..22e47ec8 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/RECORD @@ -0,0 +1,35 @@ +../../bin/normalizer,sha256=ORV8hDqjWnylIIBcEb590dsNP4mxGe-utfLcw_Ai68w,281 +charset_normalizer-3.3.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +charset_normalizer-3.3.2.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 +charset_normalizer-3.3.2.dist-info/METADATA,sha256=cfLhl5A6SI-F0oclm8w8ux9wshL1nipdeCdVnYb4AaA,33550 +charset_normalizer-3.3.2.dist-info/RECORD,, +charset_normalizer-3.3.2.dist-info/WHEEL,sha256=eaDTbMedWofVq8IZjew9qeAkoA5Sw2MOU2ppdIRr1Jg,110 +charset_normalizer-3.3.2.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65 +charset_normalizer-3.3.2.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577 +charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73 +charset_normalizer/__pycache__/__init__.cpython-311.pyc,, +charset_normalizer/__pycache__/__main__.cpython-311.pyc,, +charset_normalizer/__pycache__/api.cpython-311.pyc,, +charset_normalizer/__pycache__/cd.cpython-311.pyc,, +charset_normalizer/__pycache__/constant.cpython-311.pyc,, +charset_normalizer/__pycache__/legacy.cpython-311.pyc,, +charset_normalizer/__pycache__/md.cpython-311.pyc,, +charset_normalizer/__pycache__/models.cpython-311.pyc,, +charset_normalizer/__pycache__/utils.cpython-311.pyc,, +charset_normalizer/__pycache__/version.cpython-311.pyc,, +charset_normalizer/api.py,sha256=WOlWjy6wT8SeMYFpaGbXZFN1TMXa-s8vZYfkL4G29iQ,21097 +charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560 +charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100 +charset_normalizer/cli/__main__.py,sha256=2F-xURZJzo063Ye-2RLJ2wcmURpbKeAzKwpiws65dAs,9744 +charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,, +charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc,, +charset_normalizer/constant.py,sha256=p0IsOVcEbPWYPOdWhnhRbjK1YVBy6fs05C5vKC-zoxU,40481 +charset_normalizer/legacy.py,sha256=T-QuVMsMeDiQEk8WSszMrzVJg_14AMeSkmHdRYhdl1k,2071 +charset_normalizer/md.cpython-311-darwin.so,sha256=zbs-p3GrSygP9-4v4GVAUcyRpreXASFbQqgK9rvFoKw,50117 +charset_normalizer/md.py,sha256=NkSuVLK13_a8c7BxZ4cGIQ5vOtGIWOdh22WZEvjp-7U,19624 +charset_normalizer/md__mypyc.cpython-311-darwin.so,sha256=5u-KvFhpxi_WDpF0bB0tfYS2z7PzQ08aO8DAOMiMAXI,232636 +charset_normalizer/models.py,sha256=I5i0s4aKCCgLPY2tUY3pwkgFA-BUbbNxQ7hVkVTt62s,11624 +charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894 +charset_normalizer/version.py,sha256=iHKUfHD3kDRSyrh_BN2ojh43TA5-UZQjvbVIEFfpHDs,79 diff --git a/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/WHEEL b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/WHEEL new file mode 100644 index 00000000..5feb1eeb --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: false +Tag: cp311-cp311-macosx_11_0_arm64 + diff --git 
a/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/entry_points.txt b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/entry_points.txt new file mode 100644 index 00000000..65619e73 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +normalizer = charset_normalizer.cli:cli_detect diff --git a/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/top_level.txt b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/top_level.txt new file mode 100644 index 00000000..66958f0a --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer-3.3.2.dist-info/top_level.txt @@ -0,0 +1 @@ +charset_normalizer diff --git a/templates/skills/spotify/dependencies/charset_normalizer/__init__.py b/templates/skills/spotify/dependencies/charset_normalizer/__init__.py new file mode 100644 index 00000000..55991fc3 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/__init__.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . +:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. 
+""" +import logging + +from .api import from_bytes, from_fp, from_path, is_binary +from .legacy import detect +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "is_binary", + "detect", + "CharsetMatch", + "CharsetMatches", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/templates/skills/spotify/dependencies/charset_normalizer/__main__.py b/templates/skills/spotify/dependencies/charset_normalizer/__main__.py new file mode 100644 index 00000000..beae2ef7 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/__main__.py @@ -0,0 +1,4 @@ +from .cli import cli_detect + +if __name__ == "__main__": + cli_detect() diff --git a/templates/skills/spotify/dependencies/charset_normalizer/api.py b/templates/skills/spotify/dependencies/charset_normalizer/api.py new file mode 100644 index 00000000..0ba08e3a --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/api.py @@ -0,0 +1,626 @@ +import logging +from os import PathLike +from typing import BinaryIO, List, Optional, Set, Union + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + cut_sequence_chunks, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def from_bytes( + sequences: Union[bytes, bytearray], + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possibles charset usable to render str objects. + If there is no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. + And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page + but never take it for granted. Can improve the performance. + + You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. 
+ By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually. + """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {0}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level: int = logger.level + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length: int = len(sequences) + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. " + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE + is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings: List[str] = [] + + specified_encoding: Optional[str] = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. Priority +1 given for %s.", + specified_encoding, + ) + + tested: Set[str] = set() + tested_but_hard_failure: List[str] = [] + tested_but_soft_failure: List[str] = [] + + fallback_ascii: Optional[CharsetMatch] = None + fallback_u8: Optional[CharsetMatch] = None + fallback_specified: Optional[CharsetMatch] = None + + results: CharsetMatches = CharsetMatches() + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload: Optional[str] = None + bom_or_sig_available: bool = sig_encoding == encoding_iana + strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + if encoding_iana in {"utf_7"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)], + encoding=encoding_iana, + ) + else: + decoded_payload = str( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :], + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test: bool = False + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus: bool = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up: int = int(len(r_) / 4) + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count: int = 0 + lazy_str_hard_failure = False + + md_chunks: List[str] = [] + md_ratios = [] + + try: + for chunk in cut_sequence_chunks( + sequences, + encoding_iana, + r_, + chunk_size, + bom_or_sig_available, + strip_sig_or_bom, + sig_payload, + is_multi_byte_decoder, + decoded_payload, + ): + md_chunks.append(chunk) + + md_ratios.append( + mess_ratio( + chunk, + threshold, + explain is True and 1 <= len(cp_isolation) <= 2, + ) + ) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + except ( + UnicodeDecodeError + ) as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + enable_fallback + and encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, encoding_iana, threshold, False, [], decoded_payload + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. 
Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages: List[str] = encoding_languages(encoding_iana) + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. + if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, + language_threshold, + ",".join(target_languages) if target_languages else None, + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + results.append( + CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + decoded_payload, + ) + ) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + logger.debug( + "Encoding detection: %s is most likely the one.", encoding_iana + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", + ) + + if fallback_specified: + logger.debug( + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, + ) + results.append(fallback_specified) + elif ( + (fallback_u8 and fallback_ascii is None) + or ( + fallback_u8 + and fallback_ascii + and fallback_u8.fingerprint != fallback_ascii.fingerprint + ) + or (fallback_u8 is not None) + ): + logger.debug("Encoding detection: utf_8 will be used as a fallback match") + results.append(fallback_u8) + elif fallback_ascii: + logger.debug("Encoding detection: ascii will be used as a fallback match") + results.append(fallback_ascii) + + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return results + + +def from_fp( + fp: BinaryIO, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but using a file pointer that is already ready. + Will not close the file pointer. 
+ """ + return from_bytes( + fp.read(), + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + enable_fallback, + ) + + +def from_path( + path: Union[str, bytes, PathLike], # type: ignore[type-arg] + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = True, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. + Can raise IOError. + """ + with open(path, "rb") as fp: + return from_fp( + fp, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + language_threshold, + enable_fallback, + ) + + +def is_binary( + fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg] + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: Optional[List[str]] = None, + cp_exclusion: Optional[List[str]] = None, + preemptive_behaviour: bool = True, + explain: bool = False, + language_threshold: float = 0.1, + enable_fallback: bool = False, +) -> bool: + """ + Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. + Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match + are disabled to be stricter around ASCII-compatible but unlikely to be a string. + """ + if isinstance(fp_or_path_or_payload, (str, PathLike)): + guesses = from_path( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + elif isinstance( + fp_or_path_or_payload, + ( + bytes, + bytearray, + ), + ): + guesses = from_bytes( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + else: + guesses = from_fp( + fp_or_path_or_payload, + steps=steps, + chunk_size=chunk_size, + threshold=threshold, + cp_isolation=cp_isolation, + cp_exclusion=cp_exclusion, + preemptive_behaviour=preemptive_behaviour, + explain=explain, + language_threshold=language_threshold, + enable_fallback=enable_fallback, + ) + + return not guesses diff --git a/templates/skills/spotify/dependencies/charset_normalizer/cd.py b/templates/skills/spotify/dependencies/charset_normalizer/cd.py new file mode 100644 index 00000000..4ea6760c --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/cd.py @@ -0,0 +1,395 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter +from functools import lru_cache +from typing import Counter as TypeCounter, Dict, List, Optional, Tuple + +from .constant import ( + FREQUENCIES, + KO_NAMES, + LANGUAGE_SUPPORTED_COUNT, + TOO_SMALL_SEQUENCE, + ZH_NAMES, +) +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + 
unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module( + "encodings.{}".format(iana_name) + ).IncrementalDecoder + + p: IncrementalDecoder = decoder(errors="ignore") + seen_ranges: Dict[str, int] = {} + character_count: int = 0 + + for i in range(0x40, 0xFF): + chunk: str = p.decode(bytes([i])) + + if chunk: + character_range: Optional[str] = unicode_range(chunk) + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a unicode range. + """ + languages: List[str] = [] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + unicode_ranges: List[str] = encoding_unicode_range(iana_name) + primary_range: Optional[str] = None + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. + """ + target_have_accents: bool = False + target_pure_latin: bool = True + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return associated languages associated to given characters. 
+ """ + languages: List[Tuple[str, float]] = [] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count: int = len(language_characters) + + character_match_count: int = len( + [c for c in language_characters if c in characters] + ) + + ratio: float = character_match_count / character_count + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) + """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count: int = 0 + FREQUENCIES_language_set = set(FREQUENCIES[language]) + + ordered_characters_count: int = len(ordered_characters) + target_language_characters_count: int = len(FREQUENCIES[language]) + + large_alphabet: bool = target_language_characters_count > 26 + + for character, character_rank in zip( + ordered_characters, range(0, ordered_characters_count) + ): + if character not in FREQUENCIES_language_set: + continue + + character_rank_in_language: int = FREQUENCIES[language].index(character) + expected_projection_ratio: float = ( + target_language_characters_count / ordered_characters_count + ) + character_rank_projection: int = int(character_rank * expected_projection_ratio) + + if ( + large_alphabet is False + and abs(character_rank_projection - character_rank_in_language) > 4 + ): + continue + + if ( + large_alphabet is True + and abs(character_rank_projection - character_rank_in_language) + < target_language_characters_count / 3 + ): + character_approved_count += 1 + continue + + characters_before_source: List[str] = FREQUENCIES[language][ + 0:character_rank_in_language + ] + characters_after_source: List[str] = FREQUENCIES[language][ + character_rank_in_language: + ] + characters_before: List[str] = ordered_characters[0:character_rank] + characters_after: List[str] = ordered_characters[character_rank:] + + before_match_count: int = len( + set(characters_before) & set(characters_before_source) + ) + + after_match_count: int = len( + set(characters_after) & set(characters_after_source) + ) + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. 
a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. + """ + layers: Dict[str, str] = {} + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + continue + + layer_target_range: Optional[str] = None + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. + """ + per_language_ratios: Dict[str, List[float]] = {} + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: + """ + We shall NOT return "English—" in CoherenceMatches because it is an alternative + of "English". This function only keeps the best match and remove the em-dash in it. + """ + index_results: Dict[str, List[float]] = dict() + + for result in results: + language, ratio = result + no_em_name: str = language.replace("—", "") + + if no_em_name not in index_results: + index_results[no_em_name] = [] + + index_results[no_em_name].append(ratio) + + if any(len(index_results[e]) > 1 for e in index_results): + filtered_results: CoherenceMatches = [] + + for language in index_results: + filtered_results.append((language, max(index_results[language]))) + + return filtered_results + + return results + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. 
+ """ + + results: List[Tuple[str, float]] = [] + ignore_non_latin: bool = False + + sufficient_match_count: int = 0 + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies: TypeCounter[str] = Counter(layer) + most_common = sequence_frequencies.most_common() + + character_count: int = sum(o for c, o in most_common) + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered: List[str] = [c for c, o in most_common] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio: float = characters_popularity_compare( + language, popular_character_ordered + ) + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted( + filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True + ) diff --git a/templates/skills/spotify/dependencies/charset_normalizer/cli/__init__.py b/templates/skills/spotify/dependencies/charset_normalizer/cli/__init__.py new file mode 100644 index 00000000..d95fedfe --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/cli/__init__.py @@ -0,0 +1,6 @@ +from .__main__ import cli_detect, query_yes_no + +__all__ = ( + "cli_detect", + "query_yes_no", +) diff --git a/templates/skills/spotify/dependencies/charset_normalizer/cli/__main__.py b/templates/skills/spotify/dependencies/charset_normalizer/cli/__main__.py new file mode 100644 index 00000000..f4bcbaac --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/cli/__main__.py @@ -0,0 +1,296 @@ +import argparse +import sys +from json import dumps +from os.path import abspath, basename, dirname, join, realpath +from platform import python_version +from typing import List, Optional +from unicodedata import unidata_version + +import charset_normalizer.md as md_module +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". 
+ + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: Optional[List[str]] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." + ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.2, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( + __version__, + python_version(), + unidata_version, + "OFF" if md_module.__file__.lower().endswith(".py") else "ON", + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + print("--threshold VALUE should be between 0. 
AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + "Maybe try increasing maximum amount of chaos." + if args.threshold < 1.0 + else "", + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + dir_path = dirname(realpath(my_file.name)) + file_name = basename(realpath(my_file.name)) + + o_: List[str] = file_name.split(".") + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = join(dir_path, ".".join(o_)) + + with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: + fp.write(str(best_guess)) + except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/templates/skills/spotify/dependencies/charset_normalizer/constant.py b/templates/skills/spotify/dependencies/charset_normalizer/constant.py new file mode 100644 index 00000000..86349046 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/constant.py @@ -0,0 +1,1995 @@ +# -*- coding: utf-8 -*- +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from encodings.aliases import aliases +from re import IGNORECASE, compile as re_compile +from typing import Dict, List, Set, Union + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { + "utf_8": BOM_UTF8, + "utf_7": [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + 
b"\x2b\x2f\x76\x38\x2d", + ], + "gb18030": b"\x84\x31\x95\x33", + "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], + "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], +} + +TOO_SMALL_SEQUENCE: int = 32 +TOO_BIG_SEQUENCE: int = int(10e6) + +UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 + +# Up-to-date Unicode ucd/15.0.0 +UNICODE_RANGES_COMBINED: Dict[str, range] = { + "Control character": range(32), + "Basic Latin": range(32, 128), + "Latin-1 Supplement": range(128, 256), + "Latin Extended-A": range(256, 384), + "Latin Extended-B": range(384, 592), + "IPA Extensions": range(592, 688), + "Spacing Modifier Letters": range(688, 768), + "Combining Diacritical Marks": range(768, 880), + "Greek and Coptic": range(880, 1024), + "Cyrillic": range(1024, 1280), + "Cyrillic Supplement": range(1280, 1328), + "Armenian": range(1328, 1424), + "Hebrew": range(1424, 1536), + "Arabic": range(1536, 1792), + "Syriac": range(1792, 1872), + "Arabic Supplement": range(1872, 1920), + "Thaana": range(1920, 1984), + "NKo": range(1984, 2048), + "Samaritan": range(2048, 2112), + "Mandaic": range(2112, 2144), + "Syriac Supplement": range(2144, 2160), + "Arabic Extended-B": range(2160, 2208), + "Arabic Extended-A": range(2208, 2304), + "Devanagari": range(2304, 2432), + "Bengali": range(2432, 2560), + "Gurmukhi": range(2560, 2688), + "Gujarati": range(2688, 2816), + "Oriya": range(2816, 2944), + "Tamil": range(2944, 3072), + "Telugu": range(3072, 3200), + "Kannada": range(3200, 3328), + "Malayalam": range(3328, 3456), + "Sinhala": range(3456, 3584), + "Thai": range(3584, 3712), + "Lao": range(3712, 3840), + "Tibetan": range(3840, 4096), + "Myanmar": range(4096, 4256), + "Georgian": range(4256, 4352), + "Hangul Jamo": range(4352, 4608), + "Ethiopic": range(4608, 4992), + "Ethiopic Supplement": range(4992, 5024), + "Cherokee": range(5024, 5120), + "Unified Canadian Aboriginal Syllabics": range(5120, 5760), + "Ogham": range(5760, 5792), + "Runic": range(5792, 5888), + "Tagalog": range(5888, 5920), + "Hanunoo": range(5920, 5952), + "Buhid": range(5952, 5984), + "Tagbanwa": range(5984, 6016), + "Khmer": range(6016, 6144), + "Mongolian": range(6144, 6320), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), + "Limbu": range(6400, 6480), + "Tai Le": range(6480, 6528), + "New Tai Lue": range(6528, 6624), + "Khmer Symbols": range(6624, 6656), + "Buginese": range(6656, 6688), + "Tai Tham": range(6688, 6832), + "Combining Diacritical Marks Extended": range(6832, 6912), + "Balinese": range(6912, 7040), + "Sundanese": range(7040, 7104), + "Batak": range(7104, 7168), + "Lepcha": range(7168, 7248), + "Ol Chiki": range(7248, 7296), + "Cyrillic Extended-C": range(7296, 7312), + "Georgian Extended": range(7312, 7360), + "Sundanese Supplement": range(7360, 7376), + "Vedic Extensions": range(7376, 7424), + "Phonetic Extensions": range(7424, 7552), + "Phonetic Extensions Supplement": range(7552, 7616), + "Combining Diacritical Marks Supplement": range(7616, 7680), + "Latin Extended Additional": range(7680, 7936), + "Greek Extended": range(7936, 8192), + "General Punctuation": range(8192, 8304), + "Superscripts and Subscripts": range(8304, 8352), + "Currency Symbols": range(8352, 8400), + "Combining Diacritical Marks for Symbols": range(8400, 8448), + "Letterlike Symbols": range(8448, 8528), + "Number Forms": range(8528, 8592), + "Arrows": range(8592, 8704), + "Mathematical Operators": range(8704, 8960), + "Miscellaneous Technical": range(8960, 9216), + "Control Pictures": range(9216, 9280), + "Optical Character Recognition": range(9280, 9312), 
+ "Enclosed Alphanumerics": range(9312, 9472), + "Box Drawing": range(9472, 9600), + "Block Elements": range(9600, 9632), + "Geometric Shapes": range(9632, 9728), + "Miscellaneous Symbols": range(9728, 9984), + "Dingbats": range(9984, 10176), + "Miscellaneous Mathematical Symbols-A": range(10176, 10224), + "Supplemental Arrows-A": range(10224, 10240), + "Braille Patterns": range(10240, 10496), + "Supplemental Arrows-B": range(10496, 10624), + "Miscellaneous Mathematical Symbols-B": range(10624, 10752), + "Supplemental Mathematical Operators": range(10752, 11008), + "Miscellaneous Symbols and Arrows": range(11008, 11264), + "Glagolitic": range(11264, 11360), + "Latin Extended-C": range(11360, 11392), + "Coptic": range(11392, 11520), + "Georgian Supplement": range(11520, 11568), + "Tifinagh": range(11568, 11648), + "Ethiopic Extended": range(11648, 11744), + "Cyrillic Extended-A": range(11744, 11776), + "Supplemental Punctuation": range(11776, 11904), + "CJK Radicals Supplement": range(11904, 12032), + "Kangxi Radicals": range(12032, 12256), + "Ideographic Description Characters": range(12272, 12288), + "CJK Symbols and Punctuation": range(12288, 12352), + "Hiragana": range(12352, 12448), + "Katakana": range(12448, 12544), + "Bopomofo": range(12544, 12592), + "Hangul Compatibility Jamo": range(12592, 12688), + "Kanbun": range(12688, 12704), + "Bopomofo Extended": range(12704, 12736), + "CJK Strokes": range(12736, 12784), + "Katakana Phonetic Extensions": range(12784, 12800), + "Enclosed CJK Letters and Months": range(12800, 13056), + "CJK Compatibility": range(13056, 13312), + "CJK Unified Ideographs Extension A": range(13312, 19904), + "Yijing Hexagram Symbols": range(19904, 19968), + "CJK Unified Ideographs": range(19968, 40960), + "Yi Syllables": range(40960, 42128), + "Yi Radicals": range(42128, 42192), + "Lisu": range(42192, 42240), + "Vai": range(42240, 42560), + "Cyrillic Extended-B": range(42560, 42656), + "Bamum": range(42656, 42752), + "Modifier Tone Letters": range(42752, 42784), + "Latin Extended-D": range(42784, 43008), + "Syloti Nagri": range(43008, 43056), + "Common Indic Number Forms": range(43056, 43072), + "Phags-pa": range(43072, 43136), + "Saurashtra": range(43136, 43232), + "Devanagari Extended": range(43232, 43264), + "Kayah Li": range(43264, 43312), + "Rejang": range(43312, 43360), + "Hangul Jamo Extended-A": range(43360, 43392), + "Javanese": range(43392, 43488), + "Myanmar Extended-B": range(43488, 43520), + "Cham": range(43520, 43616), + "Myanmar Extended-A": range(43616, 43648), + "Tai Viet": range(43648, 43744), + "Meetei Mayek Extensions": range(43744, 43776), + "Ethiopic Extended-A": range(43776, 43824), + "Latin Extended-E": range(43824, 43888), + "Cherokee Supplement": range(43888, 43968), + "Meetei Mayek": range(43968, 44032), + "Hangul Syllables": range(44032, 55216), + "Hangul Jamo Extended-B": range(55216, 55296), + "High Surrogates": range(55296, 56192), + "High Private Use Surrogates": range(56192, 56320), + "Low Surrogates": range(56320, 57344), + "Private Use Area": range(57344, 63744), + "CJK Compatibility Ideographs": range(63744, 64256), + "Alphabetic Presentation Forms": range(64256, 64336), + "Arabic Presentation Forms-A": range(64336, 65024), + "Variation Selectors": range(65024, 65040), + "Vertical Forms": range(65040, 65056), + "Combining Half Marks": range(65056, 65072), + "CJK Compatibility Forms": range(65072, 65104), + "Small Form Variants": range(65104, 65136), + "Arabic Presentation Forms-B": range(65136, 65280), + "Halfwidth and 
Fullwidth Forms": range(65280, 65520), + "Specials": range(65520, 65536), + "Linear B Syllabary": range(65536, 65664), + "Linear B Ideograms": range(65664, 65792), + "Aegean Numbers": range(65792, 65856), + "Ancient Greek Numbers": range(65856, 65936), + "Ancient Symbols": range(65936, 66000), + "Phaistos Disc": range(66000, 66048), + "Lycian": range(66176, 66208), + "Carian": range(66208, 66272), + "Coptic Epact Numbers": range(66272, 66304), + "Old Italic": range(66304, 66352), + "Gothic": range(66352, 66384), + "Old Permic": range(66384, 66432), + "Ugaritic": range(66432, 66464), + "Old Persian": range(66464, 66528), + "Deseret": range(66560, 66640), + "Shavian": range(66640, 66688), + "Osmanya": range(66688, 66736), + "Osage": range(66736, 66816), + "Elbasan": range(66816, 66864), + "Caucasian Albanian": range(66864, 66928), + "Vithkuqi": range(66928, 67008), + "Linear A": range(67072, 67456), + "Latin Extended-F": range(67456, 67520), + "Cypriot Syllabary": range(67584, 67648), + "Imperial Aramaic": range(67648, 67680), + "Palmyrene": range(67680, 67712), + "Nabataean": range(67712, 67760), + "Hatran": range(67808, 67840), + "Phoenician": range(67840, 67872), + "Lydian": range(67872, 67904), + "Meroitic Hieroglyphs": range(67968, 68000), + "Meroitic Cursive": range(68000, 68096), + "Kharoshthi": range(68096, 68192), + "Old South Arabian": range(68192, 68224), + "Old North Arabian": range(68224, 68256), + "Manichaean": range(68288, 68352), + "Avestan": range(68352, 68416), + "Inscriptional Parthian": range(68416, 68448), + "Inscriptional Pahlavi": range(68448, 68480), + "Psalter Pahlavi": range(68480, 68528), + "Old Turkic": range(68608, 68688), + "Old Hungarian": range(68736, 68864), + "Hanifi Rohingya": range(68864, 68928), + "Rumi Numeral Symbols": range(69216, 69248), + "Yezidi": range(69248, 69312), + "Arabic Extended-C": range(69312, 69376), + "Old Sogdian": range(69376, 69424), + "Sogdian": range(69424, 69488), + "Old Uyghur": range(69488, 69552), + "Chorasmian": range(69552, 69600), + "Elymaic": range(69600, 69632), + "Brahmi": range(69632, 69760), + "Kaithi": range(69760, 69840), + "Sora Sompeng": range(69840, 69888), + "Chakma": range(69888, 69968), + "Mahajani": range(69968, 70016), + "Sharada": range(70016, 70112), + "Sinhala Archaic Numbers": range(70112, 70144), + "Khojki": range(70144, 70224), + "Multani": range(70272, 70320), + "Khudawadi": range(70320, 70400), + "Grantha": range(70400, 70528), + "Newa": range(70656, 70784), + "Tirhuta": range(70784, 70880), + "Siddham": range(71040, 71168), + "Modi": range(71168, 71264), + "Mongolian Supplement": range(71264, 71296), + "Takri": range(71296, 71376), + "Ahom": range(71424, 71504), + "Dogra": range(71680, 71760), + "Warang Citi": range(71840, 71936), + "Dives Akuru": range(71936, 72032), + "Nandinagari": range(72096, 72192), + "Zanabazar Square": range(72192, 72272), + "Soyombo": range(72272, 72368), + "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), + "Pau Cin Hau": range(72384, 72448), + "Devanagari Extended-A": range(72448, 72544), + "Bhaiksuki": range(72704, 72816), + "Marchen": range(72816, 72896), + "Masaram Gondi": range(72960, 73056), + "Gunjala Gondi": range(73056, 73136), + "Makasar": range(73440, 73472), + "Kawi": range(73472, 73568), + "Lisu Supplement": range(73648, 73664), + "Tamil Supplement": range(73664, 73728), + "Cuneiform": range(73728, 74752), + "Cuneiform Numbers and Punctuation": range(74752, 74880), + "Early Dynastic Cuneiform": range(74880, 75088), + "Cypro-Minoan": 
range(77712, 77824), + "Egyptian Hieroglyphs": range(77824, 78896), + "Egyptian Hieroglyph Format Controls": range(78896, 78944), + "Anatolian Hieroglyphs": range(82944, 83584), + "Bamum Supplement": range(92160, 92736), + "Mro": range(92736, 92784), + "Tangsa": range(92784, 92880), + "Bassa Vah": range(92880, 92928), + "Pahawh Hmong": range(92928, 93072), + "Medefaidrin": range(93760, 93856), + "Miao": range(93952, 94112), + "Ideographic Symbols and Punctuation": range(94176, 94208), + "Tangut": range(94208, 100352), + "Tangut Components": range(100352, 101120), + "Khitan Small Script": range(101120, 101632), + "Tangut Supplement": range(101632, 101760), + "Kana Extended-B": range(110576, 110592), + "Kana Supplement": range(110592, 110848), + "Kana Extended-A": range(110848, 110896), + "Small Kana Extension": range(110896, 110960), + "Nushu": range(110960, 111360), + "Duployan": range(113664, 113824), + "Shorthand Format Controls": range(113824, 113840), + "Znamenny Musical Notation": range(118528, 118736), + "Byzantine Musical Symbols": range(118784, 119040), + "Musical Symbols": range(119040, 119296), + "Ancient Greek Musical Notation": range(119296, 119376), + "Kaktovik Numerals": range(119488, 119520), + "Mayan Numerals": range(119520, 119552), + "Tai Xuan Jing Symbols": range(119552, 119648), + "Counting Rod Numerals": range(119648, 119680), + "Mathematical Alphanumeric Symbols": range(119808, 120832), + "Sutton SignWriting": range(120832, 121520), + "Latin Extended-G": range(122624, 122880), + "Glagolitic Supplement": range(122880, 122928), + "Cyrillic Extended-D": range(122928, 123024), + "Nyiakeng Puachue Hmong": range(123136, 123216), + "Toto": range(123536, 123584), + "Wancho": range(123584, 123648), + "Nag Mundari": range(124112, 124160), + "Ethiopic Extended-B": range(124896, 124928), + "Mende Kikakui": range(124928, 125152), + "Adlam": range(125184, 125280), + "Indic Siyaq Numbers": range(126064, 126144), + "Ottoman Siyaq Numbers": range(126208, 126288), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), + "Mahjong Tiles": range(126976, 127024), + "Domino Tiles": range(127024, 127136), + "Playing Cards": range(127136, 127232), + "Enclosed Alphanumeric Supplement": range(127232, 127488), + "Enclosed Ideographic Supplement": range(127488, 127744), + "Miscellaneous Symbols and Pictographs": range(127744, 128512), + "Emoticons range(Emoji)": range(128512, 128592), + "Ornamental Dingbats": range(128592, 128640), + "Transport and Map Symbols": range(128640, 128768), + "Alchemical Symbols": range(128768, 128896), + "Geometric Shapes Extended": range(128896, 129024), + "Supplemental Arrows-C": range(129024, 129280), + "Supplemental Symbols and Pictographs": range(129280, 129536), + "Chess Symbols": range(129536, 129648), + "Symbols and Pictographs Extended-A": range(129648, 129792), + "Symbols for Legacy Computing": range(129792, 130048), + "CJK Unified Ideographs Extension B": range(131072, 173792), + "CJK Unified Ideographs Extension C": range(173824, 177984), + "CJK Unified Ideographs Extension D": range(177984, 178208), + "CJK Unified Ideographs Extension E": range(178208, 183984), + "CJK Unified Ideographs Extension F": range(183984, 191472), + "CJK Compatibility Ideographs Supplement": range(194560, 195104), + "CJK Unified Ideographs Extension G": range(196608, 201552), + "CJK Unified Ideographs Extension H": range(201552, 205744), + "Tags": range(917504, 917632), + "Variation Selectors Supplement": range(917760, 918000), + "Supplementary Private Use Area-A": 
range(983040, 1048576), + "Supplementary Private Use Area-B": range(1048576, 1114112), +} + + +UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_NO_ALIASES = [ + "cp720", + "cp737", + "cp856", + "cp874", + "cp875", + "cp1006", + "koi8_r", + "koi8_t", + "koi8_u", +] + +IANA_SUPPORTED: List[str] = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())) + IANA_NO_ALIASES, + ) +) + +IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) + +# pre-computed code page that are similar using the function cp_similarity. +IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} + + +CHARDET_CORRESPONDENCE: Dict[str, str] = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": "ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": 
"UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} + + +COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", +} + + +KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} +ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"} + +# Logging LEVEL below DEBUG +TRACE: int = 5 + + +# Language label that contain the em dash "—" +# character are to be considered alternative seq to origin +FREQUENCIES: Dict[str, List[str]] = { + "English": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + "English—": [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + "German": [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + "French": [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + "Dutch": [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + "Italian": [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + "Polish": [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + "Spanish": [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + "Russian": [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + # Jap-Kanji + "Japanese": [ + "人", + "一", + "大", + "亅", + "丁", + "丨", + "竹", + "笑", + "口", + "日", + "今", + "二", + "彳", + "行", + "十", + "土", + "丶", + "寸", + "寺", + "時", + "乙", + "丿", + "乂", + "气", + "気", + "冂", + "巾", + "亠", + "市", + "目", + "儿", + "見", + "八", + "小", + "凵", + "県", + "月", + "彐", + "門", + "間", + "木", + "東", + "山", + "出", + "本", + "中", + "刀", + "分", + "耳", + "又", + "取", + "最", + "言", + "田", + "心", + "思", + "刂", + "前", + "京", + "尹", + "事", + "生", + "厶", + "云", + "会", + "未", + "来", + "白", + "冫", + "楽", + "灬", + "馬", + "尸", + "尺", + "駅", + "明", + "耂", 
+ "者", + "了", + "阝", + "都", + "高", + "卜", + "占", + "厂", + "广", + "店", + "子", + "申", + "奄", + "亻", + "俺", + "上", + "方", + "冖", + "学", + "衣", + "艮", + "食", + "自", + ], + # Jap-Katakana + "Japanese—": [ + "ー", + "ン", + "ス", + "・", + "ル", + "ト", + "リ", + "イ", + "ア", + "ラ", + "ッ", + "ク", + "ド", + "シ", + "レ", + "ジ", + "タ", + "フ", + "ロ", + "カ", + "テ", + "マ", + "ィ", + "グ", + "バ", + "ム", + "プ", + "オ", + "コ", + "デ", + "ニ", + "ウ", + "メ", + "サ", + "ビ", + "ナ", + "ブ", + "ャ", + "エ", + "ュ", + "チ", + "キ", + "ズ", + "ダ", + "パ", + "ミ", + "ェ", + "ョ", + "ハ", + "セ", + "ベ", + "ガ", + "モ", + "ツ", + "ネ", + "ボ", + "ソ", + "ノ", + "ァ", + "ヴ", + "ワ", + "ポ", + "ペ", + "ピ", + "ケ", + "ゴ", + "ギ", + "ザ", + "ホ", + "ゲ", + "ォ", + "ヤ", + "ヒ", + "ユ", + "ヨ", + "ヘ", + "ゼ", + "ヌ", + "ゥ", + "ゾ", + "ヶ", + "ヂ", + "ヲ", + "ヅ", + "ヵ", + "ヱ", + "ヰ", + "ヮ", + "ヽ", + "゠", + "ヾ", + "ヷ", + "ヿ", + "ヸ", + "ヹ", + "ヺ", + ], + # Jap-Hiragana + "Japanese——": [ + "の", + "に", + "る", + "た", + "と", + "は", + "し", + "い", + "を", + "で", + "て", + "が", + "な", + "れ", + "か", + "ら", + "さ", + "っ", + "り", + "す", + "あ", + "も", + "こ", + "ま", + "う", + "く", + "よ", + "き", + "ん", + "め", + "お", + "け", + "そ", + "つ", + "だ", + "や", + "え", + "ど", + "わ", + "ち", + "み", + "せ", + "じ", + "ば", + "へ", + "び", + "ず", + "ろ", + "ほ", + "げ", + "む", + "べ", + "ひ", + "ょ", + "ゆ", + "ぶ", + "ご", + "ゃ", + "ね", + "ふ", + "ぐ", + "ぎ", + "ぼ", + "ゅ", + "づ", + "ざ", + "ぞ", + "ぬ", + "ぜ", + "ぱ", + "ぽ", + "ぷ", + "ぴ", + "ぃ", + "ぁ", + "ぇ", + "ぺ", + "ゞ", + "ぢ", + "ぉ", + "ぅ", + "ゐ", + "ゝ", + "ゑ", + "゛", + "゜", + "ゎ", + "ゔ", + "゚", + "ゟ", + "゙", + "ゕ", + "ゖ", + ], + "Portuguese": [ + "a", + "e", + "o", + "s", + "i", + "r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + "Swedish": [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + "Chinese": [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + "可", + "也", + "你", + "对", + "生", + "能", + "而", + "子", + "那", + "得", + "于", + "着", + "下", + "自", + "之", + "年", + "过", + "发", + "后", + "作", + "里", + "用", + "道", + "行", + "所", + "然", + "家", + "种", + "事", + "成", + "方", + "多", + "经", + "么", + "去", + "法", + "学", + "如", + "都", + "同", + "现", + "当", + "没", + "动", + "面", + "起", + "看", + "定", + "天", + "分", + "还", + "进", + "好", + "小", + "部", + "其", + "些", + "主", + "样", + "理", + "心", + "她", + "本", + "前", + "开", + "但", + "因", + "只", + "从", + "想", + "实", + ], + "Ukrainian": [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + "Norwegian": [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + "Finnish": [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + "Vietnamese": [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + "Czech": [ + "o", + "e", + "a", + "n", + 
"t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + "Hungarian": [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + "Korean": [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + "Indonesian": [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + "Turkish": [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", + "ğ", + ], + "Romanian": [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + "Farsi": [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + "Arabic": [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + "Danish": [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + "Serbian": [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + "o", + "n", + "ц", + "ш", + ], + "Lithuanian": [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + "Slovene": [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + "Slovak": [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + "Hebrew": [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + "Bulgarian": [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + "Croatian": [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + "Hindi": [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + "Estonian": [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", 
+ "f", + "c", + "ö", + "y", + ], + "Thai": [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + "Greek": [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + "Tamil": [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + "Kazakh": [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], +} + +LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/templates/skills/spotify/dependencies/charset_normalizer/legacy.py b/templates/skills/spotify/dependencies/charset_normalizer/legacy.py new file mode 100644 index 00000000..43aad21a --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/legacy.py @@ -0,0 +1,54 @@ +from typing import Any, Dict, Optional, Union +from warnings import warn + +from .api import from_bytes +from .constant import CHARDET_CORRESPONDENCE + + +def detect( + byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any +) -> Dict[str, Optional[Union[str, float]]]: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) + This function is deprecated and should be used to migrate your project easily, consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + """ + if len(kwargs): + warn( + f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" + ) + + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + "Expected object of type bytes or bytearray, got: " + "{0}".format(type(byte_str)) + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name. 
+ if r is not None and encoding == "utf_8" and r.bom: + encoding += "_sig" + + if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: + encoding = CHARDET_CORRESPONDENCE[encoding] + + return { + "encoding": encoding, + "language": language, + "confidence": confidence, + } diff --git a/templates/skills/spotify/dependencies/charset_normalizer/md.py b/templates/skills/spotify/dependencies/charset_normalizer/md.py new file mode 100644 index 00000000..77897aae --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/md.py @@ -0,0 +1,615 @@ +from functools import lru_cache +from logging import getLogger +from typing import List, Optional + +from .constant import ( + COMMON_SAFE_ASCII_CHARACTERS, + TRACE, + UNICODE_SECONDARY_RANGE_KEYWORD, +) +from .utils import ( + is_accentuated, + is_arabic, + is_arabic_isolated_form, + is_case_variable, + is_cjk, + is_emoticon, + is_hangul, + is_hiragana, + is_katakana, + is_latin, + is_punctuation, + is_separator, + is_symbol, + is_thai, + is_unprintable, + remove_accent, + unicode_range, +) + + +class MessDetectorPlugin: + """ + Base abstract class used for mess detection plugins. + All detectors MUST extend and implement given methods. + """ + + def eligible(self, character: str) -> bool: + """ + Determine if given character should be fed in. + """ + raise NotImplementedError # pragma: nocover + + def feed(self, character: str) -> None: + """ + The main routine to be executed upon character. + Insert the logic in witch the text would be considered chaotic. + """ + raise NotImplementedError # pragma: nocover + + def reset(self) -> None: # pragma: no cover + """ + Permit to reset the plugin to the initial state. + """ + raise NotImplementedError + + @property + def ratio(self) -> float: + """ + Compute the chaos ratio based on what your feed() has seen. + Must NOT be lower than 0.; No restriction gt 0. 
+ """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count: int = 0 + self._symbol_count: int = 0 + self._character_count: int = 0 + + self._last_printable_char: Optional[str] = None + self._frenzy_symbol_in_word: bool = False + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # pragma: no cover + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation: float = ( + self._punctuation_count + self._symbol_count + ) / self._character_count + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._accentuated_count: int = 0 + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + ratio_of_accentuation: float = self._accentuated_count / self._character_count + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count: int = 0 + self._character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if is_unprintable(character): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count: int = 0 + self._character_count: int = 0 + + self._last_latin_character: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. 
+ if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # pragma: no cover + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count: int = 0 + self._character_count: int = 0 + self._last_printable_seen: Optional[str] = None + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) + unicode_range_b: Optional[str] = unicode_range(character) + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count <= 24: + return 0.0 + + ratio_of_suspicious_range_usage: float = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count: int = 0 + self._bad_word_count: int = 0 + self._foreign_long_count: int = 0 + + self._is_current_word_bad: bool = False + self._foreign_long_watch: bool = False + + self._character_count: int = 0 + self._bad_character_count: int = 0 + + self._buffer: str = "" + self._buffer_accent_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer += character + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length: int = len(self._buffer) + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length > 0.34: + self._is_current_word_bad = True + # Word/Buffer ending with an upper case accentuated letter are so rare, + # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
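The plugins in this file are aggregated by mess_ratio(), defined further down; a quick illustrative call (the scores are approximate and depend on the heuristics above):

from charset_normalizer.md import mess_ratio

print(mess_ratio("A clean, ordinary English sentence."))  # close to 0.0
print(mess_ratio("Ã‡a c'est du texte mal dÃ©codÃ©"))      # typically noticeably higher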
+ if ( + is_accentuated(self._buffer[-1]) + and self._buffer[-1].isupper() + and all(_.isupper() for _ in self._buffer) is False + ): + self._foreign_long_count += 1 + self._is_current_word_bad = True + if buffer_length >= 24 and self._foreign_long_watch: + camel_case_dst = [ + i + for c, i in zip(self._buffer, range(0, buffer_length)) + if c.isupper() + ] + probable_camel_cased: bool = False + + if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): + probable_camel_cased = True + + if not probable_camel_cased: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # pragma: no cover + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkInvalidStopPlugin(MessDetectorPlugin): + """ + GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and + can be easily detected. Searching for the overuse of '丅' and '丄'. + """ + + def __init__(self) -> None: + self._wrong_stop_count: int = 0 + self._cjk_character_count: int = 0 + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character in {"丅", "丄"}: + self._wrong_stop_count += 1 + return + if is_cjk(character): + self._cjk_character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._wrong_stop_count = 0 + self._cjk_character_count = 0 + + @property + def ratio(self) -> float: + if self._cjk_character_count < 16: + return 0.0 + return self._wrong_stop_count / self._cjk_character_count + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf: bool = False + + self._character_count_since_last_sep: int = 0 + + self._successive_upper_lower_count: int = 0 + self._successive_upper_lower_count_final: int = 0 + + self._character_count: int = 0 + + self._last_alpha_seen: Optional[str] = None + self._current_ascii_only: bool = True + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and character.isascii() is False: + self._current_ascii_only = False + + if self._last_alpha_seen is not None: 
+ if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + self._character_count += 1 + self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +class ArabicIsolatedFormPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count: int = 0 + self._isolated_form_count: int = 0 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._isolated_form_count = 0 + + def eligible(self, character: str) -> bool: + return is_arabic(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_arabic_isolated_form(character): + self._isolated_form_count += 1 + + @property + def ratio(self) -> float: + if self._character_count < 8: + return 0.0 + + isolated_form_usage: float = self._isolated_form_count / self._character_count + + return isolated_form_usage + + +@lru_cache(maxsize=1024) +def is_suspiciously_successive_range( + unicode_range_a: Optional[str], unicode_range_b: Optional[str] +) -> bool: + """ + Determine if two Unicode range seen next to each other can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. + if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = unicode_range_a.split( + " " + ), unicode_range_b.split(" ") + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. 
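A few illustrative calls, consistent with the branches of this function (the assertions are mine, for orientation only):

from charset_normalizer.md import is_suspiciously_successive_range

assert is_suspiciously_successive_range(None, "Basic Latin") is True         # unknown range is suspicious
assert is_suspiciously_successive_range("Basic Latin", "Cyrillic") is True   # unrelated scripts
assert is_suspiciously_successive_range("Basic Latin", "Latin Extended-A") is False  # both Latin
assert is_suspiciously_successive_range("Hiragana", "Katakana") is False     # Japanese exception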
+ if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. + """ + + detectors: List[MessDetectorPlugin] = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] + + length: int = len(decoded_sequence) + 1 + + mean_mess_ratio: float = 0.0 + + if length < 512: + intermediary_mean_mess_ratio_calc: int = 32 + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + logger = getLogger("charset_normalizer") + + logger.log( + TRACE, + "Mess-detector extended-analysis start. " + f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " + f"maximum_threshold={maximum_threshold}", + ) + + if len(decoded_sequence) > 16: + logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") + logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") + + for dt in detectors: # pragma: nocover + logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") + + return round(mean_mess_ratio, 3) diff --git a/templates/skills/spotify/dependencies/charset_normalizer/models.py b/templates/skills/spotify/dependencies/charset_normalizer/models.py new file mode 100644 index 00000000..a760b9c5 --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/models.py @@ -0,0 +1,340 @@ +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + +from .constant import TOO_BIG_SEQUENCE +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: "CoherenceMatches", + decoded_payload: Optional[str] = None, + ): + self._payload: bytes = payload + + self._encoding: str = guessed_encoding + self._mean_mess_ratio: float = mean_mess_ratio + self._languages: CoherenceMatches = languages + self._has_sig_or_bom: bool = has_sig_or_bom + self._unicode_ranges: Optional[List[str]] = None + + self._leaves: List[CharsetMatch] = [] + self._mean_coherence_ratio: float = 0.0 + + self._output_payload: Optional[bytes] = None + self._output_encoding: Optional[str] = None + + self._string: Optional[str] = decoded_payload + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + raise TypeError( + "__eq__ cannot be invoked on {} and {}.".format( + str(other.__class__), str(self.__class__) + ) + ) + return self.encoding == 
other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference: float = abs(self.chaos - other.chaos) + coherence_difference: float = abs(self.coherence - other.coherence) + + # Below 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + return self.coherence > other.coherence + elif chaos_difference < 0.01 and coherence_difference <= 0.02: + # When having a difficult decision, use the result that decoded as many multi-byte as possible. + # preserve RAM usage! + if len(self._payload) >= TOO_BIG_SEQUENCE: + return self.chaos < other.chaos + return self.multi_byte_usage > other.multi_byte_usage + + return self.chaos < other.chaos + + @property + def multi_byte_usage(self) -> float: + return 1.0 - (len(str(self)) / len(self.raw)) + + def __str__(self) -> str: + # Lazy Str Loading + if self._string is None: + self._string = str(self._payload, self._encoding, "strict") + return self._string + + def __repr__(self) -> str: + return "<CharsetMatch '{}' bytes({})>".format(self.encoding, self.fingerprint) + + def add_submatch(self, other: "CharsetMatch") -> None: + if not isinstance(other, CharsetMatch) or other == self: + raise ValueError( + "Unable to add instance <{}> as a submatch of a CharsetMatch".format( + other.__class__ + ) + ) + + other._string = None # Unload RAM usage; dirty trick. + self._leaves.append(other) + + @property + def encoding(self) -> str: + return self._encoding + + @property + def encoding_aliases(self) -> List[str]: + """ + Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. + """ + also_known_as: List[str] = [] + for u, p in aliases.items(): + if self.encoding == u: + also_known_as.append(p) + elif self.encoding == p: + also_known_as.append(u) + return also_known_as + + @property + def bom(self) -> bool: + return self._has_sig_or_bom + + @property + def byte_order_mark(self) -> bool: + return self._has_sig_or_bom + + @property + def languages(self) -> List[str]: + """ + Return the complete list of possible languages found in decoded sequence. + Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. + """ + return [e[0] for e in self._languages] + + @property + def language(self) -> str: + """ + Most probable language found in decoded sequence. If none were detected or inferred, the property will return + "Unknown". + """ + if not self._languages: + # Trying to infer the language based on the given encoding + # Its either English or we should not pronounce ourselves in certain cases.
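For orientation, a sketch of how a CharsetMatch is normally obtained and queried, assuming the package's top-level from_bytes export; the printed encoding and language are plausible outputs, not guaranteed:

from charset_normalizer import from_bytes

best_guess = from_bytes("Привет, мир!".encode("cp1251")).best()
if best_guess is not None:
    print(best_guess.encoding)          # e.g. "cp1251"
    print(best_guess.language)          # e.g. "Russian"
    print(best_guess.percent_chaos)     # mess ratio, as a percentage
    print(best_guess.fingerprint[:12])  # SHA256 of the re-encoded payload
    text = str(best_guess)              # lazily decoded text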
+ if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. + """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges: List[Optional[str]] = [ + unicode_range(char) for char in str(self) + ] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: Optional[List[CharsetMatch]] = None): + self._results: List[CharsetMatch] = sorted(results) if results else [] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. + """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. 
Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path: str = path + self.unicode_path: Optional[str] = unicode_path + self.encoding: Optional[str] = encoding + self.encoding_aliases: List[str] = encoding_aliases + self.alternative_encodings: List[str] = alternative_encodings + self.language: str = language + self.alphabets: List[str] = alphabets + self.has_sig_or_bom: bool = has_sig_or_bom + self.chaos: float = chaos + self.coherence: float = coherence + self.is_preferred: bool = is_preferred + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/templates/skills/spotify/dependencies/charset_normalizer/py.typed b/templates/skills/spotify/dependencies/charset_normalizer/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/templates/skills/spotify/dependencies/charset_normalizer/utils.py b/templates/skills/spotify/dependencies/charset_normalizer/utils.py new file mode 100644 index 00000000..e5cbbf4c --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/utils.py @@ -0,0 +1,421 @@ +import importlib +import logging +import unicodedata +from codecs import IncrementalDecoder +from encodings.aliases import aliases +from functools import lru_cache +from re import findall +from typing import Generator, List, Optional, Set, Tuple, Union + +from _multibytecodec import MultibyteIncrementalDecoder + +from .constant import ( + ENCODING_MARKS, + IANA_SUPPORTED_SIMILAR, + RE_POSSIBLE_ENCODING_INDICATION, + UNICODE_RANGES_COMBINED, + UNICODE_SECONDARY_RANGE_KEYWORD, + UTF8_MAXIMAL_ALLOCATION, +) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_accentuated(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return ( + "WITH GRAVE" in 
description + or "WITH ACUTE" in description + or "WITH CEDILLA" in description + or "WITH DIAERESIS" in description + or "WITH CIRCUMFLEX" in description + or "WITH TILDE" in description + or "WITH MACRON" in description + or "WITH RING ABOVE" in description + ) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def remove_accent(character: str) -> str: + decomposed: str = unicodedata.decomposition(character) + if not decomposed: + return character + + codes: List[str] = decomposed.split(" ") + + return chr(int(codes[0], 16)) + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def unicode_range(character: str) -> Optional[str]: + """ + Retrieve the Unicode range official name from a single character. + """ + character_ord: int = ord(character) + + for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): + if character_ord in ord_range: + return range_name + + return None + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_latin(character: str) -> bool: + try: + description: str = unicodedata.name(character) + except ValueError: + return False + return "LATIN" in description + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_punctuation(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "P" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Punctuation" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_symbol(character: str) -> bool: + character_category: str = unicodedata.category(character) + + if "S" in character_category or "N" in character_category: + return True + + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Forms" in character_range and character_category != "Lo" + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_emoticon(character: str) -> bool: + character_range: Optional[str] = unicode_range(character) + + if character_range is None: + return False + + return "Emoticons" in character_range or "Pictographs" in character_range + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_separator(character: str) -> bool: + if character.isspace() or character in {"|", "+", "<", ">"}: + return True + + character_category: str = unicodedata.category(character) + + return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_case_variable(character: str) -> bool: + return character.islower() != character.isupper() + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_cjk(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "CJK" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hiragana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HIRAGANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_katakana(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "KATAKANA" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_hangul(character: str) -> bool: + try: + character_name = unicodedata.name(character) + except ValueError: + return False + + return "HANGUL" in character_name + + +@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) +def is_thai(character: str) -> bool: + 
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "THAI" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_arabic(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "ARABIC" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_arabic_isolated_form(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "ARABIC" in character_name and "ISOLATED FORM" in character_name
+
+
+@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
+def is_unicode_range_secondary(range_name: str) -> bool:
+    return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD)
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_unprintable(character: str) -> bool:
+    return (
+        character.isspace() is False  # includes \n \t \r \v
+        and character.isprintable() is False
+        and character != "\x1A"  # Why? It's the ASCII substitute character.
+        and character != "\ufeff"  # bug discovered in Python,
+        # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space.
+    )
+
+
+def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]:
+    """
+    Extract any specified encoding in the first n bytes using an ASCII-only decoder.
+    """
+    if not isinstance(sequence, bytes):
+        raise TypeError
+
+    seq_len: int = len(sequence)
+
+    results: List[str] = findall(
+        RE_POSSIBLE_ENCODING_INDICATION,
+        sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"),
+    )
+
+    if len(results) == 0:
+        return None
+
+    for specified_encoding in results:
+        specified_encoding = specified_encoding.lower().replace("-", "_")
+
+        encoding_alias: str
+        encoding_iana: str
+
+        for encoding_alias, encoding_iana in aliases.items():
+            if encoding_alias == specified_encoding:
+                return encoding_iana
+            if encoding_iana == specified_encoding:
+                return encoding_iana
+
+    return None
+
+
+@lru_cache(maxsize=128)
+def is_multi_byte_encoding(name: str) -> bool:
+    """
+    Verify whether a specific encoding is a multi-byte one based on its IANA name.
+    """
+    return name in {
+        "utf_8",
+        "utf_8_sig",
+        "utf_16",
+        "utf_16_be",
+        "utf_16_le",
+        "utf_32",
+        "utf_32_le",
+        "utf_32_be",
+        "utf_7",
+    } or issubclass(
+        importlib.import_module("encodings.{}".format(name)).IncrementalDecoder,
+        MultibyteIncrementalDecoder,
+    )
+
+
+def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]:
+    """
+    Identify and extract SIG/BOM in a given sequence.
+    """
+
+    for iana_encoding in ENCODING_MARKS:
+        marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding]
+
+        if isinstance(marks, bytes):
+            marks = [marks]
+
+        for mark in marks:
+            if sequence.startswith(mark):
+                return iana_encoding, mark
+
+    return None, b""
+
+
+def should_strip_sig_or_bom(iana_encoding: str) -> bool:
+    return iana_encoding not in {"utf_16", "utf_32"}
+
+
+def iana_name(cp_name: str, strict: bool = True) -> str:
+    cp_name = cp_name.lower().replace("-", "_")
+
+    encoding_alias: str
+    encoding_iana: str
+
+    for encoding_alias, encoding_iana in aliases.items():
+        if cp_name in [encoding_alias, encoding_iana]:
+            return encoding_iana
+
+    if strict:
+        raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name))
+
+    return cp_name
+
+
+def range_scan(decoded_sequence: str) -> List[str]:
+    ranges: Set[str] = set()
+
+    for character in decoded_sequence:
+        character_range: Optional[str] = unicode_range(character)
+
+        if character_range is None:
+            continue
+
+        ranges.add(character_range)
+
+    return list(ranges)
+
+
+def cp_similarity(iana_name_a: str, iana_name_b: str) -> float:
+    if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b):
+        return 0.0
+
+    decoder_a = importlib.import_module(
+        "encodings.{}".format(iana_name_a)
+    ).IncrementalDecoder
+    decoder_b = importlib.import_module(
+        "encodings.{}".format(iana_name_b)
+    ).IncrementalDecoder
+
+    id_a: IncrementalDecoder = decoder_a(errors="ignore")
+    id_b: IncrementalDecoder = decoder_b(errors="ignore")
+
+    character_match_count: int = 0
+
+    for i in range(255):
+        to_be_decoded: bytes = bytes([i])
+        if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded):
+            character_match_count += 1
+
+    return character_match_count / 254
+
+
+def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool:
+    """
+    Determine if two code pages are at least 80% similar. The IANA_SUPPORTED_SIMILAR dict was generated using
+    the function cp_similarity.
+    """
+    return (
+        iana_name_a in IANA_SUPPORTED_SIMILAR
+        and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a]
+    )
+
+
+def set_logging_handler(
+    name: str = "charset_normalizer",
+    level: int = logging.INFO,
+    format_string: str = "%(asctime)s | %(levelname)s | %(message)s",
+) -> None:
+    logger = logging.getLogger(name)
+    logger.setLevel(level)
+
+    handler = logging.StreamHandler()
+    handler.setFormatter(logging.Formatter(format_string))
+    logger.addHandler(handler)
+
+
+def cut_sequence_chunks(
+    sequences: bytes,
+    encoding_iana: str,
+    offsets: range,
+    chunk_size: int,
+    bom_or_sig_available: bool,
+    strip_sig_or_bom: bool,
+    sig_payload: bytes,
+    is_multi_byte_decoder: bool,
+    decoded_payload: Optional[str] = None,
+) -> Generator[str, None, None]:
+    if decoded_payload and is_multi_byte_decoder is False:
+        for i in offsets:
+            chunk = decoded_payload[i : i + chunk_size]
+            if not chunk:
+                break
+            yield chunk
+    else:
+        for i in offsets:
+            chunk_end = i + chunk_size
+            if chunk_end > len(sequences) + 8:
+                continue
+
+            cut_sequence = sequences[i : i + chunk_size]
+
+            if bom_or_sig_available and strip_sig_or_bom is False:
+                cut_sequence = sig_payload + cut_sequence
+
+            chunk = cut_sequence.decode(
+                encoding_iana,
+                errors="ignore" if is_multi_byte_decoder else "strict",
+            )
+
+            # multi-byte bad cutting detector and adjustment
+            # not the cleanest way to perform that fix but clever enough for now.
+ if is_multi_byte_decoder and i > 0: + chunk_partial_size_chk: int = min(chunk_size, 16) + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j:chunk_end] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + yield chunk diff --git a/templates/skills/spotify/dependencies/charset_normalizer/version.py b/templates/skills/spotify/dependencies/charset_normalizer/version.py new file mode 100644 index 00000000..5a4da4ff --- /dev/null +++ b/templates/skills/spotify/dependencies/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "3.3.2" +VERSION = __version__.split(".") diff --git a/templates/skills/spotify/dependencies/idna-3.7.dist-info/INSTALLER b/templates/skills/spotify/dependencies/idna-3.7.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/idna-3.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/idna-3.7.dist-info/LICENSE.md b/templates/skills/spotify/dependencies/idna-3.7.dist-info/LICENSE.md new file mode 100644 index 00000000..19b6b452 --- /dev/null +++ b/templates/skills/spotify/dependencies/idna-3.7.dist-info/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2024, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
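The charset_normalizer helpers vendored earlier in this diff (any_specified_encoding, identify_sig_or_bom, unicode_range) are self-contained and can be sanity-checked in isolation. A minimal sketch, assuming the skill's dependencies directory is on sys.path so the package resolves as charset_normalizer; the payloads and the expected values in the comments are illustrative assumptions, not part of the diff:

    from charset_normalizer.utils import (
        any_specified_encoding,
        identify_sig_or_bom,
        unicode_range,
    )

    # A charset declaration found in the first bytes is normalized to its
    # IANA name via the stdlib encodings.aliases table.
    print(any_specified_encoding(b'<meta charset="ISO-8859-1">'))
    # expected: latin_1

    # A UTF-8 BOM is detected and returned together with the raw mark, so
    # callers can strip it before decoding.
    encoding, mark = identify_sig_or_bom(b"\xef\xbb\xbfhello")
    print(encoding, mark)
    # expected: utf_8 b'\xef\xbb\xbf'

    # Unicode range lookup that backs the per-character classifiers above.
    print(unicode_range("é"))
    # expected: Latin-1 Supplement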
diff --git a/templates/skills/spotify/dependencies/idna-3.7.dist-info/METADATA b/templates/skills/spotify/dependencies/idna-3.7.dist-info/METADATA
new file mode 100644
index 00000000..b28f6ecd
--- /dev/null
+++ b/templates/skills/spotify/dependencies/idna-3.7.dist-info/METADATA
@@ -0,0 +1,243 @@
+Metadata-Version: 2.1
+Name: idna
+Version: 3.7
+Summary: Internationalized Domain Names in Applications (IDNA)
+Author-email: Kim Davies
+Requires-Python: >=3.5
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst
+Project-URL: Issue tracker, https://github.com/kjd/idna/issues
+Project-URL: Source, https://github.com/kjd/idna
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+Support for the Internationalized Domain Names in
+Applications (IDNA) protocol as specified in `RFC 5891
+<https://tools.ietf.org/html/rfc5891>`_. This is the latest version of
+the protocol and is sometimes referred to as “IDNA 2008”.
+
+This library also provides support for Unicode Technical
+Standard 46, `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_.
+
+This acts as a suitable replacement for the “encodings.idna”
+module that comes with the Python standard library, but which
+only supports the older superseded IDNA specification (`RFC 3490
+<https://tools.ietf.org/html/rfc3490>`_).
+
+Basic functions are simply executed:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+
+Installation
+------------
+
+This package is available for installation from PyPI:
+
+.. code-block:: bash
+
+    $ python3 -m pip install idna
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a
+domain name argument and perform a conversion to A-labels or U-labels
+respectively.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah'))
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module:
+
+.. code-block:: pycon
+
+    >>> import idna.codec
+    >>> print('домен.испытание'.encode('idna2008'))
+    b'xn--d1acufc.xn--80akhbyknj4f'
+    >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008'))
+    домен.испытание
+
+Conversions can be applied on a per-label basis using the ``ulabel`` or
+``alabel`` functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel('测试')
+    b'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in `RFC 5895 <https://tools.ietf.org/html/rfc5895>`_, the
+IDNA specification does not normalize input from different potential
+ways a user may input a domain name. This functionality, known as
+a “mapping”, is considered by the specification to be a local
+user-interface issue distinct from IDNA conversion functionality.
+
+This library provides one such mapping that was developed by the
+Unicode Consortium. Known as `Unicode IDNA Compatibility Processing
+<https://unicode.org/reports/tr46/>`_, it provides for both a regular
+mapping for typical applications, as well as a transitional mapping to
+help migrate from older IDNA 2003 applications.
+
+For example, “Königsgäßchen” is not a permissible label as *LATIN
+CAPITAL LETTER K* is not allowed (nor are capital letters in general).
+UTS 46 will convert this into lower case prior to applying the IDNA
+conversion.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode('Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed
+    >>> idna.encode('Königsgäßchen', uts46=True)
+    b'xn--knigsgchen-b4a3dun'
+    >>> print(idna.decode('xn--knigsgchen-b4a3dun'))
+    königsgäßchen
+
+Transitional processing provides conversions to help transition from
+the older 2003 standard to the current standard. For example, in the
+original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was
+converted into two *LATIN SMALL LETTER S* (ss), whereas in the current
+IDNA specification this conversion is not performed.
+
+.. code-block:: pycon
+
+    >>> idna.encode('Königsgäßchen', uts46=True, transitional=True)
+    'xn--knigsgsschen-lcb0w'
+
+Implementers should use transitional processing with caution, only in
+rare cases where conversion from legacy labels to current labels must be
+performed (i.e. IDNA implementations that pre-date 2008). For typical
+applications that just need to convert labels, transitional processing
+is unlikely to be beneficial and could produce unexpected incompatible
+results.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapped to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the new
+module name.
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification
+should raise an exception derived from the ``idna.IDNAError`` base
+class.
+
+More specific exceptions that may be generated are ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint`` when
+a specific codepoint is an illegal character in an IDN label (i.e.
+INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
+illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied).
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance. These tables are derived from computing against
+eligibility criteria in the respective standards. These tables are
+computed using the command-line script ``tools/idna-data``.
+
+This tool will fetch relevant codepoint data from the Unicode repository
+and perform the required calculations to identify eligibility. There are
+three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and
+  ``uts46data.py``, the pre-calculated lookup tables used for IDNA and
+  UTS 46 conversions. Implementers who wish to track this library against
+  a different Unicode version may use this tool to manually generate a
+  different version of the ``idnadata.py`` and ``uts46data.py`` files.
+
+* ``idna-data make-table``. Generate a table of the IDNA disposition
+  (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix
+  B.1 of RFC 5892 and the pre-computed tables published by `IANA
+  <https://www.iana.org/assignments/idna-tables>`_.
+
+* ``idna-data U+0061``. Prints debugging output on the various
+  properties associated with an individual Unicode codepoint (in this
+  case, U+0061), that are used to assess the IDNA and UTS 46 status of a
+  codepoint. This is helpful in debugging or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data
+-h``. Most notably, the ``--version`` argument allows the specification
+of the version of Unicode to be used in computing the table data. For
+example, ``idna-data --version 9.0.0 make-libdata`` will generate
+library data against Unicode 9.0.0.
+
+
+Additional Notes
+----------------
+
+* **Packages**. The latest tagged release version is published in the
+  `Python Package Index <https://pypi.org/project/idna/>`_.
+
+* **Version support**. This library supports Python 3.5 and higher.
+  As this library serves as a low-level toolkit for a variety of
+  applications, many of which strive for broad compatibility with older
+  Python versions, there is no rush to remove older interpreter support.
+  Removing support for older versions should be well justified in that the
+  maintenance burden has become too high.
+
+* **Python 2**. Python 2 is supported by version 2.x of this library.
+  While active development of the version 2.x series has ended, notable
+  issues being corrected may be backported to 2.x. Use "idna<3" in your
+  requirements file if you need this library for a Python 2 application.
+
+* **Testing**. The library has a test suite based on each rule of the
+  IDNA specification, as well as tests that are provided as part of the
+  Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing
+  <https://unicode.org/reports/tr46/>`_.
+
+* **Emoji**. It is an occasional request to support emoji domains in
+  this library. Encoding of symbols like emoji is expressly prohibited by
+  the technical standard IDNA 2008 and emoji domains are broadly phased
+  out across the domain industry due to associated security risks. For
+  now, applications that need to support these non-compliant labels
+  may wish to consider trying the encode/decode operation in this library
+  first, and then falling back to using ``encodings.idna``. See `the Github
+  project <https://github.com/kjd/idna>`_ for more discussion.
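The Exceptions section above describes the error hierarchy but does not show it end to end. A minimal sketch, assuming the vendored package is importable as idna; the sample inputs and the results noted in the comments are illustrative assumptions, not part of the diff:

    import idna

    for name in ("ドメイン.テスト", "abc-.example", "⒈example"):
        try:
            print(idna.encode(name, uts46=True))
        except idna.IDNAError as exc:
            # IDNAError is the base class; IDNABidiError, InvalidCodepoint
            # and InvalidCodepointContext all derive from it.
            print(type(exc).__name__, "->", exc)

    # expected: the first name encodes to b'xn--eckwd4c7c.xn--zckzah';
    # "abc-.example" raises IDNAError (label ends with a hyphen) and
    # "⒈example" raises InvalidCodepoint (U+2488 is disallowed by UTS 46).

Catching idna.IDNAError alone is enough for callers that only need a pass/fail signal, since the more specific exceptions derive from it.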
+ diff --git a/templates/skills/spotify/dependencies/idna-3.7.dist-info/RECORD b/templates/skills/spotify/dependencies/idna-3.7.dist-info/RECORD new file mode 100644 index 00000000..bf84d391 --- /dev/null +++ b/templates/skills/spotify/dependencies/idna-3.7.dist-info/RECORD @@ -0,0 +1,22 @@ +idna-3.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.7.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 +idna-3.7.dist-info/METADATA,sha256=OixCk-dKLZkPy-MfviOmiPvwJ1O2K_8rqCrFjC_uxy4,9888 +idna-3.7.dist-info/RECORD,, +idna-3.7.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +idna/__pycache__/__init__.cpython-311.pyc,, +idna/__pycache__/codec.cpython-311.pyc,, +idna/__pycache__/compat.cpython-311.pyc,, +idna/__pycache__/core.cpython-311.pyc,, +idna/__pycache__/idnadata.cpython-311.pyc,, +idna/__pycache__/intranges.cpython-311.pyc,, +idna/__pycache__/package_data.cpython-311.pyc,, +idna/__pycache__/uts46data.cpython-311.pyc,, +idna/codec.py,sha256=PS6m-XmdST7Wj7J7ulRMakPDt5EBJyYrT3CPtjh-7t4,3426 +idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +idna/core.py,sha256=lyhpoe2vulEaB_65xhXmoKgO-xUqFDvcwxu5hpNNO4E,12663 +idna/idnadata.py,sha256=dqRwytzkjIHMBa2R1lYvHDwACenZPt8eGVu1Y8UBE-E,78320 +idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +idna/package_data.py,sha256=Tkt0KnIeyIlnHddOaz9WSkkislNgokJAuE-p5GorMqo,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=1KuksWqLuccPXm2uyRVkhfiFLNIhM_H2m4azCcnOqEU,206503 diff --git a/templates/skills/spotify/dependencies/idna-3.7.dist-info/WHEEL b/templates/skills/spotify/dependencies/idna-3.7.dist-info/WHEEL new file mode 100644 index 00000000..3b5e64b5 --- /dev/null +++ b/templates/skills/spotify/dependencies/idna-3.7.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/templates/skills/spotify/dependencies/idna/__init__.py b/templates/skills/spotify/dependencies/idna/__init__.py new file mode 100644 index 00000000..a40eeafc --- /dev/null +++ b/templates/skills/spotify/dependencies/idna/__init__.py @@ -0,0 +1,44 @@ +from .package_data import __version__ +from .core import ( + IDNABidiError, + IDNAError, + InvalidCodepoint, + InvalidCodepointContext, + alabel, + check_bidi, + check_hyphen_ok, + check_initial_combiner, + check_label, + check_nfc, + decode, + encode, + ulabel, + uts46_remap, + valid_contextj, + valid_contexto, + valid_label_length, + valid_string_length, +) +from .intranges import intranges_contain + +__all__ = [ + "IDNABidiError", + "IDNAError", + "InvalidCodepoint", + "InvalidCodepointContext", + "alabel", + "check_bidi", + "check_hyphen_ok", + "check_initial_combiner", + "check_label", + "check_nfc", + "decode", + "encode", + "intranges_contain", + "ulabel", + "uts46_remap", + "valid_contextj", + "valid_contexto", + "valid_label_length", + "valid_string_length", +] diff --git a/templates/skills/spotify/dependencies/idna/codec.py b/templates/skills/spotify/dependencies/idna/codec.py new file mode 100644 index 00000000..c855a4de --- /dev/null +++ b/templates/skills/spotify/dependencies/idna/codec.py @@ -0,0 +1,118 @@ +from .core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re +from typing import Any, Tuple, Optional + +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class 
Codec(codecs.Codec): + + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b"", 0 + + return encode(data), len(data) + + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return '', 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return b'', 0 + + labels = _unicode_dots_re.split(data) + trailing_dot = b'' + if labels: + if not labels[-1]: + trailing_dot = b'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = b'.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result_bytes = b'.'.join(result) + trailing_dot + size += len(trailing_dot) + return result_bytes, size + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: + if errors != 'strict': + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) + + if not data: + return ('', 0) + + if not isinstance(data, str): + data = str(data, 'ascii') + + labels = _unicode_dots_re.split(data) + trailing_dot = '' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result_str = '.'.join(result) + trailing_dot + size += len(trailing_dot) + return (result_str, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + + +class StreamReader(Codec, codecs.StreamReader): + pass + + +def search_function(name: str) -> Optional[codecs.CodecInfo]: + if name != 'idna2008': + return None + return codecs.CodecInfo( + name=name, + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) + +codecs.register(search_function) diff --git a/templates/skills/spotify/dependencies/idna/compat.py b/templates/skills/spotify/dependencies/idna/compat.py new file mode 100644 index 00000000..786e6bda --- /dev/null +++ b/templates/skills/spotify/dependencies/idna/compat.py @@ -0,0 +1,13 @@ +from .core import * +from .codec import * +from typing import Any, Union + +def ToASCII(label: str) -> bytes: + return encode(label) + +def ToUnicode(label: Union[bytes, bytearray]) -> str: + return decode(label) + +def nameprep(s: Any) -> None: + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') + diff --git a/templates/skills/spotify/dependencies/idna/core.py b/templates/skills/spotify/dependencies/idna/core.py new file mode 100644 index 00000000..0dae61ac --- /dev/null +++ b/templates/skills/spotify/dependencies/idna/core.py @@ -0,0 +1,395 @@ +from . 
import idnadata +import bisect +import unicodedata +import re +from typing import Union, Optional +from .intranges import intranges_contain + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) + if v == 0: + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') + return v + +def _is_script(cp: str, script: str) -> bool: + return intranges_contain(ord(cp), idnadata.scripts[script]) + +def _punycode(s: str) -> bytes: + return s.encode('punycode') + +def _unot(s: int) -> str: + return 'U+{:04X}'.format(s) + + +def valid_label_length(label: Union[bytes, str]) -> bool: + if len(label) > 63: + return False + return True + + +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label: str, check_ltr: bool = False) -> bool: + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = None # type: Optional[str] + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label: str) -> bool: + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') 
+ return True + + +def check_hyphen_ok(label: str) -> bool: + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label: str) -> None: + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label: str, pos: int) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + elif joining_type in [ord('L'), ord('D')]: + ok = True + break + else: + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == ord('T'): + continue + elif joining_type in [ord('R'), ord('D')]: + ok = True + break + else: + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == '\u30fb': + continue + if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): + return True + return False + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + return False + + +def check_label(label: Union[str, bytes, bytearray]) -> None: + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']): + continue + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( + _unot(cp_value), pos+1, repr(label))) + elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label: str) -> bytes: + try: + label_bytes = label.encode('ascii') + ulabel(label_bytes) + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + return label_bytes + except 
UnicodeEncodeError: + pass + + check_label(label) + label_bytes = _alabel_prefix + _punycode(label) + + if not valid_label_length(label_bytes): + raise IDNAError('Label too long') + + return label_bytes + + +def ulabel(label: Union[str, bytes, bytearray]) -> str: + if not isinstance(label, (bytes, bytearray)): + try: + label_bytes = label.encode('ascii') + except UnicodeEncodeError: + check_label(label) + return label + else: + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix):] + if not label_bytes: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label_bytes.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label_bytes) + return label_bytes.decode('ascii') + + try: + label = label_bytes.decode('punycode') + except UnicodeError: + raise IDNAError('Invalid A-label') + check_label(label) + return label + + +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = '' + + for pos, char in enumerate(domain): + code_point = ord(char) + try: + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] + status = uts46row[1] + replacement = None # type: Optional[str] + if len(uts46row) == 3: + replacement = uts46row[2] + if (status == 'V' or + (status == 'D' and not transitional) or + (status == '3' and not std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == 'M' or + (status == '3' and not std3_rules) or + (status == 'D' and transitional)): + output += replacement + elif status != 'I': + raise IndexError() + except IndexError: + raise InvalidCodepoint( + 'Codepoint {} not allowed at position {} in {}'.format( + _unot(code_point), pos + 1, repr(domain))) + + return unicodedata.normalize('NFC', output) + + +def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: + if not isinstance(s, str): + try: + s = str(s, 'ascii') + except UnicodeDecodeError: + raise IDNAError('should pass a unicode string to the function rather than a byte string.') + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + s = alabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: + try: + if not isinstance(s, str): + s = str(s, 'ascii') + except UnicodeDecodeError: + raise IDNAError('Invalid ASCII in A-label') + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split('.') + if not labels or labels == ['']: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for 
label in labels: + s = ulabel(label) + if s: + result.append(s) + else: + raise IDNAError('Empty label') + if trailing_dot: + result.append('') + return '.'.join(result) diff --git a/templates/skills/spotify/dependencies/idna/idnadata.py b/templates/skills/spotify/dependencies/idna/idnadata.py new file mode 100644 index 00000000..c61dcf97 --- /dev/null +++ b/templates/skills/spotify/dependencies/idna/idnadata.py @@ -0,0 +1,4245 @@ +# This file is automatically generated by tools/idna-data + +__version__ = '15.1.0' +scripts = { + 'Greek': ( + 0x37000000374, + 0x37500000378, + 0x37a0000037e, + 0x37f00000380, + 0x38400000385, + 0x38600000387, + 0x3880000038b, + 0x38c0000038d, + 0x38e000003a2, + 0x3a3000003e2, + 0x3f000000400, + 0x1d2600001d2b, + 0x1d5d00001d62, + 0x1d6600001d6b, + 0x1dbf00001dc0, + 0x1f0000001f16, + 0x1f1800001f1e, + 0x1f2000001f46, + 0x1f4800001f4e, + 0x1f5000001f58, + 0x1f5900001f5a, + 0x1f5b00001f5c, + 0x1f5d00001f5e, + 0x1f5f00001f7e, + 0x1f8000001fb5, + 0x1fb600001fc5, + 0x1fc600001fd4, + 0x1fd600001fdc, + 0x1fdd00001ff0, + 0x1ff200001ff5, + 0x1ff600001fff, + 0x212600002127, + 0xab650000ab66, + 0x101400001018f, + 0x101a0000101a1, + 0x1d2000001d246, + ), + 'Han': ( + 0x2e8000002e9a, + 0x2e9b00002ef4, + 0x2f0000002fd6, + 0x300500003006, + 0x300700003008, + 0x30210000302a, + 0x30380000303c, + 0x340000004dc0, + 0x4e000000a000, + 0xf9000000fa6e, + 0xfa700000fada, + 0x16fe200016fe4, + 0x16ff000016ff2, + 0x200000002a6e0, + 0x2a7000002b73a, + 0x2b7400002b81e, + 0x2b8200002cea2, + 0x2ceb00002ebe1, + 0x2ebf00002ee5e, + 0x2f8000002fa1e, + 0x300000003134b, + 0x31350000323b0, + ), + 'Hebrew': ( + 0x591000005c8, + 0x5d0000005eb, + 0x5ef000005f5, + 0xfb1d0000fb37, + 0xfb380000fb3d, + 0xfb3e0000fb3f, + 0xfb400000fb42, + 0xfb430000fb45, + 0xfb460000fb50, + ), + 'Hiragana': ( + 0x304100003097, + 0x309d000030a0, + 0x1b0010001b120, + 0x1b1320001b133, + 0x1b1500001b153, + 0x1f2000001f201, + ), + 'Katakana': ( + 0x30a1000030fb, + 0x30fd00003100, + 0x31f000003200, + 0x32d0000032ff, + 0x330000003358, + 0xff660000ff70, + 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1550001b156, + 0x1b1640001b168, + ), +} +joining_types = { + 0xad: 84, + 0x300: 84, + 0x301: 84, + 0x302: 84, + 0x303: 84, + 0x304: 84, + 0x305: 84, + 0x306: 84, + 0x307: 84, + 0x308: 84, + 0x309: 84, + 0x30a: 84, + 0x30b: 84, + 0x30c: 84, + 0x30d: 84, + 0x30e: 84, + 0x30f: 84, + 0x310: 84, + 0x311: 84, + 0x312: 84, + 0x313: 84, + 0x314: 84, + 0x315: 84, + 0x316: 84, + 0x317: 84, + 0x318: 84, + 0x319: 84, + 0x31a: 84, + 0x31b: 84, + 0x31c: 84, + 0x31d: 84, + 0x31e: 84, + 0x31f: 84, + 0x320: 84, + 0x321: 84, + 0x322: 84, + 0x323: 84, + 0x324: 84, + 0x325: 84, + 0x326: 84, + 0x327: 84, + 0x328: 84, + 0x329: 84, + 0x32a: 84, + 0x32b: 84, + 0x32c: 84, + 0x32d: 84, + 0x32e: 84, + 0x32f: 84, + 0x330: 84, + 0x331: 84, + 0x332: 84, + 0x333: 84, + 0x334: 84, + 0x335: 84, + 0x336: 84, + 0x337: 84, + 0x338: 84, + 0x339: 84, + 0x33a: 84, + 0x33b: 84, + 0x33c: 84, + 0x33d: 84, + 0x33e: 84, + 0x33f: 84, + 0x340: 84, + 0x341: 84, + 0x342: 84, + 0x343: 84, + 0x344: 84, + 0x345: 84, + 0x346: 84, + 0x347: 84, + 0x348: 84, + 0x349: 84, + 0x34a: 84, + 0x34b: 84, + 0x34c: 84, + 0x34d: 84, + 0x34e: 84, + 0x34f: 84, + 0x350: 84, + 0x351: 84, + 0x352: 84, + 0x353: 84, + 0x354: 84, + 0x355: 84, + 0x356: 84, + 0x357: 84, + 0x358: 84, + 0x359: 84, + 0x35a: 84, + 0x35b: 84, + 0x35c: 84, + 0x35d: 84, + 0x35e: 84, + 0x35f: 84, + 0x360: 84, + 0x361: 84, + 0x362: 84, + 0x363: 84, + 0x364: 84, + 
0x365: 84, + 0x366: 84, + 0x367: 84, + 0x368: 84, + 0x369: 84, + 0x36a: 84, + 0x36b: 84, + 0x36c: 84, + 0x36d: 84, + 0x36e: 84, + 0x36f: 84, + 0x483: 84, + 0x484: 84, + 0x485: 84, + 0x486: 84, + 0x487: 84, + 0x488: 84, + 0x489: 84, + 0x591: 84, + 0x592: 84, + 0x593: 84, + 0x594: 84, + 0x595: 84, + 0x596: 84, + 0x597: 84, + 0x598: 84, + 0x599: 84, + 0x59a: 84, + 0x59b: 84, + 0x59c: 84, + 0x59d: 84, + 0x59e: 84, + 0x59f: 84, + 0x5a0: 84, + 0x5a1: 84, + 0x5a2: 84, + 0x5a3: 84, + 0x5a4: 84, + 0x5a5: 84, + 0x5a6: 84, + 0x5a7: 84, + 0x5a8: 84, + 0x5a9: 84, + 0x5aa: 84, + 0x5ab: 84, + 0x5ac: 84, + 0x5ad: 84, + 0x5ae: 84, + 0x5af: 84, + 0x5b0: 84, + 0x5b1: 84, + 0x5b2: 84, + 0x5b3: 84, + 0x5b4: 84, + 0x5b5: 84, + 0x5b6: 84, + 0x5b7: 84, + 0x5b8: 84, + 0x5b9: 84, + 0x5ba: 84, + 0x5bb: 84, + 0x5bc: 84, + 0x5bd: 84, + 0x5bf: 84, + 0x5c1: 84, + 0x5c2: 84, + 0x5c4: 84, + 0x5c5: 84, + 0x5c7: 84, + 0x610: 84, + 0x611: 84, + 0x612: 84, + 0x613: 84, + 0x614: 84, + 0x615: 84, + 0x616: 84, + 0x617: 84, + 0x618: 84, + 0x619: 84, + 0x61a: 84, + 0x61c: 84, + 0x620: 68, + 0x622: 82, + 0x623: 82, + 0x624: 82, + 0x625: 82, + 0x626: 68, + 0x627: 82, + 0x628: 68, + 0x629: 82, + 0x62a: 68, + 0x62b: 68, + 0x62c: 68, + 0x62d: 68, + 0x62e: 68, + 0x62f: 82, + 0x630: 82, + 0x631: 82, + 0x632: 82, + 0x633: 68, + 0x634: 68, + 0x635: 68, + 0x636: 68, + 0x637: 68, + 0x638: 68, + 0x639: 68, + 0x63a: 68, + 0x63b: 68, + 0x63c: 68, + 0x63d: 68, + 0x63e: 68, + 0x63f: 68, + 0x640: 67, + 0x641: 68, + 0x642: 68, + 0x643: 68, + 0x644: 68, + 0x645: 68, + 0x646: 68, + 0x647: 68, + 0x648: 82, + 0x649: 68, + 0x64a: 68, + 0x64b: 84, + 0x64c: 84, + 0x64d: 84, + 0x64e: 84, + 0x64f: 84, + 0x650: 84, + 0x651: 84, + 0x652: 84, + 0x653: 84, + 0x654: 84, + 0x655: 84, + 0x656: 84, + 0x657: 84, + 0x658: 84, + 0x659: 84, + 0x65a: 84, + 0x65b: 84, + 0x65c: 84, + 0x65d: 84, + 0x65e: 84, + 0x65f: 84, + 0x66e: 68, + 0x66f: 68, + 0x670: 84, + 0x671: 82, + 0x672: 82, + 0x673: 82, + 0x675: 82, + 0x676: 82, + 0x677: 82, + 0x678: 68, + 0x679: 68, + 0x67a: 68, + 0x67b: 68, + 0x67c: 68, + 0x67d: 68, + 0x67e: 68, + 0x67f: 68, + 0x680: 68, + 0x681: 68, + 0x682: 68, + 0x683: 68, + 0x684: 68, + 0x685: 68, + 0x686: 68, + 0x687: 68, + 0x688: 82, + 0x689: 82, + 0x68a: 82, + 0x68b: 82, + 0x68c: 82, + 0x68d: 82, + 0x68e: 82, + 0x68f: 82, + 0x690: 82, + 0x691: 82, + 0x692: 82, + 0x693: 82, + 0x694: 82, + 0x695: 82, + 0x696: 82, + 0x697: 82, + 0x698: 82, + 0x699: 82, + 0x69a: 68, + 0x69b: 68, + 0x69c: 68, + 0x69d: 68, + 0x69e: 68, + 0x69f: 68, + 0x6a0: 68, + 0x6a1: 68, + 0x6a2: 68, + 0x6a3: 68, + 0x6a4: 68, + 0x6a5: 68, + 0x6a6: 68, + 0x6a7: 68, + 0x6a8: 68, + 0x6a9: 68, + 0x6aa: 68, + 0x6ab: 68, + 0x6ac: 68, + 0x6ad: 68, + 0x6ae: 68, + 0x6af: 68, + 0x6b0: 68, + 0x6b1: 68, + 0x6b2: 68, + 0x6b3: 68, + 0x6b4: 68, + 0x6b5: 68, + 0x6b6: 68, + 0x6b7: 68, + 0x6b8: 68, + 0x6b9: 68, + 0x6ba: 68, + 0x6bb: 68, + 0x6bc: 68, + 0x6bd: 68, + 0x6be: 68, + 0x6bf: 68, + 0x6c0: 82, + 0x6c1: 68, + 0x6c2: 68, + 0x6c3: 82, + 0x6c4: 82, + 0x6c5: 82, + 0x6c6: 82, + 0x6c7: 82, + 0x6c8: 82, + 0x6c9: 82, + 0x6ca: 82, + 0x6cb: 82, + 0x6cc: 68, + 0x6cd: 82, + 0x6ce: 68, + 0x6cf: 82, + 0x6d0: 68, + 0x6d1: 68, + 0x6d2: 82, + 0x6d3: 82, + 0x6d5: 82, + 0x6d6: 84, + 0x6d7: 84, + 0x6d8: 84, + 0x6d9: 84, + 0x6da: 84, + 0x6db: 84, + 0x6dc: 84, + 0x6df: 84, + 0x6e0: 84, + 0x6e1: 84, + 0x6e2: 84, + 0x6e3: 84, + 0x6e4: 84, + 0x6e7: 84, + 0x6e8: 84, + 0x6ea: 84, + 0x6eb: 84, + 0x6ec: 84, + 0x6ed: 84, + 0x6ee: 82, + 0x6ef: 82, + 0x6fa: 68, + 0x6fb: 68, + 0x6fc: 68, + 0x6ff: 68, + 0x70f: 84, + 0x710: 82, + 
0x711: 84, + 0x712: 68, + 0x713: 68, + 0x714: 68, + 0x715: 82, + 0x716: 82, + 0x717: 82, + 0x718: 82, + 0x719: 82, + 0x71a: 68, + 0x71b: 68, + 0x71c: 68, + 0x71d: 68, + 0x71e: 82, + 0x71f: 68, + 0x720: 68, + 0x721: 68, + 0x722: 68, + 0x723: 68, + 0x724: 68, + 0x725: 68, + 0x726: 68, + 0x727: 68, + 0x728: 82, + 0x729: 68, + 0x72a: 82, + 0x72b: 68, + 0x72c: 82, + 0x72d: 68, + 0x72e: 68, + 0x72f: 82, + 0x730: 84, + 0x731: 84, + 0x732: 84, + 0x733: 84, + 0x734: 84, + 0x735: 84, + 0x736: 84, + 0x737: 84, + 0x738: 84, + 0x739: 84, + 0x73a: 84, + 0x73b: 84, + 0x73c: 84, + 0x73d: 84, + 0x73e: 84, + 0x73f: 84, + 0x740: 84, + 0x741: 84, + 0x742: 84, + 0x743: 84, + 0x744: 84, + 0x745: 84, + 0x746: 84, + 0x747: 84, + 0x748: 84, + 0x749: 84, + 0x74a: 84, + 0x74d: 82, + 0x74e: 68, + 0x74f: 68, + 0x750: 68, + 0x751: 68, + 0x752: 68, + 0x753: 68, + 0x754: 68, + 0x755: 68, + 0x756: 68, + 0x757: 68, + 0x758: 68, + 0x759: 82, + 0x75a: 82, + 0x75b: 82, + 0x75c: 68, + 0x75d: 68, + 0x75e: 68, + 0x75f: 68, + 0x760: 68, + 0x761: 68, + 0x762: 68, + 0x763: 68, + 0x764: 68, + 0x765: 68, + 0x766: 68, + 0x767: 68, + 0x768: 68, + 0x769: 68, + 0x76a: 68, + 0x76b: 82, + 0x76c: 82, + 0x76d: 68, + 0x76e: 68, + 0x76f: 68, + 0x770: 68, + 0x771: 82, + 0x772: 68, + 0x773: 82, + 0x774: 82, + 0x775: 68, + 0x776: 68, + 0x777: 68, + 0x778: 82, + 0x779: 82, + 0x77a: 68, + 0x77b: 68, + 0x77c: 68, + 0x77d: 68, + 0x77e: 68, + 0x77f: 68, + 0x7a6: 84, + 0x7a7: 84, + 0x7a8: 84, + 0x7a9: 84, + 0x7aa: 84, + 0x7ab: 84, + 0x7ac: 84, + 0x7ad: 84, + 0x7ae: 84, + 0x7af: 84, + 0x7b0: 84, + 0x7ca: 68, + 0x7cb: 68, + 0x7cc: 68, + 0x7cd: 68, + 0x7ce: 68, + 0x7cf: 68, + 0x7d0: 68, + 0x7d1: 68, + 0x7d2: 68, + 0x7d3: 68, + 0x7d4: 68, + 0x7d5: 68, + 0x7d6: 68, + 0x7d7: 68, + 0x7d8: 68, + 0x7d9: 68, + 0x7da: 68, + 0x7db: 68, + 0x7dc: 68, + 0x7dd: 68, + 0x7de: 68, + 0x7df: 68, + 0x7e0: 68, + 0x7e1: 68, + 0x7e2: 68, + 0x7e3: 68, + 0x7e4: 68, + 0x7e5: 68, + 0x7e6: 68, + 0x7e7: 68, + 0x7e8: 68, + 0x7e9: 68, + 0x7ea: 68, + 0x7eb: 84, + 0x7ec: 84, + 0x7ed: 84, + 0x7ee: 84, + 0x7ef: 84, + 0x7f0: 84, + 0x7f1: 84, + 0x7f2: 84, + 0x7f3: 84, + 0x7fa: 67, + 0x7fd: 84, + 0x816: 84, + 0x817: 84, + 0x818: 84, + 0x819: 84, + 0x81b: 84, + 0x81c: 84, + 0x81d: 84, + 0x81e: 84, + 0x81f: 84, + 0x820: 84, + 0x821: 84, + 0x822: 84, + 0x823: 84, + 0x825: 84, + 0x826: 84, + 0x827: 84, + 0x829: 84, + 0x82a: 84, + 0x82b: 84, + 0x82c: 84, + 0x82d: 84, + 0x840: 82, + 0x841: 68, + 0x842: 68, + 0x843: 68, + 0x844: 68, + 0x845: 68, + 0x846: 82, + 0x847: 82, + 0x848: 68, + 0x849: 82, + 0x84a: 68, + 0x84b: 68, + 0x84c: 68, + 0x84d: 68, + 0x84e: 68, + 0x84f: 68, + 0x850: 68, + 0x851: 68, + 0x852: 68, + 0x853: 68, + 0x854: 82, + 0x855: 68, + 0x856: 82, + 0x857: 82, + 0x858: 82, + 0x859: 84, + 0x85a: 84, + 0x85b: 84, + 0x860: 68, + 0x862: 68, + 0x863: 68, + 0x864: 68, + 0x865: 68, + 0x867: 82, + 0x868: 68, + 0x869: 82, + 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x898: 84, + 0x899: 84, + 0x89a: 84, + 0x89b: 84, + 0x89c: 84, + 0x89d: 84, + 0x89e: 84, + 0x89f: 84, + 0x8a0: 68, + 0x8a1: 68, + 0x8a2: 68, + 0x8a3: 68, + 0x8a4: 68, + 0x8a5: 68, + 0x8a6: 68, + 0x8a7: 68, + 0x8a8: 68, + 0x8a9: 68, + 0x8aa: 82, + 0x8ab: 82, + 0x8ac: 82, + 0x8ae: 82, + 
0x8af: 68, + 0x8b0: 68, + 0x8b1: 82, + 0x8b2: 82, + 0x8b3: 68, + 0x8b4: 68, + 0x8b5: 68, + 0x8b6: 68, + 0x8b7: 68, + 0x8b8: 68, + 0x8b9: 82, + 0x8ba: 68, + 0x8bb: 68, + 0x8bc: 68, + 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8c8: 68, + 0x8ca: 84, + 0x8cb: 84, + 0x8cc: 84, + 0x8cd: 84, + 0x8ce: 84, + 0x8cf: 84, + 0x8d0: 84, + 0x8d1: 84, + 0x8d2: 84, + 0x8d3: 84, + 0x8d4: 84, + 0x8d5: 84, + 0x8d6: 84, + 0x8d7: 84, + 0x8d8: 84, + 0x8d9: 84, + 0x8da: 84, + 0x8db: 84, + 0x8dc: 84, + 0x8dd: 84, + 0x8de: 84, + 0x8df: 84, + 0x8e0: 84, + 0x8e1: 84, + 0x8e3: 84, + 0x8e4: 84, + 0x8e5: 84, + 0x8e6: 84, + 0x8e7: 84, + 0x8e8: 84, + 0x8e9: 84, + 0x8ea: 84, + 0x8eb: 84, + 0x8ec: 84, + 0x8ed: 84, + 0x8ee: 84, + 0x8ef: 84, + 0x8f0: 84, + 0x8f1: 84, + 0x8f2: 84, + 0x8f3: 84, + 0x8f4: 84, + 0x8f5: 84, + 0x8f6: 84, + 0x8f7: 84, + 0x8f8: 84, + 0x8f9: 84, + 0x8fa: 84, + 0x8fb: 84, + 0x8fc: 84, + 0x8fd: 84, + 0x8fe: 84, + 0x8ff: 84, + 0x900: 84, + 0x901: 84, + 0x902: 84, + 0x93a: 84, + 0x93c: 84, + 0x941: 84, + 0x942: 84, + 0x943: 84, + 0x944: 84, + 0x945: 84, + 0x946: 84, + 0x947: 84, + 0x948: 84, + 0x94d: 84, + 0x951: 84, + 0x952: 84, + 0x953: 84, + 0x954: 84, + 0x955: 84, + 0x956: 84, + 0x957: 84, + 0x962: 84, + 0x963: 84, + 0x981: 84, + 0x9bc: 84, + 0x9c1: 84, + 0x9c2: 84, + 0x9c3: 84, + 0x9c4: 84, + 0x9cd: 84, + 0x9e2: 84, + 0x9e3: 84, + 0x9fe: 84, + 0xa01: 84, + 0xa02: 84, + 0xa3c: 84, + 0xa41: 84, + 0xa42: 84, + 0xa47: 84, + 0xa48: 84, + 0xa4b: 84, + 0xa4c: 84, + 0xa4d: 84, + 0xa51: 84, + 0xa70: 84, + 0xa71: 84, + 0xa75: 84, + 0xa81: 84, + 0xa82: 84, + 0xabc: 84, + 0xac1: 84, + 0xac2: 84, + 0xac3: 84, + 0xac4: 84, + 0xac5: 84, + 0xac7: 84, + 0xac8: 84, + 0xacd: 84, + 0xae2: 84, + 0xae3: 84, + 0xafa: 84, + 0xafb: 84, + 0xafc: 84, + 0xafd: 84, + 0xafe: 84, + 0xaff: 84, + 0xb01: 84, + 0xb3c: 84, + 0xb3f: 84, + 0xb41: 84, + 0xb42: 84, + 0xb43: 84, + 0xb44: 84, + 0xb4d: 84, + 0xb55: 84, + 0xb56: 84, + 0xb62: 84, + 0xb63: 84, + 0xb82: 84, + 0xbc0: 84, + 0xbcd: 84, + 0xc00: 84, + 0xc04: 84, + 0xc3c: 84, + 0xc3e: 84, + 0xc3f: 84, + 0xc40: 84, + 0xc46: 84, + 0xc47: 84, + 0xc48: 84, + 0xc4a: 84, + 0xc4b: 84, + 0xc4c: 84, + 0xc4d: 84, + 0xc55: 84, + 0xc56: 84, + 0xc62: 84, + 0xc63: 84, + 0xc81: 84, + 0xcbc: 84, + 0xcbf: 84, + 0xcc6: 84, + 0xccc: 84, + 0xccd: 84, + 0xce2: 84, + 0xce3: 84, + 0xd00: 84, + 0xd01: 84, + 0xd3b: 84, + 0xd3c: 84, + 0xd41: 84, + 0xd42: 84, + 0xd43: 84, + 0xd44: 84, + 0xd4d: 84, + 0xd62: 84, + 0xd63: 84, + 0xd81: 84, + 0xdca: 84, + 0xdd2: 84, + 0xdd3: 84, + 0xdd4: 84, + 0xdd6: 84, + 0xe31: 84, + 0xe34: 84, + 0xe35: 84, + 0xe36: 84, + 0xe37: 84, + 0xe38: 84, + 0xe39: 84, + 0xe3a: 84, + 0xe47: 84, + 0xe48: 84, + 0xe49: 84, + 0xe4a: 84, + 0xe4b: 84, + 0xe4c: 84, + 0xe4d: 84, + 0xe4e: 84, + 0xeb1: 84, + 0xeb4: 84, + 0xeb5: 84, + 0xeb6: 84, + 0xeb7: 84, + 0xeb8: 84, + 0xeb9: 84, + 0xeba: 84, + 0xebb: 84, + 0xebc: 84, + 0xec8: 84, + 0xec9: 84, + 0xeca: 84, + 0xecb: 84, + 0xecc: 84, + 0xecd: 84, + 0xece: 84, + 0xf18: 84, + 0xf19: 84, + 0xf35: 84, + 0xf37: 84, + 0xf39: 84, + 0xf71: 84, + 0xf72: 84, + 0xf73: 84, + 0xf74: 84, + 0xf75: 84, + 0xf76: 84, + 0xf77: 84, + 0xf78: 84, + 0xf79: 84, + 0xf7a: 84, + 0xf7b: 84, + 0xf7c: 84, + 0xf7d: 84, + 0xf7e: 84, + 0xf80: 84, + 0xf81: 84, + 0xf82: 84, + 0xf83: 84, + 0xf84: 84, + 0xf86: 84, + 0xf87: 84, + 0xf8d: 84, + 0xf8e: 84, + 0xf8f: 84, + 0xf90: 84, + 0xf91: 84, + 0xf92: 84, + 0xf93: 84, + 0xf94: 84, + 0xf95: 84, + 0xf96: 84, + 0xf97: 84, + 0xf99: 84, + 
0xf9a: 84, + 0xf9b: 84, + 0xf9c: 84, + 0xf9d: 84, + 0xf9e: 84, + 0xf9f: 84, + 0xfa0: 84, + 0xfa1: 84, + 0xfa2: 84, + 0xfa3: 84, + 0xfa4: 84, + 0xfa5: 84, + 0xfa6: 84, + 0xfa7: 84, + 0xfa8: 84, + 0xfa9: 84, + 0xfaa: 84, + 0xfab: 84, + 0xfac: 84, + 0xfad: 84, + 0xfae: 84, + 0xfaf: 84, + 0xfb0: 84, + 0xfb1: 84, + 0xfb2: 84, + 0xfb3: 84, + 0xfb4: 84, + 0xfb5: 84, + 0xfb6: 84, + 0xfb7: 84, + 0xfb8: 84, + 0xfb9: 84, + 0xfba: 84, + 0xfbb: 84, + 0xfbc: 84, + 0xfc6: 84, + 0x102d: 84, + 0x102e: 84, + 0x102f: 84, + 0x1030: 84, + 0x1032: 84, + 0x1033: 84, + 0x1034: 84, + 0x1035: 84, + 0x1036: 84, + 0x1037: 84, + 0x1039: 84, + 0x103a: 84, + 0x103d: 84, + 0x103e: 84, + 0x1058: 84, + 0x1059: 84, + 0x105e: 84, + 0x105f: 84, + 0x1060: 84, + 0x1071: 84, + 0x1072: 84, + 0x1073: 84, + 0x1074: 84, + 0x1082: 84, + 0x1085: 84, + 0x1086: 84, + 0x108d: 84, + 0x109d: 84, + 0x135d: 84, + 0x135e: 84, + 0x135f: 84, + 0x1712: 84, + 0x1713: 84, + 0x1714: 84, + 0x1732: 84, + 0x1733: 84, + 0x1752: 84, + 0x1753: 84, + 0x1772: 84, + 0x1773: 84, + 0x17b4: 84, + 0x17b5: 84, + 0x17b7: 84, + 0x17b8: 84, + 0x17b9: 84, + 0x17ba: 84, + 0x17bb: 84, + 0x17bc: 84, + 0x17bd: 84, + 0x17c6: 84, + 0x17c9: 84, + 0x17ca: 84, + 0x17cb: 84, + 0x17cc: 84, + 0x17cd: 84, + 0x17ce: 84, + 0x17cf: 84, + 0x17d0: 84, + 0x17d1: 84, + 0x17d2: 84, + 0x17d3: 84, + 0x17dd: 84, + 0x1807: 68, + 0x180a: 67, + 0x180b: 84, + 0x180c: 84, + 0x180d: 84, + 0x180f: 84, + 0x1820: 68, + 0x1821: 68, + 0x1822: 68, + 0x1823: 68, + 0x1824: 68, + 0x1825: 68, + 0x1826: 68, + 0x1827: 68, + 0x1828: 68, + 0x1829: 68, + 0x182a: 68, + 0x182b: 68, + 0x182c: 68, + 0x182d: 68, + 0x182e: 68, + 0x182f: 68, + 0x1830: 68, + 0x1831: 68, + 0x1832: 68, + 0x1833: 68, + 0x1834: 68, + 0x1835: 68, + 0x1836: 68, + 0x1837: 68, + 0x1838: 68, + 0x1839: 68, + 0x183a: 68, + 0x183b: 68, + 0x183c: 68, + 0x183d: 68, + 0x183e: 68, + 0x183f: 68, + 0x1840: 68, + 0x1841: 68, + 0x1842: 68, + 0x1843: 68, + 0x1844: 68, + 0x1845: 68, + 0x1846: 68, + 0x1847: 68, + 0x1848: 68, + 0x1849: 68, + 0x184a: 68, + 0x184b: 68, + 0x184c: 68, + 0x184d: 68, + 0x184e: 68, + 0x184f: 68, + 0x1850: 68, + 0x1851: 68, + 0x1852: 68, + 0x1853: 68, + 0x1854: 68, + 0x1855: 68, + 0x1856: 68, + 0x1857: 68, + 0x1858: 68, + 0x1859: 68, + 0x185a: 68, + 0x185b: 68, + 0x185c: 68, + 0x185d: 68, + 0x185e: 68, + 0x185f: 68, + 0x1860: 68, + 0x1861: 68, + 0x1862: 68, + 0x1863: 68, + 0x1864: 68, + 0x1865: 68, + 0x1866: 68, + 0x1867: 68, + 0x1868: 68, + 0x1869: 68, + 0x186a: 68, + 0x186b: 68, + 0x186c: 68, + 0x186d: 68, + 0x186e: 68, + 0x186f: 68, + 0x1870: 68, + 0x1871: 68, + 0x1872: 68, + 0x1873: 68, + 0x1874: 68, + 0x1875: 68, + 0x1876: 68, + 0x1877: 68, + 0x1878: 68, + 0x1885: 84, + 0x1886: 84, + 0x1887: 68, + 0x1888: 68, + 0x1889: 68, + 0x188a: 68, + 0x188b: 68, + 0x188c: 68, + 0x188d: 68, + 0x188e: 68, + 0x188f: 68, + 0x1890: 68, + 0x1891: 68, + 0x1892: 68, + 0x1893: 68, + 0x1894: 68, + 0x1895: 68, + 0x1896: 68, + 0x1897: 68, + 0x1898: 68, + 0x1899: 68, + 0x189a: 68, + 0x189b: 68, + 0x189c: 68, + 0x189d: 68, + 0x189e: 68, + 0x189f: 68, + 0x18a0: 68, + 0x18a1: 68, + 0x18a2: 68, + 0x18a3: 68, + 0x18a4: 68, + 0x18a5: 68, + 0x18a6: 68, + 0x18a7: 68, + 0x18a8: 68, + 0x18a9: 84, + 0x18aa: 68, + 0x1920: 84, + 0x1921: 84, + 0x1922: 84, + 0x1927: 84, + 0x1928: 84, + 0x1932: 84, + 0x1939: 84, + 0x193a: 84, + 0x193b: 84, + 0x1a17: 84, + 0x1a18: 84, + 0x1a1b: 84, + 0x1a56: 84, + 0x1a58: 84, + 0x1a59: 84, + 0x1a5a: 84, + 0x1a5b: 84, + 0x1a5c: 84, + 0x1a5d: 84, + 0x1a5e: 84, + 0x1a60: 84, + 0x1a62: 84, + 0x1a65: 84, + 0x1a66: 84, + 0x1a67: 84, + 
        [... several thousand more generated joining-type entries (0x1a68 through 0xe01ef) elided; each value is the ASCII code of a Unicode joining type: 84 'T' (transparent), 67 'C' (join-causing), 68 'D' (dual-joining), 76 'L' (left-joining), 82 'R' (right-joining) ...]
+}
+codepoint_classes = {
+    'PVALID': (
+        0x2d0000002e,
+        0x300000003a,
+        0x610000007b,
        [... roughly a thousand further generated PVALID constants elided; each packs a half-open codepoint range as (start << 32) | end, so e.g. 0x610000007b covers 0x61 <= cp < 0x7b, i.e. 'a'-'z' ...]
+    ),
+    'CONTEXTJ': (
+        0x200c0000200e,
+    ),
+    'CONTEXTO': (
+        0xb7000000b8,
+        0x37500000376,
+        0x5f3000005f5,
+        0x6600000066a,
+        0x6f0000006fa,
+        0x30fb000030fc,
+    ),
+}
diff --git a/templates/skills/spotify/dependencies/idna/intranges.py b/templates/skills/spotify/dependencies/idna/intranges.py
new file mode 100644
index 00000000..6a43b047
--- /dev/null
+++ b/templates/skills/spotify/dependencies/idna/intranges.py
@@ -0,0 +1,54 @@
+"""
+Given a list of integers, made up of (hopefully) a small number of long runs
+of consecutive integers, compute a representation of the form
+((start1, end1), (start2, end2) ...). Then answer the question "was x present
+in the original list?" in time O(log(# runs)).
+"""
+
+import bisect
+from typing import List, Tuple
+
+def intranges_from_list(list_: List[int]) -> Tuple[int, ...]:
+    """Represent a list of integers as a sequence of ranges:
+    ((start_0, end_0), (start_1, end_1), ...), such that the original
+    integers are exactly those x such that start_i <= x < end_i for some i.
+
+    Ranges are encoded as single integers (start << 32 | end), not as tuples.
+    """
+
+    sorted_list = sorted(list_)
+    ranges = []
+    last_write = -1
+    for i in range(len(sorted_list)):
+        if i+1 < len(sorted_list):
+            if sorted_list[i] == sorted_list[i+1]-1:
+                continue
+        current_range = sorted_list[last_write+1:i+1]
+        ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
+        last_write = i
+
+    return tuple(ranges)
+
+def _encode_range(start: int, end: int) -> int:
+    return (start << 32) | end
+
+def _decode_range(r: int) -> Tuple[int, int]:
+    return (r >> 32), (r & ((1 << 32) - 1))
+
+
+def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool:
+    """Determine if `int_` falls into one of the ranges in `ranges`."""
+    tuple_ = _encode_range(int_, 0)
+    pos = bisect.bisect_left(ranges, tuple_)
+    # we could be immediately ahead of a tuple (start, end)
+    # with start < int_ <= end
+    if pos > 0:
+        left, right = _decode_range(ranges[pos-1])
+        if left <= int_ < right:
+            return True
+    # or we could be immediately behind a tuple (int_, end)
+    if pos < len(ranges):
+        left, _ = _decode_range(ranges[pos])
+        if left == int_:
+            return True
+    return False
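
Worth pausing on: the packed constants in codepoint_classes above use exactly the (start << 32) | end encoding that intranges.py produces and decodes. A minimal self-check — illustrative only; it assumes the vendored package is importable as idna (e.g. with templates/skills/spotify/dependencies on sys.path), and the two runs are chosen to reproduce PVALID constants that appear verbatim above:

    from idna.intranges import intranges_from_list, intranges_contain

    # Two runs of consecutive codepoints: '0'-'9' (0x30-0x39) and
    # 'a'-'z' (0x61-0x7a). Each run collapses into one packed constant.
    table = intranges_from_list(list(range(0x30, 0x3A)) + list(range(0x61, 0x7B)))

    # Each entry is (start << 32) | end, a half-open range [start, end) --
    # the same literals 0x300000003a and 0x610000007b listed under PVALID.
    assert table == (0x300000003A, 0x610000007B)

    assert intranges_contain(ord('5'), table)      # digit: first run
    assert intranges_contain(ord('q'), table)      # lowercase: second run
    assert not intranges_contain(ord('Q'), table)  # uppercase: in neither run
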
diff --git a/templates/skills/spotify/dependencies/idna/package_data.py b/templates/skills/spotify/dependencies/idna/package_data.py
new file mode 100644
index 00000000..ed811133
--- /dev/null
+++ b/templates/skills/spotify/dependencies/idna/package_data.py
@@ -0,0 +1,2 @@
+__version__ = '3.7'
+
diff --git a/templates/skills/spotify/dependencies/idna/py.typed b/templates/skills/spotify/dependencies/idna/py.typed
new file mode 100644
index 00000000..e69de29b
diff --git a/templates/skills/spotify/dependencies/idna/uts46data.py b/templates/skills/spotify/dependencies/idna/uts46data.py
new file mode 100644
index 00000000..6a1eddbf
--- /dev/null
+++ b/templates/skills/spotify/dependencies/idna/uts46data.py
@@ -0,0 +1,8598 @@
+# This file is automatically generated by tools/idna-data
+# vim: set fileencoding=utf-8 :
+
+from typing import List, Tuple, Union
+
+
+"""IDNA Mapping Table from UTS46."""
+
+
+__version__ = '15.1.0'
+def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]:
+    return [
+        (0x0, '3'),
+        (0x1, '3'),
+        (0x2, '3'),
        [... the remaining ~8,570 generated rows (continuing _seg_0, then _seg_1 through _seg_12 and beyond) are elided; every row is a (codepoint, status[, mapping]) tuple, sorted by codepoint, using the UTS #46 status letters seen in the data: 'V' valid (e.g. (0x61, 'V')), 'M' mapped (e.g. (0x41, 'M', 'a') lowercases 'A'), 'D' deviation (e.g. (0xDF, 'D', 'ss') for 'ß'), 'I' ignored (e.g. (0xAD, 'I') for the soft hyphen), 'X' disallowed (e.g. (0x80, 'X')), and '3' disallowed under STD3 rules, optionally with a mapping (e.g. (0xA0, '3', ' ')) ...]
(0xC3A, 'X'), + (0xC3C, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC77, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDD, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF4, 'X'), + (0xD00, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD50, 'X'), + (0xD54, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD80, 'X'), + (0xD81, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDE6, 'V'), + (0xDF0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', 'ํา'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE86, 'V'), + (0xE8B, 'X'), + (0xE8C, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEB3, 'M', 'ໍາ'), + (0xEB4, 'V'), + ] + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECF, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', '་'), + (0xF0D, 'V'), + (0xF43, 'M', 'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', 'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', 'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', 'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', 'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', 'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', 'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', 'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', 'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', 'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', 'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', 'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', 'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', 'ྐྵ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', 'ⴧ'), + (0x10C8, 'X'), + (0x10CD, 'M', 'ⴭ'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', 'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + ] + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, 
str]]]: + return [ + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F6, 'X'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), + (0x13FE, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F9, 'X'), + (0x1700, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x180F, 'I'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1879, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191F, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1AB0, 'V'), + (0x1ACF, 'X'), + (0x1B00, 'V'), + (0x1B4D, 'X'), + (0x1B50, 'V'), + (0x1B7F, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'M', 'в'), + ] + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), + (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + (0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CFB, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 
'r'), + (0x1D40, 'M', 't'), + (0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), + ] + +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + (0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + (0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), + (0x1DC0, 'V'), + (0x1E00, 'M', 'ḁ'), + (0x1E01, 'V'), + (0x1E02, 'M', 'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', 'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', 'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', 'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', 'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', 'ḍ'), + (0x1E0D, 'V'), + (0x1E0E, 'M', 'ḏ'), + (0x1E0F, 'V'), + (0x1E10, 'M', 'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', 'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', 'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', 'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', 'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', 'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', 'ḝ'), + (0x1E1D, 'V'), + (0x1E1E, 'M', 'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', 'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', 'ḣ'), + (0x1E23, 'V'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, 'M', 'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', 'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', 'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', 'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', 'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', 'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', 'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', 'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', 'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', 'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', 'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', 'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', 'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', 'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', 'ṁ'), + (0x1E41, 'V'), + (0x1E42, 'M', 'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', 'ṅ'), + (0x1E45, 'V'), + (0x1E46, 'M', 'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', 'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', 'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', 'ṍ'), + (0x1E4D, 'V'), + (0x1E4E, 'M', 'ṏ'), + 
(0x1E4F, 'V'), + (0x1E50, 'M', 'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', 'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', 'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', 'ṗ'), + (0x1E57, 'V'), + (0x1E58, 'M', 'ṙ'), + (0x1E59, 'V'), + (0x1E5A, 'M', 'ṛ'), + (0x1E5B, 'V'), + (0x1E5C, 'M', 'ṝ'), + (0x1E5D, 'V'), + (0x1E5E, 'M', 'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', 'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', 'ṣ'), + (0x1E63, 'V'), + (0x1E64, 'M', 'ṥ'), + (0x1E65, 'V'), + (0x1E66, 'M', 'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', 'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', 'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', 'ṭ'), + (0x1E6D, 'V'), + (0x1E6E, 'M', 'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', 'ṱ'), + (0x1E71, 'V'), + (0x1E72, 'M', 'ṳ'), + (0x1E73, 'V'), + (0x1E74, 'M', 'ṵ'), + (0x1E75, 'V'), + (0x1E76, 'M', 'ṷ'), + (0x1E77, 'V'), + (0x1E78, 'M', 'ṹ'), + (0x1E79, 'V'), + (0x1E7A, 'M', 'ṻ'), + (0x1E7B, 'V'), + (0x1E7C, 'M', 'ṽ'), + (0x1E7D, 'V'), + (0x1E7E, 'M', 'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', 'ẁ'), + (0x1E81, 'V'), + (0x1E82, 'M', 'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', 'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', 'ẇ'), + (0x1E87, 'V'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, 'M', 'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', 'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', 'ẍ'), + (0x1E8D, 'V'), + (0x1E8E, 'M', 'ẏ'), + (0x1E8F, 'V'), + (0x1E90, 'M', 'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', 'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', 'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', 'ß'), + (0x1E9F, 'V'), + (0x1EA0, 'M', 'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', 'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', 'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', 'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', 'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', 'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', 'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', 'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', 'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', 'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', 'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', 'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', 'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', 'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', 'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', 'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', 'ề'), + (0x1EC1, 'V'), + (0x1EC2, 'M', 'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', 'ễ'), + (0x1EC5, 'V'), + (0x1EC6, 'M', 'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', 'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', 'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', 'ọ'), + (0x1ECD, 'V'), + (0x1ECE, 'M', 'ỏ'), + (0x1ECF, 'V'), + (0x1ED0, 'M', 'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', 'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', 'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', 'ỗ'), + (0x1ED7, 'V'), + (0x1ED8, 'M', 'ộ'), + (0x1ED9, 'V'), + (0x1EDA, 'M', 'ớ'), + (0x1EDB, 'V'), + (0x1EDC, 'M', 'ờ'), + (0x1EDD, 'V'), + (0x1EDE, 'M', 'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', 'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', 'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', 'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', 'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', 'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', 'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', 'ử'), + (0x1EED, 'V'), + (0x1EEE, 'M', 'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EF1, 'V'), + (0x1EF2, 'M', 'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', 'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', 'ỷ'), + (0x1EF7, 'V'), + (0x1EF8, 'M', 'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', 'ỻ'), + (0x1EFB, 'V'), + (0x1EFC, 'M', 'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', 'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', 'ἀ'), + 
(0x1F09, 'M', 'ἁ'), + (0x1F0A, 'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', 'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', 'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', 'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', 'ὗ'), + (0x1F60, 'V'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', 'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', 'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', 'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', 'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', 'ό'), + (0x1F7A, 'V'), + (0x1F7B, 'M', 'ύ'), + (0x1F7C, 'V'), + (0x1F7D, 'M', 'ώ'), + (0x1F7E, 'X'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), + ] + +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + (0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), + (0x1FD0, 'V'), + (0x1FD3, 'M', 'ΐ'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + 
(0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', 'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + (0x1FF4, 'M', 'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), + (0x1FFF, 'X'), + (0x2000, '3', ' '), + (0x200B, 'I'), + (0x200C, 'D', ''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', '‐'), + (0x2012, 'V'), + (0x2017, '3', ' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', ' '), + (0x2030, 'V'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), + (0x2035, 'V'), + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', '!!'), + (0x203D, 'V'), + (0x203E, '3', ' ̅'), + (0x203F, 'V'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), + (0x204A, 'V'), + (0x2057, 'M', '′′′′'), + (0x2058, 'V'), + (0x205F, '3', ' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), + (0x2072, 'X'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + (0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), + (0x208F, 'X'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', 'rs'), + (0x20A9, 'V'), + (0x20C1, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 'a/s'), + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), + (0x2104, 'V'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), + (0x2108, 'V'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), + (0x2114, 'V'), + (0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), + (0x2117, 'V'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), + (0x211E, 'V'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), + (0x2123, 'V'), + (0x2124, 'M', 'z'), + (0x2125, 'V'), + (0x2126, 'M', 'ω'), + (0x2127, 'V'), + (0x2128, 'M', 'z'), + (0x2129, 'V'), + (0x212A, 'M', 'k'), + (0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), + (0x212E, 'V'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), + (0x2132, 'X'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 
'א'), + (0x2136, 'M', 'ב'), + (0x2137, 'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), + (0x213A, 'V'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), + (0x2141, 'V'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), + (0x214A, 'V'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', '0⁄3'), + (0x218A, 'V'), + (0x218C, 'X'), + (0x2190, 'V'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), + (0x2231, 'V'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), + (0x232B, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + (0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + (0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), + (0x2488, 'X'), + (0x249C, '3', '(a)'), + (0x249D, '3', '(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 
'M', 'c'), + (0x24B9, 'M', 'd'), + (0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + ] + +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), + (0x24EB, 'V'), + (0x2A0C, 'M', '∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), + (0x2A77, 'V'), + (0x2ADC, 'M', '⫝̸'), + (0x2ADD, 'V'), + (0x2B74, 'X'), + (0x2B76, 'V'), + (0x2B96, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 'ⱐ'), + (0x2C21, 'M', 'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + (0x2C2C, 'M', 'ⱜ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'M', 'ⱟ'), + (0x2C30, 'V'), + (0x2C60, 'M', 'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', 'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', 'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', 'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 'ɐ'), + (0x2C70, 'M', 'ɒ'), + (0x2C71, 'V'), + (0x2C72, 'M', 'ⱳ'), + (0x2C73, 'V'), + (0x2C75, 'M', 'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), + (0x2C81, 'V'), + (0x2C82, 'M', 'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', 'ⲅ'), + (0x2C85, 'V'), + (0x2C86, 'M', 'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', 'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', 'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', 'ⲍ'), + (0x2C8D, 'V'), + (0x2C8E, 'M', 'ⲏ'), + (0x2C8F, 'V'), + (0x2C90, 'M', 'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', 'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', 'ⲕ'), + (0x2C95, 'V'), + 
(0x2C96, 'M', 'ⲗ'), + (0x2C97, 'V'), + (0x2C98, 'M', 'ⲙ'), + (0x2C99, 'V'), + (0x2C9A, 'M', 'ⲛ'), + (0x2C9B, 'V'), + (0x2C9C, 'M', 'ⲝ'), + (0x2C9D, 'V'), + (0x2C9E, 'M', 'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', 'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', 'ⲣ'), + (0x2CA3, 'V'), + (0x2CA4, 'M', 'ⲥ'), + (0x2CA5, 'V'), + (0x2CA6, 'M', 'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', 'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', 'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', 'ⲭ'), + (0x2CAD, 'V'), + (0x2CAE, 'M', 'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', 'ⲱ'), + (0x2CB1, 'V'), + (0x2CB2, 'M', 'ⲳ'), + (0x2CB3, 'V'), + (0x2CB4, 'M', 'ⲵ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', 'ⲷ'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 'ⲹ'), + (0x2CB9, 'V'), + (0x2CBA, 'M', 'ⲻ'), + (0x2CBB, 'V'), + (0x2CBC, 'M', 'ⲽ'), + (0x2CBD, 'V'), + (0x2CBE, 'M', 'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', 'ⳁ'), + (0x2CC1, 'V'), + (0x2CC2, 'M', 'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', 'ⳅ'), + (0x2CC5, 'V'), + (0x2CC6, 'M', 'ⳇ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2CC7, 'V'), + (0x2CC8, 'M', 'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', 'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', 'ⳍ'), + (0x2CCD, 'V'), + (0x2CCE, 'M', 'ⳏ'), + (0x2CCF, 'V'), + (0x2CD0, 'M', 'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', 'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', 'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', 'ⳗ'), + (0x2CD7, 'V'), + (0x2CD8, 'M', 'ⳙ'), + (0x2CD9, 'V'), + (0x2CDA, 'M', 'ⳛ'), + (0x2CDB, 'V'), + (0x2CDC, 'M', 'ⳝ'), + (0x2CDD, 'V'), + (0x2CDE, 'M', 'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', 'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', 'ⳣ'), + (0x2CE3, 'V'), + (0x2CEB, 'M', 'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', 'ⳮ'), + (0x2CEE, 'V'), + (0x2CF2, 'M', 'ⳳ'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', 'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E5E, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', '母'), + (0x2EA0, 'V'), + (0x2EF3, 'M', '龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), + (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', '儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + ] + +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + (0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 
'M', '己'), + (0x2F31, 'M', '巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + ] + +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), + (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + (0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', '革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + (0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + 
(0x2FCE, 'M', '鼓'), + (0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), + (0x2FD6, 'X'), + (0x3000, '3', ' '), + (0x3001, 'V'), + (0x3002, 'M', '.'), + (0x3003, 'V'), + (0x3036, 'M', '〒'), + (0x3037, 'V'), + (0x3038, 'M', '十'), + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), + (0x303B, 'V'), + (0x3040, 'X'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), + (0x309D, 'V'), + (0x309F, 'M', 'より'), + (0x30A0, 'V'), + (0x30FF, 'M', 'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x3130, 'X'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), + (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + (0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + (0x3181, 'M', 'ᅌ'), + (0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + ] + +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3189, 'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), + (0x31A0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, '3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), 
+ (0x320C, '3', '(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), + (0x321F, 'X'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + ] + +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), + (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + (0x3279, 'M', '타'), + (0x327A, 'M', '파'), + (0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), + (0x327F, 'V'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), + (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + 
(0x32AB, 'M', '学'), + (0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + ] + +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + (0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + (0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 'ギルダー'), + (0x3314, 'M', 'キロ'), + (0x3315, 'M', 'キログラム'), + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + ] + +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 'ページ'), + 
(0x333C, 'M', 'ベータ'), + (0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', '6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + ] + +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), + (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), + (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + (0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + (0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + (0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 
'M', 'kt'), + (0x33D0, 'M', 'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', 'ppm'), + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + ] + +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', '17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), + (0x3400, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', 'ꙁ'), + (0xA641, 'V'), + (0xA642, 'M', 'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', 'ꙅ'), + (0xA645, 'V'), + (0xA646, 'M', 'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', 'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', 'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', 'ꙍ'), + (0xA64D, 'V'), + (0xA64E, 'M', 'ꙏ'), + (0xA64F, 'V'), + (0xA650, 'M', 'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', 'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', 'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', 'ꙗ'), + (0xA657, 'V'), + (0xA658, 'M', 'ꙙ'), + (0xA659, 'V'), + (0xA65A, 'M', 'ꙛ'), + (0xA65B, 'V'), + (0xA65C, 'M', 'ꙝ'), + (0xA65D, 'V'), + (0xA65E, 'M', 'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', 'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', 'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', 'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', 'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', 'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', 'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', 'ꙭ'), + (0xA66D, 'V'), + (0xA680, 'M', 'ꚁ'), + (0xA681, 'V'), + (0xA682, 'M', 'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', 'ꚅ'), + (0xA685, 'V'), + (0xA686, 'M', 'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', 'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', 'ꚋ'), + (0xA68B, 'V'), + (0xA68C, 'M', 'ꚍ'), + (0xA68D, 'V'), + (0xA68E, 'M', 'ꚏ'), + (0xA68F, 'V'), + (0xA690, 'M', 'ꚑ'), + (0xA691, 'V'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA692, 'M', 'ꚓ'), + (0xA693, 'V'), + (0xA694, 'M', 'ꚕ'), + (0xA695, 'V'), + (0xA696, 'M', 'ꚗ'), + (0xA697, 'V'), + (0xA698, 'M', 'ꚙ'), + (0xA699, 'V'), + (0xA69A, 'M', 'ꚛ'), + (0xA69B, 'V'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), + (0xA69E, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', 'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', 'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', 'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', 'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', 'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', 'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', 'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', 'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', 'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', 'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', 'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', 'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', 'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', 'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', 'ꝁ'), + (0xA741, 'V'), + (0xA742, 'M', 
'ꝃ'), + (0xA743, 'V'), + (0xA744, 'M', 'ꝅ'), + (0xA745, 'V'), + (0xA746, 'M', 'ꝇ'), + (0xA747, 'V'), + (0xA748, 'M', 'ꝉ'), + (0xA749, 'V'), + (0xA74A, 'M', 'ꝋ'), + (0xA74B, 'V'), + (0xA74C, 'M', 'ꝍ'), + (0xA74D, 'V'), + (0xA74E, 'M', 'ꝏ'), + (0xA74F, 'V'), + (0xA750, 'M', 'ꝑ'), + (0xA751, 'V'), + (0xA752, 'M', 'ꝓ'), + (0xA753, 'V'), + (0xA754, 'M', 'ꝕ'), + (0xA755, 'V'), + (0xA756, 'M', 'ꝗ'), + (0xA757, 'V'), + (0xA758, 'M', 'ꝙ'), + (0xA759, 'V'), + (0xA75A, 'M', 'ꝛ'), + (0xA75B, 'V'), + (0xA75C, 'M', 'ꝝ'), + (0xA75D, 'V'), + (0xA75E, 'M', 'ꝟ'), + (0xA75F, 'V'), + (0xA760, 'M', 'ꝡ'), + (0xA761, 'V'), + (0xA762, 'M', 'ꝣ'), + (0xA763, 'V'), + (0xA764, 'M', 'ꝥ'), + (0xA765, 'V'), + (0xA766, 'M', 'ꝧ'), + (0xA767, 'V'), + (0xA768, 'M', 'ꝩ'), + (0xA769, 'V'), + (0xA76A, 'M', 'ꝫ'), + (0xA76B, 'V'), + (0xA76C, 'M', 'ꝭ'), + (0xA76D, 'V'), + (0xA76E, 'M', 'ꝯ'), + (0xA76F, 'V'), + (0xA770, 'M', 'ꝯ'), + (0xA771, 'V'), + (0xA779, 'M', 'ꝺ'), + (0xA77A, 'V'), + (0xA77B, 'M', 'ꝼ'), + (0xA77C, 'V'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), + (0xA77F, 'V'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA780, 'M', 'ꞁ'), + (0xA781, 'V'), + (0xA782, 'M', 'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', 'ꞅ'), + (0xA785, 'V'), + (0xA786, 'M', 'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', 'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', 'ɥ'), + (0xA78E, 'V'), + (0xA790, 'M', 'ꞑ'), + (0xA791, 'V'), + (0xA792, 'M', 'ꞓ'), + (0xA793, 'V'), + (0xA796, 'M', 'ꞗ'), + (0xA797, 'V'), + (0xA798, 'M', 'ꞙ'), + (0xA799, 'V'), + (0xA79A, 'M', 'ꞛ'), + (0xA79B, 'V'), + (0xA79C, 'M', 'ꞝ'), + (0xA79D, 'V'), + (0xA79E, 'M', 'ꞟ'), + (0xA79F, 'V'), + (0xA7A0, 'M', 'ꞡ'), + (0xA7A1, 'V'), + (0xA7A2, 'M', 'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', 'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', 'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', 'ꞩ'), + (0xA7A9, 'V'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), + (0xA7AF, 'V'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), + (0xA7B5, 'V'), + (0xA7B6, 'M', 'ꞷ'), + (0xA7B7, 'V'), + (0xA7B8, 'M', 'ꞹ'), + (0xA7B9, 'V'), + (0xA7BA, 'M', 'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), + (0xA7FA, 'V'), + (0xA82D, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C6, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA954, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9FF, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + 
(0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xAB30, 'V'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), + (0xAB60, 'V'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + ] + +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + (0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', '卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', '蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), + (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + 
(0xF93A, 'M', '鷺'), + (0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + ] + +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + (0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + (0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', '獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + ] + +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + (0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + (0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', 
'陸'), + (0xF9D4, 'M', '倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), + (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), + (0xFA0E, 'V'), + (0xFA10, 'M', '塚'), + (0xFA11, 'V'), + (0xFA12, 'M', '晴'), + ] + +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA13, 'V'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), + (0xFA1F, 'V'), + (0xFA20, 'M', '蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', '諸'), + (0xFA23, 'V'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), + (0xFA27, 'V'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + (0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), + (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + (0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', '舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), + (0xFA79, 'M', '啕'), + 
(0xFA7A, 'M', '喙'), + (0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + ] + +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', '龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), + ] + +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFB07, 'X'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + (0xFB17, 'M', 'մխ'), + (0xFB18, 'X'), + (0xFB1D, 'M', 'יִ'), + (0xFB1E, 'V'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + (0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), + (0xFB37, 'X'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', 'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 'M', 'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 
'רּ'), + (0xFB49, 'M', 'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC3, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + ] + +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + (0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + (0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), + ] + +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return 
[ + (0xFC5B, 'M', 'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), + (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + (0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), + (0xFCBA, 'M', 'عج'), + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + (0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + ] + +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + (0xFCC3, 'M', 'قم'), + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + (0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + 
(0xFCF1, 'M', 'يه'), + (0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + ] + +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), + (0xFD3E, 'V'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + (0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + 
(0xFDA9, 'M', 'صحي'), + (0xFDAA, 'M', 'شحي'), + ] + +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFDAB, 'M', 'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), + (0xFDC8, 'X'), + (0xFDCF, 'V'), + (0xFDD0, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), + (0xFDFD, 'V'), + (0xFE00, 'I'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), + (0xFE12, 'X'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE30, 'X'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), + (0xFE45, 'V'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), + (0xFE52, 'X'), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + (0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE62, '3', '+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), + (0xFE67, 'X'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), + (0xFE6C, 'X'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), + (0xFE73, 'V'), + (0xFE74, '3', ' ٍ'), + (0xFE75, 'X'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + 
(0xFEC1, 'M', 'ط'), + (0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), + (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + ] + +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + (0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', '`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + (0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), + (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + ] + +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + 
(0xFF86, 'M', 'ニ'), + (0xFF87, 'M', 'ヌ'), + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), + (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), + (0xFFBF, 'X'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), + (0xFFC8, 'X'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), + (0xFFD0, 'X'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), + (0xFFD8, 'X'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), + (0xFFE7, 'X'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + (0xFFEA, 'M', '↑'), + (0xFFEB, 'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEE, 'M', '○'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018F, 'X'), + (0x10190, 'V'), + (0x1019D, 'X'), + (0x101A0, 'V'), + (0x101A1, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x102E0, 'V'), + (0x102FC, 'X'), + (0x10300, 'V'), + (0x10324, 'X'), + (0x1032D, 'V'), + (0x1034B, 'X'), + (0x10350, 'V'), + (0x1037B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + 
(0x10417, 'M', '𐐿'), + (0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 'M', '𐑎'), + (0x10427, 'M', '𐑏'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + (0x104B5, 'M', '𐓝'), + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), + (0x104D4, 'X'), + (0x104D8, 'V'), + (0x104FC, 'X'), + (0x10500, 'V'), + (0x10528, 'X'), + (0x10530, 'V'), + (0x10564, 'X'), + (0x1056F, 'V'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', '𐖴'), + (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), + (0x10600, 'V'), + (0x10737, 'X'), + (0x10740, 'V'), + (0x10756, 'X'), + (0x10760, 'V'), + (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + (0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), 
+ (0x107A4, 'M', 'ɷ'), + (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x1089F, 'X'), + (0x108A7, 'V'), + (0x108B0, 'X'), + (0x108E0, 'V'), + (0x108F3, 'X'), + (0x108F4, 'V'), + (0x108F6, 'X'), + (0x108FB, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BC, 'V'), + (0x109D0, 'X'), + (0x109D2, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A36, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A49, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10AA0, 'X'), + (0x10AC0, 'V'), + (0x10AE7, 'X'), + (0x10AEB, 'V'), + (0x10AF7, 'X'), + (0x10B00, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B92, 'X'), + (0x10B99, 'V'), + (0x10B9D, 'X'), + (0x10BA9, 'V'), + (0x10BB0, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + (0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), + (0x10CB3, 'X'), + (0x10CC0, 'V'), + (0x10CF3, 'X'), + (0x10CFA, 'V'), + (0x10D28, 'X'), + (0x10D30, 'V'), + (0x10D3A, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10EFD, 'V'), + (0x10F28, 'X'), + (0x10F30, 'V'), + (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11076, 'X'), + (0x1107F, 'V'), + (0x110BD, 'X'), + 
(0x110BE, 'V'), + (0x110C3, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11148, 'X'), + (0x11150, 'V'), + (0x11177, 'X'), + (0x11180, 'V'), + (0x111E0, 'X'), + (0x111E1, 'V'), + (0x111F5, 'X'), + (0x11200, 'V'), + (0x11212, 'X'), + (0x11213, 'V'), + (0x11242, 'X'), + (0x11280, 'V'), + (0x11287, 'X'), + (0x11288, 'V'), + (0x11289, 'X'), + (0x1128A, 'V'), + (0x1128E, 'X'), + (0x1128F, 'V'), + (0x1129E, 'X'), + (0x1129F, 'V'), + (0x112AA, 'X'), + (0x112B0, 'V'), + (0x112EB, 'X'), + (0x112F0, 'V'), + (0x112FA, 'X'), + (0x11300, 'V'), + (0x11304, 'X'), + (0x11305, 'V'), + (0x1130D, 'X'), + (0x1130F, 'V'), + (0x11311, 'X'), + (0x11313, 'V'), + (0x11329, 'X'), + (0x1132A, 'V'), + (0x11331, 'X'), + (0x11332, 'V'), + (0x11334, 'X'), + (0x11335, 'V'), + (0x1133A, 'X'), + (0x1133B, 'V'), + (0x11345, 'X'), + (0x11347, 'V'), + (0x11349, 'X'), + (0x1134B, 'V'), + (0x1134E, 'X'), + (0x11350, 'V'), + (0x11351, 'X'), + (0x11357, 'V'), + (0x11358, 'X'), + (0x1135D, 'V'), + (0x11364, 'X'), + (0x11366, 'V'), + (0x1136D, 'X'), + (0x11370, 'V'), + (0x11375, 'X'), + (0x11400, 'V'), + (0x1145C, 'X'), + (0x1145D, 'V'), + (0x11462, 'X'), + (0x11480, 'V'), + (0x114C8, 'X'), + (0x114D0, 'V'), + (0x114DA, 'X'), + (0x11580, 'V'), + (0x115B6, 'X'), + (0x115B8, 'V'), + (0x115DE, 'X'), + (0x11600, 'V'), + (0x11645, 'X'), + (0x11650, 'V'), + (0x1165A, 'X'), + (0x11660, 'V'), + (0x1166D, 'X'), + (0x11680, 'V'), + (0x116BA, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x11700, 'V'), + (0x1171B, 'X'), + (0x1171D, 'V'), + (0x1172C, 'X'), + (0x11730, 'V'), + (0x11747, 'X'), + (0x11800, 'V'), + (0x1183C, 'X'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + (0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + (0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), + (0x118C0, 'V'), + (0x118F3, 'X'), + (0x118FF, 'V'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), + (0x11A00, 'V'), + (0x11A48, 'X'), + (0x11A50, 'V'), + (0x11AA3, 'X'), + (0x11AB0, 'V'), + (0x11AF9, 'X'), + (0x11B00, 'V'), + (0x11B0A, 'X'), + (0x11C00, 'V'), + (0x11C09, 'X'), + (0x11C0A, 'V'), + (0x11C37, 'X'), + (0x11C38, 'V'), + (0x11C46, 'X'), + (0x11C50, 'V'), + (0x11C6D, 'X'), + (0x11C70, 'V'), + (0x11C90, 'X'), + (0x11C92, 'V'), + (0x11CA8, 'X'), + (0x11CA9, 'V'), + (0x11CB7, 'X'), + (0x11D00, 'V'), + (0x11D07, 'X'), + (0x11D08, 'V'), + (0x11D0A, 'X'), + (0x11D0B, 'V'), + (0x11D37, 'X'), + (0x11D3A, 'V'), 
+ (0x11D3B, 'X'), + (0x11D3C, 'V'), + (0x11D3E, 'X'), + (0x11D3F, 'V'), + (0x11D48, 'X'), + (0x11D50, 'V'), + (0x11D5A, 'X'), + (0x11D60, 'V'), + (0x11D66, 'X'), + (0x11D67, 'V'), + (0x11D69, 'X'), + (0x11D6A, 'V'), + (0x11D8F, 'X'), + (0x11D90, 'V'), + (0x11D92, 'X'), + (0x11D93, 'V'), + (0x11D99, 'X'), + (0x11DA0, 'V'), + (0x11DAA, 'X'), + (0x11EE0, 'V'), + (0x11EF9, 'X'), + (0x11F00, 'V'), + (0x11F11, 'X'), + (0x11F12, 'V'), + (0x11F3B, 'X'), + (0x11F3E, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F5A, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 'X'), + (0x11FFF, 'V'), + (0x1239A, 'X'), + (0x12400, 'V'), + (0x1246F, 'X'), + (0x12470, 'V'), + (0x12475, 'X'), + (0x12480, 'V'), + (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), + (0x13000, 'V'), + (0x13430, 'X'), + (0x13440, 'V'), + (0x13456, 'X'), + (0x14400, 'V'), + (0x14647, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16A40, 'V'), + (0x16A5F, 'X'), + (0x16A60, 'V'), + (0x16A6A, 'X'), + (0x16A6E, 'V'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), + (0x16AD0, 'V'), + (0x16AEE, 'X'), + (0x16AF0, 'V'), + (0x16AF6, 'X'), + (0x16B00, 'V'), + (0x16B46, 'X'), + (0x16B50, 'V'), + (0x16B5A, 'X'), + (0x16B5B, 'V'), + (0x16B62, 'X'), + (0x16B63, 'V'), + (0x16B78, 'X'), + (0x16B7D, 'V'), + (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 'M', '𖹿'), + (0x16E60, 'V'), + (0x16E9B, 'X'), + (0x16F00, 'V'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x16FE0, 'V'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), + (0x17000, 'V'), + (0x187F8, 'X'), + (0x18800, 'V'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1AFF0, 'V'), + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFFF, 'X'), + (0x1B000, 'V'), + (0x1B123, 'X'), + (0x1B132, 'V'), + (0x1B133, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B155, 'V'), + (0x1B156, 'X'), + (0x1B164, 'V'), + (0x1B168, 'X'), + (0x1B170, 'V'), + (0x1B2FC, 'X'), + (0x1BC00, 'V'), + (0x1BC6B, 'X'), + (0x1BC70, 'V'), + (0x1BC7D, 'X'), + (0x1BC80, 'V'), + (0x1BC89, 'X'), + (0x1BC90, 'V'), + (0x1BC9A, 'X'), + (0x1BC9C, 'V'), + (0x1BCA0, 'I'), + (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', 
'𝆺𝅥𝅮'), + (0x1D1BF, 'M', '𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), + (0x1D1C1, 'V'), + (0x1D1EB, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D2C0, 'V'), + (0x1D2D4, 'X'), + (0x1D2E0, 'V'), + (0x1D2F4, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D379, 'X'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), + (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + (0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), + (0x1D455, 'X'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + (0x1D45C, 'M', 'o'), + (0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + (0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + ] + +def _seg_62() -> List[Union[Tuple[int, 
str], Tuple[int, str, str]]]: + return [ + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + (0x1D495, 'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', 'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + (0x1D4B9, 'M', 'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', 'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + (0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), + (0x1D506, 'X'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + (0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 'q'), + (0x1D515, 'X'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 
'M', 'g'), + (0x1D525, 'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), + (0x1D545, 'X'), + (0x1D546, 'M', 'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), + (0x1D551, 'X'), + (0x1D552, 'M', 'a'), + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), + (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + (0x1D58F, 'M', 'j'), + (0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), + (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + 
return [ + (0x1D5BB, 'M', 'b'), + (0x1D5BC, 'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + (0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), + (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + 
(0x1D651, 'M', 'v'), + (0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 'M', 'q'), + (0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 'θ'), + (0x1D6BA, 'M', 'σ'), + (0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), + (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + ] + +def _seg_68() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + (0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + (0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), + (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), 
+ (0x1D77F, 'M', 'π'), + (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + (0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', '8'), + (0x1D7EB, 'M', '9'), + (0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), + (0x1D800, 'V'), + (0x1DA8C, 'X'), + (0x1DA9B, 'V'), + (0x1DAA0, 'X'), + (0x1DAA1, 'V'), + (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1DF25, 'V'), + (0x1DF2B, 'X'), + (0x1E000, 'V'), + (0x1E007, 'X'), + (0x1E008, 'V'), + (0x1E019, 'X'), + (0x1E01B, 'V'), + (0x1E022, 'X'), + (0x1E023, 'V'), + (0x1E025, 'X'), + (0x1E026, 'V'), + (0x1E02B, 'X'), + (0x1E030, 'M', 'а'), + (0x1E031, 'M', 'б'), + (0x1E032, 'M', 'в'), + (0x1E033, 'M', 'г'), + (0x1E034, 'M', 'д'), + (0x1E035, 'M', 'е'), + (0x1E036, 'M', 'ж'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: 
+ return [ + (0x1E037, 'M', 'з'), + (0x1E038, 'M', 'и'), + (0x1E039, 'M', 'к'), + (0x1E03A, 'M', 'л'), + (0x1E03B, 'M', 'м'), + (0x1E03C, 'M', 'о'), + (0x1E03D, 'M', 'п'), + (0x1E03E, 'M', 'р'), + (0x1E03F, 'M', 'с'), + (0x1E040, 'M', 'т'), + (0x1E041, 'M', 'у'), + (0x1E042, 'M', 'ф'), + (0x1E043, 'M', 'х'), + (0x1E044, 'M', 'ц'), + (0x1E045, 'M', 'ч'), + (0x1E046, 'M', 'ш'), + (0x1E047, 'M', 'ы'), + (0x1E048, 'M', 'э'), + (0x1E049, 'M', 'ю'), + (0x1E04A, 'M', 'ꚉ'), + (0x1E04B, 'M', 'ә'), + (0x1E04C, 'M', 'і'), + (0x1E04D, 'M', 'ј'), + (0x1E04E, 'M', 'ө'), + (0x1E04F, 'M', 'ү'), + (0x1E050, 'M', 'ӏ'), + (0x1E051, 'M', 'а'), + (0x1E052, 'M', 'б'), + (0x1E053, 'M', 'в'), + (0x1E054, 'M', 'г'), + (0x1E055, 'M', 'д'), + (0x1E056, 'M', 'е'), + (0x1E057, 'M', 'ж'), + (0x1E058, 'M', 'з'), + (0x1E059, 'M', 'и'), + (0x1E05A, 'M', 'к'), + (0x1E05B, 'M', 'л'), + (0x1E05C, 'M', 'о'), + (0x1E05D, 'M', 'п'), + (0x1E05E, 'M', 'с'), + (0x1E05F, 'M', 'у'), + (0x1E060, 'M', 'ф'), + (0x1E061, 'M', 'х'), + (0x1E062, 'M', 'ц'), + (0x1E063, 'M', 'ч'), + (0x1E064, 'M', 'ш'), + (0x1E065, 'M', 'ъ'), + (0x1E066, 'M', 'ы'), + (0x1E067, 'M', 'ґ'), + (0x1E068, 'M', 'і'), + (0x1E069, 'M', 'ѕ'), + (0x1E06A, 'M', 'џ'), + (0x1E06B, 'M', 'ҫ'), + (0x1E06C, 'M', 'ꙑ'), + (0x1E06D, 'M', 'ұ'), + (0x1E06E, 'X'), + (0x1E08F, 'V'), + (0x1E090, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E4D0, 'V'), + (0x1E4FA, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + (0x1E7FF, 'X'), + (0x1E800, 'V'), + (0x1E8C5, 'X'), + (0x1E8C7, 'V'), + (0x1E8D7, 'X'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + (0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), + (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), + (0x1E922, 'V'), + (0x1E94C, 'X'), + (0x1E950, 'V'), + (0x1E95A, 'X'), + (0x1E95E, 'V'), + (0x1E960, 'X'), + (0x1EC71, 'V'), + (0x1ECB5, 'X'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + 
(0x1EE1D, 'M', 'ں'), + (0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), + (0x1EE20, 'X'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', 'ه'), + (0x1EE25, 'X'), + (0x1EE27, 'M', 'ح'), + (0x1EE28, 'X'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), + (0x1EE33, 'X'), + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), + (0x1EE38, 'X'), + (0x1EE39, 'M', 'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', 'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', 'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', 'ح'), + (0x1EE48, 'X'), + (0x1EE49, 'M', 'ي'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', 'ل'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE4F, 'M', 'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), + (0x1EE53, 'X'), + (0x1EE54, 'M', 'ش'), + (0x1EE55, 'X'), + (0x1EE57, 'M', 'خ'), + (0x1EE58, 'X'), + (0x1EE59, 'M', 'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', 'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', 'ں'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', 'ٯ'), + (0x1EE60, 'X'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', 'ه'), + (0x1EE65, 'X'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), + (0x1EE73, 'X'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), + (0x1EE78, 'X'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', 'ڡ'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + (0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + (0x1EEA3, 'M', 'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', 'و'), + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), + (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0C0, 'X'), + (0x1F0C1, 'V'), + 
(0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0F6, 'X'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + (0x1F109, '3', '8,'), + (0x1F10A, '3', '9,'), + (0x1F10B, 'V'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + (0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + (0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), + (0x1F191, 'V'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1F1AE, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + (0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + (0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', '一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), + (0x1F23C, 'X'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), + (0x1F252, 'X'), + (0x1F260, 
'V'), + (0x1F266, 'X'), + (0x1F300, 'V'), + (0x1F6D8, 'X'), + (0x1F6DC, 'V'), + (0x1F6ED, 'X'), + (0x1F6F0, 'V'), + (0x1F6FD, 'X'), + (0x1F700, 'V'), + (0x1F777, 'X'), + (0x1F77B, 'V'), + (0x1F7DA, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), + (0x1F800, 'V'), + (0x1F80C, 'X'), + (0x1F810, 'V'), + (0x1F848, 'X'), + (0x1F850, 'V'), + (0x1F85A, 'X'), + (0x1F860, 'V'), + (0x1F888, 'X'), + (0x1F890, 'V'), + (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), + (0x1F900, 'V'), + (0x1FA54, 'X'), + (0x1FA60, 'V'), + (0x1FA6E, 'X'), + (0x1FA70, 'V'), + (0x1FA7D, 'X'), + (0x1FA80, 'V'), + (0x1FA89, 'X'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FA90, 'V'), + (0x1FABE, 'X'), + (0x1FABF, 'V'), + (0x1FAC6, 'X'), + (0x1FACE, 'V'), + (0x1FADC, 'X'), + (0x1FAE0, 'V'), + (0x1FAE9, 'X'), + (0x1FAF0, 'V'), + (0x1FAF9, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + (0x1FBFA, 'X'), + (0x20000, 'V'), + (0x2A6E0, 'X'), + (0x2A700, 'V'), + (0x2B73A, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2B820, 'V'), + (0x2CEA2, 'X'), + (0x2CEB0, 'V'), + (0x2EBE1, 'X'), + (0x2EBF0, 'V'), + (0x2EE5E, 'X'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), + (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), + (0x2F830, 'M', '卽'), + (0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + ] + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + 
(0x2F85B, 'M', '壷'), + (0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), + (0x2F868, 'X'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + (0x2F873, 'M', '将'), + (0x2F874, 'X'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + (0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + (0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + (0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + (0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + 
(0x2F8F5, 'M', '殺'), + (0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + (0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + (0x2F927, 'M', '𤠔'), + (0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + (0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + (0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + (0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 'M', '𦉇'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + (0x2F98A, 'M', '𣎜'), + (0x2F98B, 
'M', '舁'), + (0x2F98C, 'M', '舄'), + (0x2F98D, 'M', '辞'), + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + (0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + (0x2F9C2, 'M', '䗹'), + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + ] + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + (0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + (0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + (0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0x31350, 'V'), + (0x323B0, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), 
+ ] + +uts46data = tuple( + _seg_0() + + _seg_1() + + _seg_2() + + _seg_3() + + _seg_4() + + _seg_5() + + _seg_6() + + _seg_7() + + _seg_8() + + _seg_9() + + _seg_10() + + _seg_11() + + _seg_12() + + _seg_13() + + _seg_14() + + _seg_15() + + _seg_16() + + _seg_17() + + _seg_18() + + _seg_19() + + _seg_20() + + _seg_21() + + _seg_22() + + _seg_23() + + _seg_24() + + _seg_25() + + _seg_26() + + _seg_27() + + _seg_28() + + _seg_29() + + _seg_30() + + _seg_31() + + _seg_32() + + _seg_33() + + _seg_34() + + _seg_35() + + _seg_36() + + _seg_37() + + _seg_38() + + _seg_39() + + _seg_40() + + _seg_41() + + _seg_42() + + _seg_43() + + _seg_44() + + _seg_45() + + _seg_46() + + _seg_47() + + _seg_48() + + _seg_49() + + _seg_50() + + _seg_51() + + _seg_52() + + _seg_53() + + _seg_54() + + _seg_55() + + _seg_56() + + _seg_57() + + _seg_58() + + _seg_59() + + _seg_60() + + _seg_61() + + _seg_62() + + _seg_63() + + _seg_64() + + _seg_65() + + _seg_66() + + _seg_67() + + _seg_68() + + _seg_69() + + _seg_70() + + _seg_71() + + _seg_72() + + _seg_73() + + _seg_74() + + _seg_75() + + _seg_76() + + _seg_77() + + _seg_78() + + _seg_79() + + _seg_80() + + _seg_81() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/INSTALLER b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/LICENSE b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/LICENSE new file mode 100644 index 00000000..8509ccd6 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022-2023, Redis, inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/METADATA b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/METADATA new file mode 100644 index 00000000..0b2c369a --- /dev/null +++ b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/METADATA @@ -0,0 +1,217 @@ +Metadata-Version: 2.1 +Name: redis +Version: 5.0.4 +Summary: Python client for Redis database and key-value store +Home-page: https://github.com/redis/redis-py +Author: Redis Inc. 
+Author-email: oss@redis.com +License: MIT +Project-URL: Documentation, https://redis.readthedocs.io/en/latest/ +Project-URL: Changes, https://github.com/redis/redis-py/releases +Project-URL: Code, https://github.com/redis/redis-py +Project-URL: Issue tracker, https://github.com/redis/redis-py/issues +Keywords: Redis,key-value store,database +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: async-timeout >=4.0.3 ; python_full_version < "3.11.3" +Requires-Dist: importlib-metadata >=1.0 ; python_version < "3.8" +Requires-Dist: typing-extensions ; python_version < "3.8" +Provides-Extra: hiredis +Requires-Dist: hiredis >=1.0.0 ; extra == 'hiredis' +Provides-Extra: ocsp +Requires-Dist: cryptography >=36.0.1 ; extra == 'ocsp' +Requires-Dist: pyopenssl ==20.0.1 ; extra == 'ocsp' +Requires-Dist: requests >=2.26.0 ; extra == 'ocsp' + +# redis-py + +The Python interface to the Redis key-value store. + +[![CI](https://github.com/redis/redis-py/workflows/CI/badge.svg?branch=master)](https://github.com/redis/redis-py/actions?query=workflow%3ACI+branch%3Amaster) +[![docs](https://readthedocs.org/projects/redis/badge/?version=stable&style=flat)](https://redis-py.readthedocs.io/en/stable/) +[![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE) +[![pypi](https://badge.fury.io/py/redis.svg)](https://pypi.org/project/redis/) +[![pre-release](https://img.shields.io/github/v/release/redis/redis-py?include_prereleases&label=latest-prerelease)](https://github.com/redis/redis-py/releases) +[![codecov](https://codecov.io/gh/redis/redis-py/branch/master/graph/badge.svg?token=yenl5fzxxr)](https://codecov.io/gh/redis/redis-py) + +[Installation](#installation) | [Usage](#usage) | [Advanced Topics](#advanced-topics) | [Contributing](https://github.com/redis/redis-py/blob/master/CONTRIBUTING.md) + +--------------------------------------------- + +**Note:** redis-py 5.0 will be the last version of redis-py to support Python 3.7, as it has reached [end of life](https://devguide.python.org/versions/). redis-py 5.1 will support Python 3.8+. + +--------------------------------------------- + +## How do I Redis?
+ +[Learn for free at Redis University](https://university.redis.com/) + +[Build faster with the Redis Launchpad](https://launchpad.redis.com/) + +[Try the Redis Cloud](https://redis.com/try-free/) + +[Dive into developer tutorials](https://developer.redis.com/) + +[Join the Redis community](https://redis.com/community/) + +[Work at Redis](https://redis.com/company/careers/jobs/) + +## Installation + +Start a Redis server via Docker: + +``` bash +docker run -p 6379:6379 -it redis/redis-stack:latest +``` + +To install redis-py, simply: + +``` bash +$ pip install redis +``` + +For faster performance, install redis-py with hiredis support; this provides a compiled response parser and, *for most cases*, requires zero code changes. +By default, if hiredis >= 1.0 is available, redis-py will attempt to use it for response parsing. + +``` bash +$ pip install "redis[hiredis]" +``` + +Looking for a high-level library to handle object mapping? See [redis-om-python](https://github.com/redis/redis-om-python)! + +## Supported Redis Versions + +The most recent version of this library supports Redis versions [5.0](https://github.com/redis/redis/blob/5.0/00-RELEASENOTES), [6.0](https://github.com/redis/redis/blob/6.0/00-RELEASENOTES), [6.2](https://github.com/redis/redis/blob/6.2/00-RELEASENOTES), [7.0](https://github.com/redis/redis/blob/7.0/00-RELEASENOTES) and [7.2](https://github.com/redis/redis/blob/7.2/00-RELEASENOTES). + +The table below highlights version compatibility of the most-recent library versions and Redis versions. + +| Library version | Supported Redis versions | +|-----------------|-------------------| +| 3.5.3 | <= 6.2 family of releases | +| >= 4.5.0 | Version 5.0 to 7.0 | +| >= 5.0.0 | Version 5.0 to current | + + +## Usage + +### Basic Example + +``` python +>>> import redis +>>> r = redis.Redis(host='localhost', port=6379, db=0) +>>> r.set('foo', 'bar') +True +>>> r.get('foo') +b'bar' +``` + +The above code connects to localhost on port 6379, sets a value in Redis, and retrieves it. All responses are returned as bytes in Python; to receive decoded strings, set *decode_responses=True*. For this, and more connection options, see [these examples](https://redis.readthedocs.io/en/stable/examples.html). + + +#### RESP3 Support +To enable support for RESP3, ensure you have at least version 5.0 of the client, and change your connection object to include *protocol=3*. + +``` python +>>> import redis +>>> r = redis.Redis(host='localhost', port=6379, db=0, protocol=3) +``` + +### Connection Pools + +By default, redis-py uses a connection pool to manage connections. Each instance of a Redis class receives its own connection pool. You can, however, define your own [redis.ConnectionPool](https://redis.readthedocs.io/en/stable/connections.html#connection-pools). + +``` python +>>> pool = redis.ConnectionPool(host='localhost', port=6379, db=0) +>>> r = redis.Redis(connection_pool=pool) +``` + +Alternatively, you might want to look at [Async connections](https://redis.readthedocs.io/en/stable/examples/asyncio_examples.html), [Cluster connections](https://redis.readthedocs.io/en/stable/connections.html#cluster-client), or even [Async Cluster connections](https://redis.readthedocs.io/en/stable/connections.html#async-cluster-client). + +### Redis Commands + +There is built-in support for all of the [out-of-the-box Redis commands](https://redis.io/commands). They are exposed using the raw Redis command names (`HSET`, `HGETALL`, etc.) except where a word (e.g. `del`) is reserved by the language.
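Two practical corollaries of the notes above: responses come back as bytes unless *decode_responses=True* is set, and the DEL command is exposed as `delete()` because `del` is a reserved word in Python. A minimal sketch, assuming a local Redis on the default port as in the examples above:

``` python
>>> import redis
>>> # decode_responses=True returns str instead of bytes
>>> r = redis.Redis(host='localhost', port=6379, db=0, decode_responses=True)
>>> r.set('foo', 'bar')
True
>>> r.get('foo')  # decoded to 'bar' rather than b'bar'
'bar'
>>> r.delete('foo')  # DEL is named delete() since del is a Python keyword
1
```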
The complete set of commands can be found [here](https://github.com/redis/redis-py/tree/master/redis/commands) or in [the documentation](https://redis.readthedocs.io/en/stable/commands.html). + +## Advanced Topics + +The [official Redis command documentation](https://redis.io/commands) +does a great job of explaining each command in detail. redis-py attempts +to adhere to the official command syntax. There are a few exceptions: + +- **MULTI/EXEC**: These are implemented as part of the Pipeline class. + The pipeline is wrapped with the MULTI and EXEC statements by + default when it is executed, which can be disabled by specifying + transaction=False. See more about Pipelines below; a short + transaction=False sketch also follows at the end of this README. + +- **SUBSCRIBE/LISTEN**: Similar to pipelines, PubSub is implemented as + a separate class as it places the underlying connection in a state + where it can't execute non-pubsub commands. Calling the pubsub + method from the Redis client will return a PubSub instance where you + can subscribe to channels and listen for messages. You can only call + PUBLISH from the Redis client (see [this comment on issue + #151](https://github.com/redis/redis-py/issues/151#issuecomment-1545015) + for details). + +For more details, please see the documentation on the [advanced topics page](https://redis.readthedocs.io/en/stable/advanced_features.html). + +### Pipelines + +The following is a basic example of a [Redis pipeline](https://redis.io/docs/manual/pipelining/), a method to optimize round-trip calls by batching Redis commands and receiving their results as a list. + + +``` python +>>> pipe = r.pipeline() +>>> pipe.set('foo', 5) +>>> pipe.set('bar', 18.5) +>>> pipe.set('blee', "hello world!") +>>> pipe.execute() +[True, True, True] +``` + +### PubSub + +The following example shows how to utilize [Redis Pub/Sub](https://redis.io/docs/manual/pubsub/) to subscribe to specific channels. + +``` python +>>> r = redis.Redis(...) +>>> p = r.pubsub() +>>> p.subscribe('my-first-channel', 'my-second-channel', ...) +>>> p.get_message() +{'pattern': None, 'type': 'subscribe', 'channel': b'my-second-channel', 'data': 1} +``` + + +-------------------------- + +### Author + +redis-py is developed and maintained by [Redis Inc](https://redis.com). It can be found [here]( +https://github.com/redis/redis-py), or downloaded from [pypi](https://pypi.org/project/redis/). + +Special thanks to: + +- Andy McCurdy (), the original author of redis-py. +- Ludovico Magnocavallo, author of the original Python Redis client, + from which some of the socket code is still used. +- Alexander Solovyov for ideas on the generic response callback + system. +- Paul Hubbard for initial packaging support.
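As flagged in the MULTI/EXEC note above, a pipeline wraps its queued commands in MULTI/EXEC by default; passing transaction=False still batches the commands into a single round trip but drops the atomicity guarantee. A minimal sketch, assuming the same local connection used in the examples above:

``` python
>>> r = redis.Redis(host='localhost', port=6379, db=0)
>>> # transaction=False pipelines the commands without MULTI/EXEC,
>>> # reducing round trips but not executing them atomically
>>> pipe = r.pipeline(transaction=False)
>>> pipe.incr('hits')
>>> pipe.expire('hits', 60)
>>> pipe.execute()
[1, True]
```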
+ +[![Redis](./docs/logo-redis.png)](https://www.redis.com) + diff --git a/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/RECORD b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/RECORD new file mode 100644 index 00000000..115a319c --- /dev/null +++ b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/RECORD @@ -0,0 +1,147 @@ +redis-5.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +redis-5.0.4.dist-info/LICENSE,sha256=pXslClvwPXr-VbdAYzE_Ktt7ANVGwKsUmok5gzP-PMg,1074 +redis-5.0.4.dist-info/METADATA,sha256=tOuwQIADxsbY5DlM5LnRLFvzZUTTEYdrWMEBRL4AwAs,9298 +redis-5.0.4.dist-info/RECORD,, +redis-5.0.4.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 +redis-5.0.4.dist-info/top_level.txt,sha256=OMAefszlde6ZoOtlM35AWzpRIrwtcqAMHGlRit-w2-4,6 +redis/__init__.py,sha256=PthSOEfXKlYV9xBgroOnO2tJD7uu0BWwvztgsKUvK48,2110 +redis/__pycache__/__init__.cpython-311.pyc,, +redis/__pycache__/backoff.cpython-311.pyc,, +redis/__pycache__/client.cpython-311.pyc,, +redis/__pycache__/cluster.cpython-311.pyc,, +redis/__pycache__/compat.cpython-311.pyc,, +redis/__pycache__/connection.cpython-311.pyc,, +redis/__pycache__/crc.cpython-311.pyc,, +redis/__pycache__/credentials.cpython-311.pyc,, +redis/__pycache__/exceptions.cpython-311.pyc,, +redis/__pycache__/lock.cpython-311.pyc,, +redis/__pycache__/ocsp.cpython-311.pyc,, +redis/__pycache__/retry.cpython-311.pyc,, +redis/__pycache__/sentinel.cpython-311.pyc,, +redis/__pycache__/typing.cpython-311.pyc,, +redis/__pycache__/utils.cpython-311.pyc,, +redis/_parsers/__init__.py,sha256=qkfgV2X9iyvQAvbLdSelwgz0dCk9SGAosCvuZC9-qDc,550 +redis/_parsers/__pycache__/__init__.cpython-311.pyc,, +redis/_parsers/__pycache__/base.cpython-311.pyc,, +redis/_parsers/__pycache__/commands.cpython-311.pyc,, +redis/_parsers/__pycache__/encoders.cpython-311.pyc,, +redis/_parsers/__pycache__/helpers.cpython-311.pyc,, +redis/_parsers/__pycache__/hiredis.cpython-311.pyc,, +redis/_parsers/__pycache__/resp2.cpython-311.pyc,, +redis/_parsers/__pycache__/resp3.cpython-311.pyc,, +redis/_parsers/__pycache__/socket.cpython-311.pyc,, +redis/_parsers/base.py,sha256=95SoPNwt4xJQB-ONIjxsR46n4EHnxnmkv9f0ReZSIR0,7480 +redis/_parsers/commands.py,sha256=pmR4hl4u93UvCmeDgePHFc6pWDr4slrKEvCsdMmtj_M,11052 +redis/_parsers/encoders.py,sha256=X0jvTp-E4TZUlZxV5LJJ88TuVrF1vly5tuC0xjxGaSc,1734 +redis/_parsers/helpers.py,sha256=w5SNtnOA-t-znes-6koV2XoYQ9X45VMi2yWKZcLgtWQ,27958 +redis/_parsers/hiredis.py,sha256=rDdsiFAEwxSS1prNNfSO0rVvJyf18opMEjRHAwmz99w,7783 +redis/_parsers/resp2.py,sha256=f22kH-_ZP2iNtOn6xOe65MSy_fJpu8OEn1u_hgeeojI,4813 +redis/_parsers/resp3.py,sha256=RWKRikLk26Y9GbErSXl6LvFdntGESRfk68Zqj4fQ56Y,10017 +redis/_parsers/socket.py,sha256=CKD8QW_wFSNlIZzxlbNduaGpiv0I8wBcsGuAIojDfJg,5403 +redis/asyncio/__init__.py,sha256=uoDD8XYVi0Kj6mcufYwLDUTQXmBRx7a0bhKF9stZr7I,1489 +redis/asyncio/__pycache__/__init__.cpython-311.pyc,, +redis/asyncio/__pycache__/client.cpython-311.pyc,, +redis/asyncio/__pycache__/cluster.cpython-311.pyc,, +redis/asyncio/__pycache__/connection.cpython-311.pyc,, +redis/asyncio/__pycache__/lock.cpython-311.pyc,, +redis/asyncio/__pycache__/retry.cpython-311.pyc,, +redis/asyncio/__pycache__/sentinel.cpython-311.pyc,, +redis/asyncio/__pycache__/utils.cpython-311.pyc,, +redis/asyncio/client.py,sha256=YohQEyth8iM1A33_qHBf_xxro22_qG33hmeMsBuPx3c,59355 +redis/asyncio/cluster.py,sha256=Cdy_io7fRou50jMYQcNV4wk5NgwUtmR7V1R-_wxQ0-0,63292 +redis/asyncio/connection.py,sha256=6OfdGjQTg6M1zkr2PhFPs_9R0VDeF9ooY3QJdIwCVrY,45884 
+redis/asyncio/lock.py,sha256=lLasXEO2E1CskhX5ZZoaSGpmwZP1Q782R3HAUNG3wD4,11967 +redis/asyncio/retry.py,sha256=SnPPOlo5gcyIFtkC4DY7HFvmDgUaILsJ3DeHioogdB8,2219 +redis/asyncio/sentinel.py,sha256=sTVJCbi1KtIbHJc3fkHRZb_LGav_UtCAq-ipxltkGsE,14198 +redis/asyncio/utils.py,sha256=Yxc5YQumhLjtDDwCS4mgxI6yy2Z21AzLlFxVbxCohic,704 +redis/backoff.py,sha256=x-sAjV7u4MmdOjFZSZ8RnUnCaQtPhCBbGNBgICvCW3I,2966 +redis/client.py,sha256=HYrKlkN9uFDuB3ouh3OEqA_EkUujqA6Sh2ZfcFu4zCs,57978 +redis/cluster.py,sha256=rKTO8bhY2Sgv6WHmdSg3CFSVnr4Oua4LFwFJQBqr5gk,93040 +redis/commands/__init__.py,sha256=cTUH-MGvaLYS0WuoytyqtN1wniw2A1KbkUXcpvOSY3I,576 +redis/commands/__pycache__/__init__.cpython-311.pyc,, +redis/commands/__pycache__/cluster.cpython-311.pyc,, +redis/commands/__pycache__/core.cpython-311.pyc,, +redis/commands/__pycache__/helpers.cpython-311.pyc,, +redis/commands/__pycache__/redismodules.cpython-311.pyc,, +redis/commands/__pycache__/sentinel.cpython-311.pyc,, +redis/commands/bf/__init__.py,sha256=ESmQXH4p9Dp37tNCwQGDiF_BHDEaKnXSF7ZfASEqkFY,8027 +redis/commands/bf/__pycache__/__init__.cpython-311.pyc,, +redis/commands/bf/__pycache__/commands.cpython-311.pyc,, +redis/commands/bf/__pycache__/info.cpython-311.pyc,, +redis/commands/bf/commands.py,sha256=kVWUatdS0zLcu8-fVIqLLQBU5u8fJWIOCVUD3fqYVp0,21462 +redis/commands/bf/info.py,sha256=tpE4hv1zApxoOgyV9_8BEDZcl4Wf6tS1dSvtlxV7uTE,3395 +redis/commands/cluster.py,sha256=AdoYSe__C9f2J6To9_wuITQxim1n4X41FFXbSrQxn78,31618 +redis/commands/core.py,sha256=x0uW602wqVNIvwF0DTOF10ucaJ2PFciktXjkMF04Emk,223625 +redis/commands/graph/__init__.py,sha256=NmklyOuzIa20yEWrhnKQxgQlaXKYkcwBkGHpvQyo5J8,7237 +redis/commands/graph/__pycache__/__init__.cpython-311.pyc,, +redis/commands/graph/__pycache__/commands.cpython-311.pyc,, +redis/commands/graph/__pycache__/edge.cpython-311.pyc,, +redis/commands/graph/__pycache__/exceptions.cpython-311.pyc,, +redis/commands/graph/__pycache__/execution_plan.cpython-311.pyc,, +redis/commands/graph/__pycache__/node.cpython-311.pyc,, +redis/commands/graph/__pycache__/path.cpython-311.pyc,, +redis/commands/graph/__pycache__/query_result.cpython-311.pyc,, +redis/commands/graph/commands.py,sha256=rLGV58ZJKEf6yxzk1oD3IwiS03lP6bpbo0249pFI0OY,10379 +redis/commands/graph/edge.py,sha256=_TljVB4a1pPS9pb8_Cvw8rclbBOOI__-fY9fybU4djQ,2460 +redis/commands/graph/exceptions.py,sha256=kRDBsYLgwIaM4vqioO_Bp_ugWvjfqCH7DIv4Gpc9HCM,107 +redis/commands/graph/execution_plan.py,sha256=Pxr8_zhPWT_EdZSgGrbiWw8wFL6q5JF7O-Z6Xzm55iw,6742 +redis/commands/graph/node.py,sha256=Pasfsl5dF6WqT9KCNFAKKwGubyK_2ORCoAQE4VtnXkQ,2400 +redis/commands/graph/path.py,sha256=m6Gz4DYfMIQ8VReDLHlnQw_KI2rVdepWYk_AU0_x_GM,2080 +redis/commands/graph/query_result.py,sha256=GTEnBE0rAiUk4JquaxcVKdL1kzSMDWW5ky-iFTvRN84,17040 +redis/commands/helpers.py,sha256=zTLRBkjEXizZwHgE0X-Br5pgrCnK0JVN7koyh-1KSag,4741 +redis/commands/json/__init__.py,sha256=llpDQz2kBNnJyfQfuh0-2oY-knMb6gAS0ADtPmaTKsM,4854 +redis/commands/json/__pycache__/__init__.cpython-311.pyc,, +redis/commands/json/__pycache__/_util.cpython-311.pyc,, +redis/commands/json/__pycache__/commands.cpython-311.pyc,, +redis/commands/json/__pycache__/decoders.cpython-311.pyc,, +redis/commands/json/__pycache__/path.cpython-311.pyc,, +redis/commands/json/_util.py,sha256=b_VQTh10FyLl8BtREfJfDagOJCyd6wTQQs8g63pi5GI,116 +redis/commands/json/commands.py,sha256=oD8tMD1iLF_FHWCKDzN7OPWu4wejWt6vcIpKWzPMFb4,15637 +redis/commands/json/decoders.py,sha256=a_IoMV_wgeJyUifD4P6HTcM9s6FhricwmzQcZRmc-Gw,1411 
+redis/commands/json/path.py,sha256=0zaO6_q_FVMk1Bkhkb7Wcr8AF2Tfr69VhkKy1IBVhpA,393 +redis/commands/redismodules.py,sha256=7TfVzLj319mhsA6WEybsOdIPk4pC-1hScJg3H5hv3T4,2454 +redis/commands/search/__init__.py,sha256=happQFVF0j7P87p7LQsUK5AK0kuem9cA-xvVRdQWpos,5744 +redis/commands/search/__pycache__/__init__.cpython-311.pyc,, +redis/commands/search/__pycache__/_util.cpython-311.pyc,, +redis/commands/search/__pycache__/aggregation.cpython-311.pyc,, +redis/commands/search/__pycache__/commands.cpython-311.pyc,, +redis/commands/search/__pycache__/document.cpython-311.pyc,, +redis/commands/search/__pycache__/field.cpython-311.pyc,, +redis/commands/search/__pycache__/indexDefinition.cpython-311.pyc,, +redis/commands/search/__pycache__/query.cpython-311.pyc,, +redis/commands/search/__pycache__/querystring.cpython-311.pyc,, +redis/commands/search/__pycache__/reducers.cpython-311.pyc,, +redis/commands/search/__pycache__/result.cpython-311.pyc,, +redis/commands/search/__pycache__/suggestion.cpython-311.pyc,, +redis/commands/search/_util.py,sha256=VAguSwh_3dNtJwNU6Vle2CNdPE10_NUkPffD7GWFX48,193 +redis/commands/search/aggregation.py,sha256=8yQ1P31Qiy29xehlmN2ToCh73e-MHmOg_y0_UXfQDS8,10772 +redis/commands/search/commands.py,sha256=dpSMZ7hXjbAlrUL4h5GX6BtP4WibQZCO6Ylfo8qkAF0,36751 +redis/commands/search/document.py,sha256=g2R-PRgq-jN33_GLXzavvse4cpIHBMfjPfPK7tnE9Gc,413 +redis/commands/search/field.py,sha256=0o1GAwcnN6rBrepB31_9zU9qqijyGopCfjy2H6F9iRU,4923 +redis/commands/search/indexDefinition.py,sha256=VL2CMzjxN0HEIaTn88evnHX1fCEmytbik4vAmiiYSC8,2489 +redis/commands/search/query.py,sha256=blBcgFnurT9rkg4gI6j14EekWU_J9e_aDlryVCCWDjM,11564 +redis/commands/search/querystring.py,sha256=dE577kOqkCErNgO-IXI4xFVHI8kQE-JiH5ZRI_CKjHE,7597 +redis/commands/search/reducers.py,sha256=Scceylx8BjyqS-TJOdhNW63n6tecL9ojt4U5Sqho5UY,4220 +redis/commands/search/result.py,sha256=4H7LnOVWScti7WO2XYxjhiTu3QNIt2pZHO1eptXZDBk,2149 +redis/commands/search/suggestion.py,sha256=V_re6suDCoNc0ETn_P1t51FeK4pCamPwxZRxCY8jscE,1612 +redis/commands/sentinel.py,sha256=hRcIQ9x9nEkdcCsJzo6Ves6vk-3tsfQqfJTT_v3oLY0,4110 +redis/commands/timeseries/__init__.py,sha256=gkz6wshEzzQQryBOnrAqqQzttS-AHfXmuN_H1J38EbM,3459 +redis/commands/timeseries/__pycache__/__init__.cpython-311.pyc,, +redis/commands/timeseries/__pycache__/commands.cpython-311.pyc,, +redis/commands/timeseries/__pycache__/info.cpython-311.pyc,, +redis/commands/timeseries/__pycache__/utils.cpython-311.pyc,, +redis/commands/timeseries/commands.py,sha256=BEzB4alqmVxx-SvCS6gm_oEmC-Y0SIHCgZ7Wla_lnhM,34143 +redis/commands/timeseries/info.py,sha256=5deBInBtLPb3ZrVoSB4EhWkRPkSIW5Qd_98rMDnutnk,3207 +redis/commands/timeseries/utils.py,sha256=o7q7Fe1wgpdTLKyGY8Qi2VV6XKEBprhzmPdrFz3OIvo,1309 +redis/compat.py,sha256=tr-t9oHdeosrK3TvZySaLvP3ZlGqTZQaXtlTqiqp_8I,242 +redis/connection.py,sha256=qO6ue4DyItkI5m2xQcAHYz6ivKwaoYKlYPNoorXsjXE,51921 +redis/crc.py,sha256=Z3kXFtkY2LdgefnQMud1xr4vG5UYvA9LCMqNMX1ywu4,729 +redis/credentials.py,sha256=6VvFeReFp6vernGIWlIVOm8OmbNgoFYdd1wgsjZTnlk,738 +redis/exceptions.py,sha256=xwIOiFPcv1CBuVwtMDY3BVOxwdSAcmS-asTXNlpOPPQ,5215 +redis/lock.py,sha256=3JOC3AmYJ10zbq0blOtV4uNwuEhw4K7xuJ6nM-qv5Ig,11976 +redis/ocsp.py,sha256=4b1s43x-DJ859zRKtwGTIbNys_dyGv5YyOdWnOvigyM,11451 +redis/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +redis/retry.py,sha256=Ssp9s2hhDfyRs0rCRCaTgRtLR7NAYO5QMw4QflourGo,1817 +redis/sentinel.py,sha256=CErsD-c3mYFnXDttCY1OvpyUdfKcyD5F9Jv9Fd3iHuU,14175 +redis/typing.py,sha256=wjyihEjyGiJrigcs0-zhy7K-MzVy7uLidjszNdPHMug,2212 
+redis/utils.py,sha256=87p7ImnihyIhiaqalVYh9Qq9JeaVwi_Y4GBzNaHAXJg,3381 diff --git a/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/WHEEL b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/WHEEL new file mode 100644 index 00000000..bab98d67 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.43.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/top_level.txt b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/top_level.txt new file mode 100644 index 00000000..7800f0fa --- /dev/null +++ b/templates/skills/spotify/dependencies/redis-5.0.4.dist-info/top_level.txt @@ -0,0 +1 @@ +redis diff --git a/templates/skills/spotify/dependencies/redis/__init__.py b/templates/skills/spotify/dependencies/redis/__init__.py new file mode 100644 index 00000000..495d2d99 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/__init__.py @@ -0,0 +1,94 @@ +import sys + +from redis import asyncio # noqa +from redis.backoff import default_backoff +from redis.client import Redis, StrictRedis +from redis.cluster import RedisCluster +from redis.connection import ( + BlockingConnectionPool, + Connection, + ConnectionPool, + SSLConnection, + UnixDomainSocketConnection, +) +from redis.credentials import CredentialProvider, UsernamePasswordCredentialProvider +from redis.exceptions import ( + AuthenticationError, + AuthenticationWrongNumberOfArgsError, + BusyLoadingError, + ChildDeadlockedError, + ConnectionError, + DataError, + InvalidResponse, + OutOfMemoryError, + PubSubError, + ReadOnlyError, + RedisError, + ResponseError, + TimeoutError, + WatchError, +) +from redis.sentinel import ( + Sentinel, + SentinelConnectionPool, + SentinelManagedConnection, + SentinelManagedSSLConnection, +) +from redis.utils import from_url + +if sys.version_info >= (3, 8): + from importlib import metadata +else: + import importlib_metadata as metadata + + +def int_or_str(value): + try: + return int(value) + except ValueError: + return value + + +try: + __version__ = metadata.version("redis") +except metadata.PackageNotFoundError: + __version__ = "99.99.99" + + +try: + VERSION = tuple(map(int_or_str, __version__.split("."))) +except AttributeError: + VERSION = tuple([99, 99, 99]) + +__all__ = [ + "AuthenticationError", + "AuthenticationWrongNumberOfArgsError", + "BlockingConnectionPool", + "BusyLoadingError", + "ChildDeadlockedError", + "Connection", + "ConnectionError", + "ConnectionPool", + "CredentialProvider", + "DataError", + "from_url", + "default_backoff", + "InvalidResponse", + "OutOfMemoryError", + "PubSubError", + "ReadOnlyError", + "Redis", + "RedisCluster", + "RedisError", + "ResponseError", + "Sentinel", + "SentinelConnectionPool", + "SentinelManagedConnection", + "SentinelManagedSSLConnection", + "SSLConnection", + "UsernamePasswordCredentialProvider", + "StrictRedis", + "TimeoutError", + "UnixDomainSocketConnection", + "WatchError", +] diff --git a/templates/skills/spotify/dependencies/redis/_parsers/__init__.py b/templates/skills/spotify/dependencies/redis/_parsers/__init__.py new file mode 100644 index 00000000..6cc32e3c --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/__init__.py @@ -0,0 +1,20 @@ +from .base import BaseParser, _AsyncRESPBase +from .commands import AsyncCommandsParser, CommandsParser +from .encoders import Encoder +from .hiredis import _AsyncHiredisParser, _HiredisParser +from 
.resp2 import _AsyncRESP2Parser, _RESP2Parser +from .resp3 import _AsyncRESP3Parser, _RESP3Parser + +__all__ = [ + "AsyncCommandsParser", + "_AsyncHiredisParser", + "_AsyncRESPBase", + "_AsyncRESP2Parser", + "_AsyncRESP3Parser", + "CommandsParser", + "Encoder", + "BaseParser", + "_HiredisParser", + "_RESP2Parser", + "_RESP3Parser", +] diff --git a/templates/skills/spotify/dependencies/redis/_parsers/base.py b/templates/skills/spotify/dependencies/redis/_parsers/base.py new file mode 100644 index 00000000..8e59249b --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/base.py @@ -0,0 +1,225 @@ +import sys +from abc import ABC +from asyncio import IncompleteReadError, StreamReader, TimeoutError +from typing import List, Optional, Union + +if sys.version_info.major >= 3 and sys.version_info.minor >= 11: + from asyncio import timeout as async_timeout +else: + from async_timeout import timeout as async_timeout + +from ..exceptions import ( + AuthenticationError, + AuthenticationWrongNumberOfArgsError, + BusyLoadingError, + ConnectionError, + ExecAbortError, + ModuleError, + NoPermissionError, + NoScriptError, + OutOfMemoryError, + ReadOnlyError, + RedisError, + ResponseError, +) +from ..typing import EncodableT +from .encoders import Encoder +from .socket import SERVER_CLOSED_CONNECTION_ERROR, SocketBuffer + +MODULE_LOAD_ERROR = "Error loading the extension. " "Please check the server logs." +NO_SUCH_MODULE_ERROR = "Error unloading module: no such module with that name" +MODULE_UNLOAD_NOT_POSSIBLE_ERROR = "Error unloading module: operation not " "possible." +MODULE_EXPORTS_DATA_TYPES_ERROR = ( + "Error unloading module: the module " + "exports one or more module-side data " + "types, can't unload" +) +# user send an AUTH cmd to a server without authorization configured +NO_AUTH_SET_ERROR = { + # Redis >= 6.0 + "AUTH called without any password " + "configured for the default user. 
Are you sure " + "your configuration is correct?": AuthenticationError, + # Redis < 6.0 + "Client sent AUTH, but no password is set": AuthenticationError, +} + + +class BaseParser(ABC): + EXCEPTION_CLASSES = { + "ERR": { + "max number of clients reached": ConnectionError, + "invalid password": AuthenticationError, + # some Redis server versions report invalid command syntax + # in lowercase + "wrong number of arguments " + "for 'auth' command": AuthenticationWrongNumberOfArgsError, + # some Redis server versions report invalid command syntax + # in uppercase + "wrong number of arguments " + "for 'AUTH' command": AuthenticationWrongNumberOfArgsError, + MODULE_LOAD_ERROR: ModuleError, + MODULE_EXPORTS_DATA_TYPES_ERROR: ModuleError, + NO_SUCH_MODULE_ERROR: ModuleError, + MODULE_UNLOAD_NOT_POSSIBLE_ERROR: ModuleError, + **NO_AUTH_SET_ERROR, + }, + "OOM": OutOfMemoryError, + "WRONGPASS": AuthenticationError, + "EXECABORT": ExecAbortError, + "LOADING": BusyLoadingError, + "NOSCRIPT": NoScriptError, + "READONLY": ReadOnlyError, + "NOAUTH": AuthenticationError, + "NOPERM": NoPermissionError, + } + + @classmethod + def parse_error(cls, response): + "Parse an error response" + error_code = response.split(" ")[0] + if error_code in cls.EXCEPTION_CLASSES: + response = response[len(error_code) + 1 :] + exception_class = cls.EXCEPTION_CLASSES[error_code] + if isinstance(exception_class, dict): + exception_class = exception_class.get(response, ResponseError) + return exception_class(response) + return ResponseError(response) + + def on_disconnect(self): + raise NotImplementedError() + + def on_connect(self, connection): + raise NotImplementedError() + + +class _RESPBase(BaseParser): + """Base class for sync-based resp parsing""" + + def __init__(self, socket_read_size): + self.socket_read_size = socket_read_size + self.encoder = None + self._sock = None + self._buffer = None + + def __del__(self): + try: + self.on_disconnect() + except Exception: + pass + + def on_connect(self, connection): + "Called when the socket connects" + self._sock = connection._sock + self._buffer = SocketBuffer( + self._sock, self.socket_read_size, connection.socket_timeout + ) + self.encoder = connection.encoder + + def on_disconnect(self): + "Called when the socket disconnects" + self._sock = None + if self._buffer is not None: + self._buffer.close() + self._buffer = None + self.encoder = None + + def can_read(self, timeout): + return self._buffer and self._buffer.can_read(timeout) + + +class AsyncBaseParser(BaseParser): + """Base parsing class for the python-backed async parser""" + + __slots__ = "_stream", "_read_size" + + def __init__(self, socket_read_size: int): + self._stream: Optional[StreamReader] = None + self._read_size = socket_read_size + + async def can_read_destructive(self) -> bool: + raise NotImplementedError() + + async def read_response( + self, disable_decoding: bool = False + ) -> Union[EncodableT, ResponseError, None, List[EncodableT]]: + raise NotImplementedError() + + +class _AsyncRESPBase(AsyncBaseParser): + """Base class for async resp parsing""" + + __slots__ = AsyncBaseParser.__slots__ + ("encoder", "_buffer", "_pos", "_chunks") + + def __init__(self, socket_read_size: int): + super().__init__(socket_read_size) + self.encoder: Optional[Encoder] = None + self._buffer = b"" + self._chunks = [] + self._pos = 0 + + def _clear(self): + self._buffer = b"" + self._chunks.clear() + + def on_connect(self, connection): + """Called when the stream connects""" + self._stream = connection._reader + if 
self._stream is None: + raise RedisError("Buffer is closed.") + self.encoder = connection.encoder + self._clear() + self._connected = True + + def on_disconnect(self): + """Called when the stream disconnects""" + self._connected = False + + async def can_read_destructive(self) -> bool: + if not self._connected: + raise RedisError("Buffer is closed.") + if self._buffer: + return True + try: + async with async_timeout(0): + return await self._stream.read(1) + except TimeoutError: + return False + + async def _read(self, length: int) -> bytes: + """ + Read `length` bytes of data. These are assumed to be followed + by a '\r\n' terminator which is subsequently discarded. + """ + want = length + 2 + end = self._pos + want + if len(self._buffer) >= end: + result = self._buffer[self._pos : end - 2] + else: + tail = self._buffer[self._pos :] + try: + data = await self._stream.readexactly(want - len(tail)) + except IncompleteReadError as error: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from error + result = (tail + data)[:-2] + self._chunks.append(data) + self._pos += want + return result + + async def _readline(self) -> bytes: + """ + read an unknown number of bytes up to the next '\r\n' + line separator, which is discarded. + """ + found = self._buffer.find(b"\r\n", self._pos) + if found >= 0: + result = self._buffer[self._pos : found] + else: + tail = self._buffer[self._pos :] + data = await self._stream.readline() + if not data.endswith(b"\r\n"): + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + result = (tail + data)[:-2] + self._chunks.append(data) + self._pos += len(result) + 2 + return result diff --git a/templates/skills/spotify/dependencies/redis/_parsers/commands.py b/templates/skills/spotify/dependencies/redis/_parsers/commands.py new file mode 100644 index 00000000..b5109252 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/commands.py @@ -0,0 +1,281 @@ +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union + +from redis.exceptions import RedisError, ResponseError +from redis.utils import str_if_bytes + +if TYPE_CHECKING: + from redis.asyncio.cluster import ClusterNode + + +class AbstractCommandsParser: + def _get_pubsub_keys(self, *args): + """ + Get the keys from pubsub command. + Although PubSub commands have predetermined key locations, they are not + supported in the 'COMMAND's output, so the key positions are hardcoded + in this method + """ + if len(args) < 2: + # The command has no keys in it + return None + args = [str_if_bytes(arg) for arg in args] + command = args[0].upper() + keys = None + if command == "PUBSUB": + # the second argument is a part of the command name, e.g. + # ['PUBSUB', 'NUMSUB', 'foo']. + pubsub_type = args[1].upper() + if pubsub_type in ["CHANNELS", "NUMSUB", "SHARDCHANNELS", "SHARDNUMSUB"]: + keys = args[2:] + elif command in ["SUBSCRIBE", "PSUBSCRIBE", "UNSUBSCRIBE", "PUNSUBSCRIBE"]: + # format example: + # SUBSCRIBE channel [channel ...] 
+ keys = list(args[1:]) + elif command in ["PUBLISH", "SPUBLISH"]: + # format example: + # PUBLISH channel message + keys = [args[1]] + return keys + + def parse_subcommand(self, command, **options): + cmd_dict = {} + cmd_name = str_if_bytes(command[0]) + cmd_dict["name"] = cmd_name + cmd_dict["arity"] = int(command[1]) + cmd_dict["flags"] = [str_if_bytes(flag) for flag in command[2]] + cmd_dict["first_key_pos"] = command[3] + cmd_dict["last_key_pos"] = command[4] + cmd_dict["step_count"] = command[5] + if len(command) > 7: + cmd_dict["tips"] = command[7] + cmd_dict["key_specifications"] = command[8] + cmd_dict["subcommands"] = command[9] + return cmd_dict + + +class CommandsParser(AbstractCommandsParser): + """ + Parses Redis commands to get command keys. + COMMAND output is used to determine key locations. + Commands that do not have a predefined key location are flagged with + 'movablekeys', and these commands' keys are determined by the command + 'COMMAND GETKEYS'. + """ + + def __init__(self, redis_connection): + self.commands = {} + self.initialize(redis_connection) + + def initialize(self, r): + commands = r.command() + uppercase_commands = [] + for cmd in commands: + if any(x.isupper() for x in cmd): + uppercase_commands.append(cmd) + for cmd in uppercase_commands: + commands[cmd.lower()] = commands.pop(cmd) + self.commands = commands + + # As soon as this PR is merged into Redis, we should reimplement + # our logic to use COMMAND INFO changes to determine the key positions + # https://github.com/redis/redis/pull/8324 + def get_keys(self, redis_conn, *args): + """ + Get the keys from the passed command. + + NOTE: Due to a bug in redis<7.0, this function does not work properly + for EVAL or EVALSHA when the `numkeys` arg is 0. + - issue: https://github.com/redis/redis/issues/9493 + - fix: https://github.com/redis/redis/pull/9733 + + So, don't use this function with EVAL or EVALSHA. + """ + if len(args) < 2: + # The command has no keys in it + return None + + cmd_name = args[0].lower() + if cmd_name not in self.commands: + # try to split the command name and to take only the main command, + # e.g. 
'memory' for 'memory usage' + cmd_name_split = cmd_name.split() + cmd_name = cmd_name_split[0] + if cmd_name in self.commands: + # save the splitted command to args + args = cmd_name_split + list(args[1:]) + else: + # We'll try to reinitialize the commands cache, if the engine + # version has changed, the commands may not be current + self.initialize(redis_conn) + if cmd_name not in self.commands: + raise RedisError( + f"{cmd_name.upper()} command doesn't exist in Redis commands" + ) + + command = self.commands.get(cmd_name) + if "movablekeys" in command["flags"]: + keys = self._get_moveable_keys(redis_conn, *args) + elif "pubsub" in command["flags"] or command["name"] == "pubsub": + keys = self._get_pubsub_keys(*args) + else: + if ( + command["step_count"] == 0 + and command["first_key_pos"] == 0 + and command["last_key_pos"] == 0 + ): + is_subcmd = False + if "subcommands" in command: + subcmd_name = f"{cmd_name}|{args[1].lower()}" + for subcmd in command["subcommands"]: + if str_if_bytes(subcmd[0]) == subcmd_name: + command = self.parse_subcommand(subcmd) + is_subcmd = True + + # The command doesn't have keys in it + if not is_subcmd: + return None + last_key_pos = command["last_key_pos"] + if last_key_pos < 0: + last_key_pos = len(args) - abs(last_key_pos) + keys_pos = list( + range(command["first_key_pos"], last_key_pos + 1, command["step_count"]) + ) + keys = [args[pos] for pos in keys_pos] + + return keys + + def _get_moveable_keys(self, redis_conn, *args): + """ + NOTE: Due to a bug in redis<7.0, this function does not work properly + for EVAL or EVALSHA when the `numkeys` arg is 0. + - issue: https://github.com/redis/redis/issues/9493 + - fix: https://github.com/redis/redis/pull/9733 + + So, don't use this function with EVAL or EVALSHA. + """ + # The command name should be splitted into separate arguments, + # e.g. 'MEMORY USAGE' will be splitted into ['MEMORY', 'USAGE'] + pieces = args[0].split() + list(args[1:]) + try: + keys = redis_conn.execute_command("COMMAND GETKEYS", *pieces) + except ResponseError as e: + message = e.__str__() + if ( + "Invalid arguments" in message + or "The command has no key arguments" in message + ): + return None + else: + raise e + return keys + + +class AsyncCommandsParser(AbstractCommandsParser): + """ + Parses Redis commands to get command keys. + + COMMAND output is used to determine key locations. + Commands that do not have a predefined key location are flagged with 'movablekeys', + and these commands' keys are determined by the command 'COMMAND GETKEYS'. + + NOTE: Due to a bug in redis<7.0, this does not work properly + for EVAL or EVALSHA when the `numkeys` arg is 0. + - issue: https://github.com/redis/redis/issues/9493 + - fix: https://github.com/redis/redis/pull/9733 + + So, don't use this with EVAL or EVALSHA. + """ + + __slots__ = ("commands", "node") + + def __init__(self) -> None: + self.commands: Dict[str, Union[int, Dict[str, Any]]] = {} + + async def initialize(self, node: Optional["ClusterNode"] = None) -> None: + if node: + self.node = node + + commands = await self.node.execute_command("COMMAND") + self.commands = {cmd.lower(): command for cmd, command in commands.items()} + + # As soon as this PR is merged into Redis, we should reimplement + # our logic to use COMMAND INFO changes to determine the key positions + # https://github.com/redis/redis/pull/8324 + async def get_keys(self, *args: Any) -> Optional[Tuple[str, ...]]: + """ + Get the keys from the passed command. 
+ + NOTE: Due to a bug in redis<7.0, this function does not work properly + for EVAL or EVALSHA when the `numkeys` arg is 0. + - issue: https://github.com/redis/redis/issues/9493 + - fix: https://github.com/redis/redis/pull/9733 + + So, don't use this function with EVAL or EVALSHA. + """ + if len(args) < 2: + # The command has no keys in it + return None + + cmd_name = args[0].lower() + if cmd_name not in self.commands: + # try to split the command name and to take only the main command, + # e.g. 'memory' for 'memory usage' + cmd_name_split = cmd_name.split() + cmd_name = cmd_name_split[0] + if cmd_name in self.commands: + # save the splitted command to args + args = cmd_name_split + list(args[1:]) + else: + # We'll try to reinitialize the commands cache, if the engine + # version has changed, the commands may not be current + await self.initialize() + if cmd_name not in self.commands: + raise RedisError( + f"{cmd_name.upper()} command doesn't exist in Redis commands" + ) + + command = self.commands.get(cmd_name) + if "movablekeys" in command["flags"]: + keys = await self._get_moveable_keys(*args) + elif "pubsub" in command["flags"] or command["name"] == "pubsub": + keys = self._get_pubsub_keys(*args) + else: + if ( + command["step_count"] == 0 + and command["first_key_pos"] == 0 + and command["last_key_pos"] == 0 + ): + is_subcmd = False + if "subcommands" in command: + subcmd_name = f"{cmd_name}|{args[1].lower()}" + for subcmd in command["subcommands"]: + if str_if_bytes(subcmd[0]) == subcmd_name: + command = self.parse_subcommand(subcmd) + is_subcmd = True + + # The command doesn't have keys in it + if not is_subcmd: + return None + last_key_pos = command["last_key_pos"] + if last_key_pos < 0: + last_key_pos = len(args) - abs(last_key_pos) + keys_pos = list( + range(command["first_key_pos"], last_key_pos + 1, command["step_count"]) + ) + keys = [args[pos] for pos in keys_pos] + + return keys + + async def _get_moveable_keys(self, *args: Any) -> Optional[Tuple[str, ...]]: + try: + keys = await self.node.execute_command("COMMAND GETKEYS", *args) + except ResponseError as e: + message = e.__str__() + if ( + "Invalid arguments" in message + or "The command has no key arguments" in message + ): + return None + else: + raise e + return keys diff --git a/templates/skills/spotify/dependencies/redis/_parsers/encoders.py b/templates/skills/spotify/dependencies/redis/_parsers/encoders.py new file mode 100644 index 00000000..6fdf0ad8 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/encoders.py @@ -0,0 +1,44 @@ +from ..exceptions import DataError + + +class Encoder: + "Encode strings to bytes-like and decode bytes-like to strings" + + __slots__ = "encoding", "encoding_errors", "decode_responses" + + def __init__(self, encoding, encoding_errors, decode_responses): + self.encoding = encoding + self.encoding_errors = encoding_errors + self.decode_responses = decode_responses + + def encode(self, value): + "Return a bytestring or bytes-like representation of the value" + if isinstance(value, (bytes, memoryview)): + return value + elif isinstance(value, bool): + # special case bool since it is a subclass of int + raise DataError( + "Invalid input of type: 'bool'. Convert to a " + "bytes, string, int or float first." + ) + elif isinstance(value, (int, float)): + value = repr(value).encode() + elif not isinstance(value, str): + # a value we don't know how to deal with. throw an error + typename = type(value).__name__ + raise DataError( + f"Invalid input of type: '{typename}'. 
" + f"Convert to a bytes, string, int or float first." + ) + if isinstance(value, str): + value = value.encode(self.encoding, self.encoding_errors) + return value + + def decode(self, value, force=False): + "Return a unicode string from the bytes-like representation" + if self.decode_responses or force: + if isinstance(value, memoryview): + value = value.tobytes() + if isinstance(value, bytes): + value = value.decode(self.encoding, self.encoding_errors) + return value diff --git a/templates/skills/spotify/dependencies/redis/_parsers/helpers.py b/templates/skills/spotify/dependencies/redis/_parsers/helpers.py new file mode 100644 index 00000000..bdd749a5 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/helpers.py @@ -0,0 +1,852 @@ +import datetime + +from redis.utils import str_if_bytes + + +def timestamp_to_datetime(response): + "Converts a unix timestamp to a Python datetime object" + if not response: + return None + try: + response = int(response) + except ValueError: + return None + return datetime.datetime.fromtimestamp(response) + + +def parse_debug_object(response): + "Parse the results of Redis's DEBUG OBJECT command into a Python dict" + # The 'type' of the object is the first item in the response, but isn't + # prefixed with a name + response = str_if_bytes(response) + response = "type:" + response + response = dict(kv.split(":") for kv in response.split()) + + # parse some expected int values from the string response + # note: this cmd isn't spec'd so these may not appear in all redis versions + int_fields = ("refcount", "serializedlength", "lru", "lru_seconds_idle") + for field in int_fields: + if field in response: + response[field] = int(response[field]) + + return response + + +def parse_info(response): + """Parse the result of Redis's INFO command into a Python dict""" + info = {} + response = str_if_bytes(response) + + def get_value(value): + if "," not in value or "=" not in value: + try: + if "." in value: + return float(value) + else: + return int(value) + except ValueError: + return value + else: + sub_dict = {} + for item in value.split(","): + k, v = item.rsplit("=", 1) + sub_dict[k] = get_value(v) + return sub_dict + + for line in response.splitlines(): + if line and not line.startswith("#"): + if line.find(":") != -1: + # Split, the info fields keys and values. + # Note that the value may contain ':'. 
but the 'host:' + # pseudo-command is the only case where the key contains ':' + key, value = line.split(":", 1) + if key == "cmdstat_host": + key, value = line.rsplit(":", 1) + + if key == "module": + # Hardcode a list for key 'modules' since there could be + # multiple lines that started with 'module' + info.setdefault("modules", []).append(get_value(value)) + else: + info[key] = get_value(value) + else: + # if the line isn't splittable, append it to the "__raw__" key + info.setdefault("__raw__", []).append(line) + + return info + + +def parse_memory_stats(response, **kwargs): + """Parse the results of MEMORY STATS""" + stats = pairs_to_dict(response, decode_keys=True, decode_string_values=True) + for key, value in stats.items(): + if key.startswith("db."): + stats[key] = pairs_to_dict( + value, decode_keys=True, decode_string_values=True + ) + return stats + + +SENTINEL_STATE_TYPES = { + "can-failover-its-master": int, + "config-epoch": int, + "down-after-milliseconds": int, + "failover-timeout": int, + "info-refresh": int, + "last-hello-message": int, + "last-ok-ping-reply": int, + "last-ping-reply": int, + "last-ping-sent": int, + "master-link-down-time": int, + "master-port": int, + "num-other-sentinels": int, + "num-slaves": int, + "o-down-time": int, + "pending-commands": int, + "parallel-syncs": int, + "port": int, + "quorum": int, + "role-reported-time": int, + "s-down-time": int, + "slave-priority": int, + "slave-repl-offset": int, + "voted-leader-epoch": int, +} + + +def parse_sentinel_state(item): + result = pairs_to_dict_typed(item, SENTINEL_STATE_TYPES) + flags = set(result["flags"].split(",")) + for name, flag in ( + ("is_master", "master"), + ("is_slave", "slave"), + ("is_sdown", "s_down"), + ("is_odown", "o_down"), + ("is_sentinel", "sentinel"), + ("is_disconnected", "disconnected"), + ("is_master_down", "master_down"), + ): + result[name] = flag in flags + return result + + +def parse_sentinel_master(response): + return parse_sentinel_state(map(str_if_bytes, response)) + + +def parse_sentinel_state_resp3(response): + result = {} + for key in response: + try: + value = SENTINEL_STATE_TYPES[key](str_if_bytes(response[key])) + result[str_if_bytes(key)] = value + except Exception: + result[str_if_bytes(key)] = response[str_if_bytes(key)] + flags = set(result["flags"].split(",")) + result["flags"] = flags + return result + + +def parse_sentinel_masters(response): + result = {} + for item in response: + state = parse_sentinel_state(map(str_if_bytes, item)) + result[state["name"]] = state + return result + + +def parse_sentinel_masters_resp3(response): + return [parse_sentinel_state(master) for master in response] + + +def parse_sentinel_slaves_and_sentinels(response): + return [parse_sentinel_state(map(str_if_bytes, item)) for item in response] + + +def parse_sentinel_slaves_and_sentinels_resp3(response): + return [parse_sentinel_state_resp3(item) for item in response] + + +def parse_sentinel_get_master(response): + return response and (response[0], int(response[1])) or None + + +def pairs_to_dict(response, decode_keys=False, decode_string_values=False): + """Create a dict given a list of key/value pairs""" + if response is None: + return {} + if decode_keys or decode_string_values: + # the iter form is faster, but I don't know how to make that work + # with a str_if_bytes() map + keys = response[::2] + if decode_keys: + keys = map(str_if_bytes, keys) + values = response[1::2] + if decode_string_values: + values = map(str_if_bytes, values) + return dict(zip(keys, values)) + 
else: + it = iter(response) + return dict(zip(it, it)) + + +def pairs_to_dict_typed(response, type_info): + it = iter(response) + result = {} + for key, value in zip(it, it): + if key in type_info: + try: + value = type_info[key](value) + except Exception: + # if for some reason the value can't be coerced, just use + # the string value + pass + result[key] = value + return result + + +def zset_score_pairs(response, **options): + """ + If ``withscores`` is specified in the options, return the response as + a list of (value, score) pairs + """ + if not response or not options.get("withscores"): + return response + score_cast_func = options.get("score_cast_func", float) + it = iter(response) + return list(zip(it, map(score_cast_func, it))) + + +def sort_return_tuples(response, **options): + """ + If ``groups`` is specified, return the response as a list of + n-element tuples with n being the value found in options['groups'] + """ + if not response or not options.get("groups"): + return response + n = options["groups"] + return list(zip(*[response[i::n] for i in range(n)])) + + +def parse_stream_list(response): + if response is None: + return None + data = [] + for r in response: + if r is not None: + data.append((r[0], pairs_to_dict(r[1]))) + else: + data.append((None, None)) + return data + + +def pairs_to_dict_with_str_keys(response): + return pairs_to_dict(response, decode_keys=True) + + +def parse_list_of_dicts(response): + return list(map(pairs_to_dict_with_str_keys, response)) + + +def parse_xclaim(response, **options): + if options.get("parse_justid", False): + return response + return parse_stream_list(response) + + +def parse_xautoclaim(response, **options): + if options.get("parse_justid", False): + return response[1] + response[1] = parse_stream_list(response[1]) + return response + + +def parse_xinfo_stream(response, **options): + if isinstance(response, list): + data = pairs_to_dict(response, decode_keys=True) + else: + data = {str_if_bytes(k): v for k, v in response.items()} + if not options.get("full", False): + first = data.get("first-entry") + if first is not None: + data["first-entry"] = (first[0], pairs_to_dict(first[1])) + last = data["last-entry"] + if last is not None: + data["last-entry"] = (last[0], pairs_to_dict(last[1])) + else: + data["entries"] = {_id: pairs_to_dict(entry) for _id, entry in data["entries"]} + if isinstance(data["groups"][0], list): + data["groups"] = [ + pairs_to_dict(group, decode_keys=True) for group in data["groups"] + ] + else: + data["groups"] = [ + {str_if_bytes(k): v for k, v in group.items()} + for group in data["groups"] + ] + return data + + +def parse_xread(response): + if response is None: + return [] + return [[r[0], parse_stream_list(r[1])] for r in response] + + +def parse_xread_resp3(response): + if response is None: + return {} + return {key: [parse_stream_list(value)] for key, value in response.items()} + + +def parse_xpending(response, **options): + if options.get("parse_detail", False): + return parse_xpending_range(response) + consumers = [{"name": n, "pending": int(p)} for n, p in response[3] or []] + return { + "pending": response[0], + "min": response[1], + "max": response[2], + "consumers": consumers, + } + + +def parse_xpending_range(response): + k = ("message_id", "consumer", "time_since_delivered", "times_delivered") + return [dict(zip(k, r)) for r in response] + + +def float_or_none(response): + if response is None: + return None + return float(response) + + +def bool_ok(response, **options): + return 
str_if_bytes(response) == "OK" + + +def parse_zadd(response, **options): + if response is None: + return None + if options.get("as_score"): + return float(response) + return int(response) + + +def parse_client_list(response, **options): + clients = [] + for c in str_if_bytes(response).splitlines(): + # Values might contain '=' + clients.append(dict(pair.split("=", 1) for pair in c.split(" "))) + return clients + + +def parse_config_get(response, **options): + response = [str_if_bytes(i) if i is not None else None for i in response] + return response and pairs_to_dict(response) or {} + + +def parse_scan(response, **options): + cursor, r = response + return int(cursor), r + + +def parse_hscan(response, **options): + cursor, r = response + return int(cursor), r and pairs_to_dict(r) or {} + + +def parse_zscan(response, **options): + score_cast_func = options.get("score_cast_func", float) + cursor, r = response + it = iter(r) + return int(cursor), list(zip(it, map(score_cast_func, it))) + + +def parse_zmscore(response, **options): + # zmscore: list of scores (double precision floating point number) or nil + return [float(score) if score is not None else None for score in response] + + +def parse_slowlog_get(response, **options): + space = " " if options.get("decode_responses", False) else b" " + + def parse_item(item): + result = {"id": item[0], "start_time": int(item[1]), "duration": int(item[2])} + # Redis Enterprise injects another entry at index [3], which has + # the complexity info (i.e. the value N in case the command has + # an O(N) complexity) instead of the command. + if isinstance(item[3], list): + result["command"] = space.join(item[3]) + result["client_address"] = item[4] + result["client_name"] = item[5] + else: + result["complexity"] = item[3] + result["command"] = space.join(item[4]) + result["client_address"] = item[5] + result["client_name"] = item[6] + return result + + return [parse_item(item) for item in response] + + +def parse_stralgo(response, **options): + """ + Parse the response from `STRALGO` command. + Without modifiers the returned value is string. + When LEN is given the command returns the length of the result + (i.e integer). + When IDX is given the command returns a dictionary with the LCS + length and all the ranges in both the strings, start and end + offset for each string, where there are matches. + When WITHMATCHLEN is given, each array representing a match will + also have the length of the match at the beginning of the array. 
+ """ + if options.get("len", False): + return int(response) + if options.get("idx", False): + if options.get("withmatchlen", False): + matches = [ + [(int(match[-1]))] + list(map(tuple, match[:-1])) + for match in response[1] + ] + else: + matches = [list(map(tuple, match)) for match in response[1]] + return { + str_if_bytes(response[0]): matches, + str_if_bytes(response[2]): int(response[3]), + } + return str_if_bytes(response) + + +def parse_cluster_info(response, **options): + response = str_if_bytes(response) + return dict(line.split(":") for line in response.splitlines() if line) + + +def _parse_node_line(line): + line_items = line.split(" ") + node_id, addr, flags, master_id, ping, pong, epoch, connected = line.split(" ")[:8] + addr = addr.split("@")[0] + node_dict = { + "node_id": node_id, + "flags": flags, + "master_id": master_id, + "last_ping_sent": ping, + "last_pong_rcvd": pong, + "epoch": epoch, + "slots": [], + "migrations": [], + "connected": True if connected == "connected" else False, + } + if len(line_items) >= 9: + slots, migrations = _parse_slots(line_items[8:]) + node_dict["slots"], node_dict["migrations"] = slots, migrations + return addr, node_dict + + +def _parse_slots(slot_ranges): + slots, migrations = [], [] + for s_range in slot_ranges: + if "->-" in s_range: + slot_id, dst_node_id = s_range[1:-1].split("->-", 1) + migrations.append( + {"slot": slot_id, "node_id": dst_node_id, "state": "migrating"} + ) + elif "-<-" in s_range: + slot_id, src_node_id = s_range[1:-1].split("-<-", 1) + migrations.append( + {"slot": slot_id, "node_id": src_node_id, "state": "importing"} + ) + else: + s_range = [sl for sl in s_range.split("-")] + slots.append(s_range) + + return slots, migrations + + +def parse_cluster_nodes(response, **options): + """ + @see: https://redis.io/commands/cluster-nodes # string / bytes + @see: https://redis.io/commands/cluster-replicas # list of string / bytes + """ + if isinstance(response, (str, bytes)): + response = response.splitlines() + return dict(_parse_node_line(str_if_bytes(node)) for node in response) + + +def parse_geosearch_generic(response, **options): + """ + Parse the response of 'GEOSEARCH', GEORADIUS' and 'GEORADIUSBYMEMBER' + commands according to 'withdist', 'withhash' and 'withcoord' labels. + """ + try: + if options["store"] or options["store_dist"]: + # `store` and `store_dist` cant be combined + # with other command arguments. + # relevant to 'GEORADIUS' and 'GEORADIUSBYMEMBER' + return response + except KeyError: # it means the command was sent via execute_command + return response + + if type(response) != list: + response_list = [response] + else: + response_list = response + + if not options["withdist"] and not options["withcoord"] and not options["withhash"]: + # just a bunch of places + return response_list + + cast = { + "withdist": float, + "withcoord": lambda ll: (float(ll[0]), float(ll[1])), + "withhash": int, + } + + # zip all output results with each casting function to get + # the properly native Python value. 
+ f = [lambda x: x] + f += [cast[o] for o in ["withdist", "withhash", "withcoord"] if options[o]] + return [list(map(lambda fv: fv[0](fv[1]), zip(f, r))) for r in response_list] + + +def parse_command(response, **options): + commands = {} + for command in response: + cmd_dict = {} + cmd_name = str_if_bytes(command[0]) + cmd_dict["name"] = cmd_name + cmd_dict["arity"] = int(command[1]) + cmd_dict["flags"] = [str_if_bytes(flag) for flag in command[2]] + cmd_dict["first_key_pos"] = command[3] + cmd_dict["last_key_pos"] = command[4] + cmd_dict["step_count"] = command[5] + if len(command) > 7: + cmd_dict["tips"] = command[7] + cmd_dict["key_specifications"] = command[8] + cmd_dict["subcommands"] = command[9] + commands[cmd_name] = cmd_dict + return commands + + +def parse_command_resp3(response, **options): + commands = {} + for command in response: + cmd_dict = {} + cmd_name = str_if_bytes(command[0]) + cmd_dict["name"] = cmd_name + cmd_dict["arity"] = command[1] + cmd_dict["flags"] = {str_if_bytes(flag) for flag in command[2]} + cmd_dict["first_key_pos"] = command[3] + cmd_dict["last_key_pos"] = command[4] + cmd_dict["step_count"] = command[5] + cmd_dict["acl_categories"] = command[6] + if len(command) > 7: + cmd_dict["tips"] = command[7] + cmd_dict["key_specifications"] = command[8] + cmd_dict["subcommands"] = command[9] + + commands[cmd_name] = cmd_dict + return commands + + +def parse_pubsub_numsub(response, **options): + return list(zip(response[0::2], response[1::2])) + + +def parse_client_kill(response, **options): + if isinstance(response, int): + return response + return str_if_bytes(response) == "OK" + + +def parse_acl_getuser(response, **options): + if response is None: + return None + if isinstance(response, list): + data = pairs_to_dict(response, decode_keys=True) + else: + data = {str_if_bytes(key): value for key, value in response.items()} + + # convert everything but user-defined data in 'keys' to native strings + data["flags"] = list(map(str_if_bytes, data["flags"])) + data["passwords"] = list(map(str_if_bytes, data["passwords"])) + data["commands"] = str_if_bytes(data["commands"]) + if isinstance(data["keys"], str) or isinstance(data["keys"], bytes): + data["keys"] = list(str_if_bytes(data["keys"]).split(" ")) + if data["keys"] == [""]: + data["keys"] = [] + if "channels" in data: + if isinstance(data["channels"], str) or isinstance(data["channels"], bytes): + data["channels"] = list(str_if_bytes(data["channels"]).split(" ")) + if data["channels"] == [""]: + data["channels"] = [] + if "selectors" in data: + if data["selectors"] != [] and isinstance(data["selectors"][0], list): + data["selectors"] = [ + list(map(str_if_bytes, selector)) for selector in data["selectors"] + ] + elif data["selectors"] != []: + data["selectors"] = [ + {str_if_bytes(k): str_if_bytes(v) for k, v in selector.items()} + for selector in data["selectors"] + ] + + # split 'commands' into separate 'categories' and 'commands' lists + commands, categories = [], [] + for command in data["commands"].split(" "): + categories.append(command) if "@" in command else commands.append(command) + + data["commands"] = commands + data["categories"] = categories + data["enabled"] = "on" in data["flags"] + return data + + +def parse_acl_log(response, **options): + if response is None: + return None + if isinstance(response, list): + data = [] + for log in response: + log_data = pairs_to_dict(log, True, True) + client_info = log_data.get("client-info", "") + log_data["client-info"] = parse_client_info(client_info) + + # 
float() is lossy comparing to the "double" in C + log_data["age-seconds"] = float(log_data["age-seconds"]) + data.append(log_data) + else: + data = bool_ok(response) + return data + + +def parse_client_info(value): + """ + Parsing client-info in ACL Log in following format. + "key1=value1 key2=value2 key3=value3" + """ + client_info = {} + for info in str_if_bytes(value).strip().split(): + key, value = info.split("=") + client_info[key] = value + + # Those fields are defined as int in networking.c + for int_key in { + "id", + "age", + "idle", + "db", + "sub", + "psub", + "multi", + "qbuf", + "qbuf-free", + "obl", + "argv-mem", + "oll", + "omem", + "tot-mem", + }: + client_info[int_key] = int(client_info[int_key]) + return client_info + + +def parse_set_result(response, **options): + """ + Handle SET result since GET argument is available since Redis 6.2. + Parsing SET result into: + - BOOL + - String when GET argument is used + """ + if options.get("get"): + # Redis will return a getCommand result. + # See `setGenericCommand` in t_string.c + return response + return response and str_if_bytes(response) == "OK" + + +def string_keys_to_dict(key_string, callback): + return dict.fromkeys(key_string.split(), callback) + + +_RedisCallbacks = { + **string_keys_to_dict( + "AUTH COPY EXPIRE EXPIREAT HEXISTS HMSET MOVE MSETNX PERSIST PSETEX " + "PEXPIRE PEXPIREAT RENAMENX SETEX SETNX SMOVE", + bool, + ), + **string_keys_to_dict("HINCRBYFLOAT INCRBYFLOAT", float), + **string_keys_to_dict( + "ASKING FLUSHALL FLUSHDB LSET LTRIM MSET PFMERGE READONLY READWRITE " + "RENAME SAVE SELECT SHUTDOWN SLAVEOF SWAPDB WATCH UNWATCH", + bool_ok, + ), + **string_keys_to_dict("XREAD XREADGROUP", parse_xread), + **string_keys_to_dict( + "GEORADIUS GEORADIUSBYMEMBER GEOSEARCH", + parse_geosearch_generic, + ), + **string_keys_to_dict("XRANGE XREVRANGE", parse_stream_list), + "ACL GETUSER": parse_acl_getuser, + "ACL LOAD": bool_ok, + "ACL LOG": parse_acl_log, + "ACL SETUSER": bool_ok, + "ACL SAVE": bool_ok, + "CLIENT INFO": parse_client_info, + "CLIENT KILL": parse_client_kill, + "CLIENT LIST": parse_client_list, + "CLIENT PAUSE": bool_ok, + "CLIENT SETINFO": bool_ok, + "CLIENT SETNAME": bool_ok, + "CLIENT UNBLOCK": bool, + "CLUSTER ADDSLOTS": bool_ok, + "CLUSTER ADDSLOTSRANGE": bool_ok, + "CLUSTER DELSLOTS": bool_ok, + "CLUSTER DELSLOTSRANGE": bool_ok, + "CLUSTER FAILOVER": bool_ok, + "CLUSTER FORGET": bool_ok, + "CLUSTER INFO": parse_cluster_info, + "CLUSTER MEET": bool_ok, + "CLUSTER NODES": parse_cluster_nodes, + "CLUSTER REPLICAS": parse_cluster_nodes, + "CLUSTER REPLICATE": bool_ok, + "CLUSTER RESET": bool_ok, + "CLUSTER SAVECONFIG": bool_ok, + "CLUSTER SET-CONFIG-EPOCH": bool_ok, + "CLUSTER SETSLOT": bool_ok, + "CLUSTER SLAVES": parse_cluster_nodes, + "COMMAND": parse_command, + "CONFIG RESETSTAT": bool_ok, + "CONFIG SET": bool_ok, + "FUNCTION DELETE": bool_ok, + "FUNCTION FLUSH": bool_ok, + "FUNCTION RESTORE": bool_ok, + "GEODIST": float_or_none, + "HSCAN": parse_hscan, + "INFO": parse_info, + "LASTSAVE": timestamp_to_datetime, + "MEMORY PURGE": bool_ok, + "MODULE LOAD": bool, + "MODULE UNLOAD": bool, + "PING": lambda r: str_if_bytes(r) == "PONG", + "PUBSUB NUMSUB": parse_pubsub_numsub, + "PUBSUB SHARDNUMSUB": parse_pubsub_numsub, + "QUIT": bool_ok, + "SET": parse_set_result, + "SCAN": parse_scan, + "SCRIPT EXISTS": lambda r: list(map(bool, r)), + "SCRIPT FLUSH": bool_ok, + "SCRIPT KILL": bool_ok, + "SCRIPT LOAD": str_if_bytes, + "SENTINEL CKQUORUM": bool_ok, + "SENTINEL FAILOVER": bool_ok, + "SENTINEL 
FLUSHCONFIG": bool_ok, + "SENTINEL GET-MASTER-ADDR-BY-NAME": parse_sentinel_get_master, + "SENTINEL MONITOR": bool_ok, + "SENTINEL RESET": bool_ok, + "SENTINEL REMOVE": bool_ok, + "SENTINEL SET": bool_ok, + "SLOWLOG GET": parse_slowlog_get, + "SLOWLOG RESET": bool_ok, + "SORT": sort_return_tuples, + "SSCAN": parse_scan, + "TIME": lambda x: (int(x[0]), int(x[1])), + "XAUTOCLAIM": parse_xautoclaim, + "XCLAIM": parse_xclaim, + "XGROUP CREATE": bool_ok, + "XGROUP DESTROY": bool, + "XGROUP SETID": bool_ok, + "XINFO STREAM": parse_xinfo_stream, + "XPENDING": parse_xpending, + "ZSCAN": parse_zscan, +} + + +_RedisCallbacksRESP2 = { + **string_keys_to_dict( + "SDIFF SINTER SMEMBERS SUNION", lambda r: r and set(r) or set() + ), + **string_keys_to_dict( + "ZDIFF ZINTER ZPOPMAX ZPOPMIN ZRANGE ZRANGEBYSCORE ZRANK ZREVRANGE " + "ZREVRANGEBYSCORE ZREVRANK ZUNION", + zset_score_pairs, + ), + **string_keys_to_dict("ZINCRBY ZSCORE", float_or_none), + **string_keys_to_dict("BGREWRITEAOF BGSAVE", lambda r: True), + **string_keys_to_dict("BLPOP BRPOP", lambda r: r and tuple(r) or None), + **string_keys_to_dict( + "BZPOPMAX BZPOPMIN", lambda r: r and (r[0], r[1], float(r[2])) or None + ), + "ACL CAT": lambda r: list(map(str_if_bytes, r)), + "ACL GENPASS": str_if_bytes, + "ACL HELP": lambda r: list(map(str_if_bytes, r)), + "ACL LIST": lambda r: list(map(str_if_bytes, r)), + "ACL USERS": lambda r: list(map(str_if_bytes, r)), + "ACL WHOAMI": str_if_bytes, + "CLIENT GETNAME": str_if_bytes, + "CLIENT TRACKINGINFO": lambda r: list(map(str_if_bytes, r)), + "CLUSTER GETKEYSINSLOT": lambda r: list(map(str_if_bytes, r)), + "COMMAND GETKEYS": lambda r: list(map(str_if_bytes, r)), + "CONFIG GET": parse_config_get, + "DEBUG OBJECT": parse_debug_object, + "GEOHASH": lambda r: list(map(str_if_bytes, r)), + "GEOPOS": lambda r: list( + map(lambda ll: (float(ll[0]), float(ll[1])) if ll is not None else None, r) + ), + "HGETALL": lambda r: r and pairs_to_dict(r) or {}, + "MEMORY STATS": parse_memory_stats, + "MODULE LIST": lambda r: [pairs_to_dict(m) for m in r], + "RESET": str_if_bytes, + "SENTINEL MASTER": parse_sentinel_master, + "SENTINEL MASTERS": parse_sentinel_masters, + "SENTINEL SENTINELS": parse_sentinel_slaves_and_sentinels, + "SENTINEL SLAVES": parse_sentinel_slaves_and_sentinels, + "STRALGO": parse_stralgo, + "XINFO CONSUMERS": parse_list_of_dicts, + "XINFO GROUPS": parse_list_of_dicts, + "ZADD": parse_zadd, + "ZMSCORE": parse_zmscore, +} + + +_RedisCallbacksRESP3 = { + **string_keys_to_dict( + "ZRANGE ZINTER ZPOPMAX ZPOPMIN ZRANGEBYSCORE ZREVRANGE ZREVRANGEBYSCORE " + "ZUNION HGETALL XREADGROUP", + lambda r, **kwargs: r, + ), + **string_keys_to_dict("XREAD XREADGROUP", parse_xread_resp3), + "ACL LOG": lambda r: [ + {str_if_bytes(key): str_if_bytes(value) for key, value in x.items()} for x in r + ] + if isinstance(r, list) + else bool_ok(r), + "COMMAND": parse_command_resp3, + "CONFIG GET": lambda r: { + str_if_bytes(key) + if key is not None + else None: str_if_bytes(value) + if value is not None + else None + for key, value in r.items() + }, + "MEMORY STATS": lambda r: {str_if_bytes(key): value for key, value in r.items()}, + "SENTINEL MASTER": parse_sentinel_state_resp3, + "SENTINEL MASTERS": parse_sentinel_masters_resp3, + "SENTINEL SENTINELS": parse_sentinel_slaves_and_sentinels_resp3, + "SENTINEL SLAVES": parse_sentinel_slaves_and_sentinels_resp3, + "STRALGO": lambda r, **options: { + str_if_bytes(key): str_if_bytes(value) for key, value in r.items() + } + if isinstance(r, dict) + else str_if_bytes(r), + "XINFO 
CONSUMERS": lambda r: [ + {str_if_bytes(key): value for key, value in x.items()} for x in r + ], + "XINFO GROUPS": lambda r: [ + {str_if_bytes(key): value for key, value in d.items()} for d in r + ], +} diff --git a/templates/skills/spotify/dependencies/redis/_parsers/hiredis.py b/templates/skills/spotify/dependencies/redis/_parsers/hiredis.py new file mode 100644 index 00000000..1919d365 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/hiredis.py @@ -0,0 +1,223 @@ +import asyncio +import socket +import sys +from typing import Callable, List, Optional, Union + +if sys.version_info.major >= 3 and sys.version_info.minor >= 11: + from asyncio import timeout as async_timeout +else: + from async_timeout import timeout as async_timeout + +from redis.compat import TypedDict + +from ..exceptions import ConnectionError, InvalidResponse, RedisError +from ..typing import EncodableT +from ..utils import HIREDIS_AVAILABLE +from .base import AsyncBaseParser, BaseParser +from .socket import ( + NONBLOCKING_EXCEPTION_ERROR_NUMBERS, + NONBLOCKING_EXCEPTIONS, + SENTINEL, + SERVER_CLOSED_CONNECTION_ERROR, +) + + +class _HiredisReaderArgs(TypedDict, total=False): + protocolError: Callable[[str], Exception] + replyError: Callable[[str], Exception] + encoding: Optional[str] + errors: Optional[str] + + +class _HiredisParser(BaseParser): + "Parser class for connections using Hiredis" + + def __init__(self, socket_read_size): + if not HIREDIS_AVAILABLE: + raise RedisError("Hiredis is not installed") + self.socket_read_size = socket_read_size + self._buffer = bytearray(socket_read_size) + + def __del__(self): + try: + self.on_disconnect() + except Exception: + pass + + def on_connect(self, connection, **kwargs): + import hiredis + + self._sock = connection._sock + self._socket_timeout = connection.socket_timeout + kwargs = { + "protocolError": InvalidResponse, + "replyError": self.parse_error, + "errors": connection.encoder.encoding_errors, + } + + if connection.encoder.decode_responses: + kwargs["encoding"] = connection.encoder.encoding + self._reader = hiredis.Reader(**kwargs) + self._next_response = False + + def on_disconnect(self): + self._sock = None + self._reader = None + self._next_response = False + + def can_read(self, timeout): + if not self._reader: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + + if self._next_response is False: + self._next_response = self._reader.gets() + if self._next_response is False: + return self.read_from_socket(timeout=timeout, raise_on_timeout=False) + return True + + def read_from_socket(self, timeout=SENTINEL, raise_on_timeout=True): + sock = self._sock + custom_timeout = timeout is not SENTINEL + try: + if custom_timeout: + sock.settimeout(timeout) + bufflen = self._sock.recv_into(self._buffer) + if bufflen == 0: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + self._reader.feed(self._buffer, 0, bufflen) + # data was read from the socket and added to the buffer. + # return True to indicate that data was read. + return True + except socket.timeout: + if raise_on_timeout: + raise TimeoutError("Timeout reading from socket") + return False + except NONBLOCKING_EXCEPTIONS as ex: + # if we're in nonblocking mode and the recv raises a + # blocking error, simply return False indicating that + # there's no data to be read. otherwise raise the + # original exception. 
+ allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1) + if not raise_on_timeout and ex.errno == allowed: + return False + raise ConnectionError(f"Error while reading from socket: {ex.args}") + finally: + if custom_timeout: + sock.settimeout(self._socket_timeout) + + def read_response(self, disable_decoding=False): + if not self._reader: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + + # _next_response might be cached from a can_read() call + if self._next_response is not False: + response = self._next_response + self._next_response = False + return response + + if disable_decoding: + response = self._reader.gets(False) + else: + response = self._reader.gets() + + while response is False: + self.read_from_socket() + if disable_decoding: + response = self._reader.gets(False) + else: + response = self._reader.gets() + # if the response is a ConnectionError or the response is a list and + # the first item is a ConnectionError, raise it as something bad + # happened + if isinstance(response, ConnectionError): + raise response + elif ( + isinstance(response, list) + and response + and isinstance(response[0], ConnectionError) + ): + raise response[0] + return response + + +class _AsyncHiredisParser(AsyncBaseParser): + """Async implementation of parser class for connections using Hiredis""" + + __slots__ = ("_reader",) + + def __init__(self, socket_read_size: int): + if not HIREDIS_AVAILABLE: + raise RedisError("Hiredis is not available.") + super().__init__(socket_read_size=socket_read_size) + self._reader = None + + def on_connect(self, connection): + import hiredis + + self._stream = connection._reader + kwargs: _HiredisReaderArgs = { + "protocolError": InvalidResponse, + "replyError": self.parse_error, + } + if connection.encoder.decode_responses: + kwargs["encoding"] = connection.encoder.encoding + kwargs["errors"] = connection.encoder.encoding_errors + + self._reader = hiredis.Reader(**kwargs) + self._connected = True + + def on_disconnect(self): + self._connected = False + + async def can_read_destructive(self): + if not self._connected: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + if self._reader.gets(): + return True + try: + async with async_timeout(0): + return await self.read_from_socket() + except asyncio.TimeoutError: + return False + + async def read_from_socket(self): + buffer = await self._stream.read(self._read_size) + if not buffer or not isinstance(buffer, bytes): + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None + self._reader.feed(buffer) + # data was read from the socket and added to the buffer. + # return True to indicate that data was read. + return True + + async def read_response( + self, disable_decoding: bool = False + ) -> Union[EncodableT, List[EncodableT]]: + # If `on_disconnect()` has been called, prohibit any more reads + # even if they could happen because data might be present. 
+ # We still allow reads in progress to finish + if not self._connected: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) from None + + if disable_decoding: + response = self._reader.gets(False) + else: + response = self._reader.gets() + while response is False: + await self.read_from_socket() + if disable_decoding: + response = self._reader.gets(False) + else: + response = self._reader.gets() + + # if the response is a ConnectionError or the response is a list and + # the first item is a ConnectionError, raise it as something bad + # happened + if isinstance(response, ConnectionError): + raise response + elif ( + isinstance(response, list) + and response + and isinstance(response[0], ConnectionError) + ): + raise response[0] + return response diff --git a/templates/skills/spotify/dependencies/redis/_parsers/resp2.py b/templates/skills/spotify/dependencies/redis/_parsers/resp2.py new file mode 100644 index 00000000..d5adc1a8 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/resp2.py @@ -0,0 +1,132 @@ +from typing import Any, Union + +from ..exceptions import ConnectionError, InvalidResponse, ResponseError +from ..typing import EncodableT +from .base import _AsyncRESPBase, _RESPBase +from .socket import SERVER_CLOSED_CONNECTION_ERROR + + +class _RESP2Parser(_RESPBase): + """RESP2 protocol implementation""" + + def read_response(self, disable_decoding=False): + pos = self._buffer.get_pos() if self._buffer else None + try: + result = self._read_response(disable_decoding=disable_decoding) + except BaseException: + if self._buffer: + self._buffer.rewind(pos) + raise + else: + self._buffer.purge() + return result + + def _read_response(self, disable_decoding=False): + raw = self._buffer.readline() + if not raw: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + + byte, response = raw[:1], raw[1:] + + # server returned an error + if byte == b"-": + response = response.decode("utf-8", errors="replace") + error = self.parse_error(response) + # if the error is a ConnectionError, raise immediately so the user + # is notified + if isinstance(error, ConnectionError): + raise error + # otherwise, we're dealing with a ResponseError that might belong + # inside a pipeline response. the connection's read_response() + # and/or the pipeline's execute() will raise this error if + # necessary, so just return the exception instance here. 
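+ # (for example, LPUSH against a key holding a string yields a WRONGTYPE + # ResponseError; inside a pipeline it shows up in the results list + # instead of aborting the remaining commands)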
+ return error + # single value + elif byte == b"+": + pass + # int value + elif byte == b":": + return int(response) + # bulk response + elif byte == b"$" and response == b"-1": + return None + elif byte == b"$": + response = self._buffer.read(int(response)) + # multi-bulk response + elif byte == b"*" and response == b"-1": + return None + elif byte == b"*": + response = [ + self._read_response(disable_decoding=disable_decoding) + for i in range(int(response)) + ] + else: + raise InvalidResponse(f"Protocol Error: {raw!r}") + + if disable_decoding is False: + response = self.encoder.decode(response) + return response + + +class _AsyncRESP2Parser(_AsyncRESPBase): + """Async class for the RESP2 protocol""" + + async def read_response(self, disable_decoding: bool = False): + if not self._connected: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + if self._chunks: + # augment parsing buffer with previously read data + self._buffer += b"".join(self._chunks) + self._chunks.clear() + self._pos = 0 + response = await self._read_response(disable_decoding=disable_decoding) + # Successfully parsing a response allows us to clear our parsing buffer + self._clear() + return response + + async def _read_response( + self, disable_decoding: bool = False + ) -> Union[EncodableT, ResponseError, None]: + raw = await self._readline() + response: Any + byte, response = raw[:1], raw[1:] + + # server returned an error + if byte == b"-": + response = response.decode("utf-8", errors="replace") + error = self.parse_error(response) + # if the error is a ConnectionError, raise immediately so the user + # is notified + if isinstance(error, ConnectionError): + self._clear() # Successful parse + raise error + # otherwise, we're dealing with a ResponseError that might belong + # inside a pipeline response. the connection's read_response() + # and/or the pipeline's execute() will raise this error if + # necessary, so just return the exception instance here. 
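+ # (returning still counts as a successful parse: read_response() calls + # self._clear() on the accumulated chunks once this value is returned)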
+ return error + # single value + elif byte == b"+": + pass + # int value + elif byte == b":": + return int(response) + # bulk response + elif byte == b"$" and response == b"-1": + return None + elif byte == b"$": + response = await self._read(int(response)) + # multi-bulk response + elif byte == b"*" and response == b"-1": + return None + elif byte == b"*": + response = [ + (await self._read_response(disable_decoding)) + for _ in range(int(response)) # noqa + ] + else: + raise InvalidResponse(f"Protocol Error: {raw!r}") + + if disable_decoding is False: + response = self.encoder.decode(response) + return response diff --git a/templates/skills/spotify/dependencies/redis/_parsers/resp3.py b/templates/skills/spotify/dependencies/redis/_parsers/resp3.py new file mode 100644 index 00000000..569e7ee6 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/resp3.py @@ -0,0 +1,264 @@ +from logging import getLogger +from typing import Any, Union + +from ..exceptions import ConnectionError, InvalidResponse, ResponseError +from ..typing import EncodableT +from .base import _AsyncRESPBase, _RESPBase +from .socket import SERVER_CLOSED_CONNECTION_ERROR + + +class _RESP3Parser(_RESPBase): + """RESP3 protocol implementation""" + + def __init__(self, socket_read_size): + super().__init__(socket_read_size) + self.push_handler_func = self.handle_push_response + + def handle_push_response(self, response): + logger = getLogger("push_response") + logger.info("Push response: " + str(response)) + return response + + def read_response(self, disable_decoding=False, push_request=False): + pos = self._buffer.get_pos() if self._buffer else None + try: + result = self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + except BaseException: + if self._buffer: + self._buffer.rewind(pos) + raise + else: + self._buffer.purge() + return result + + def _read_response(self, disable_decoding=False, push_request=False): + raw = self._buffer.readline() + if not raw: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + + byte, response = raw[:1], raw[1:] + + # server returned an error + if byte in (b"-", b"!"): + if byte == b"!": + response = self._buffer.read(int(response)) + response = response.decode("utf-8", errors="replace") + error = self.parse_error(response) + # if the error is a ConnectionError, raise immediately so the user + # is notified + if isinstance(error, ConnectionError): + raise error + # otherwise, we're dealing with a ResponseError that might belong + # inside a pipeline response. the connection's read_response() + # and/or the pipeline's execute() will raise this error if + # necessary, so just return the exception instance here. 
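+ # (b"!" is RESP3's bulk-error type: its length-prefixed payload was read + # above and is then parsed exactly like a b"-" simple error)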
+ return error + # single value + elif byte == b"+": + pass + # null value + elif byte == b"_": + return None + # int and big int values + elif byte in (b":", b"("): + return int(response) + # double value + elif byte == b",": + return float(response) + # bool value + elif byte == b"#": + return response == b"t" + # bulk response + elif byte == b"$": + response = self._buffer.read(int(response)) + # verbatim string response + elif byte == b"=": + response = self._buffer.read(int(response))[4:] + # array response + elif byte == b"*": + response = [ + self._read_response(disable_decoding=disable_decoding) + for _ in range(int(response)) + ] + # set response + elif byte == b"~": + # redis can return unhashable types (like dict) in a set, + # so we need to first convert to a list, and then try to convert it to a set + response = [ + self._read_response(disable_decoding=disable_decoding) + for _ in range(int(response)) + ] + try: + response = set(response) + except TypeError: + pass + # map response + elif byte == b"%": + # We cannot use a dict-comprehension to parse stream. + # Evaluation order of key:val expression in dict comprehension only + # became defined to be left-right in version 3.8 + resp_dict = {} + for _ in range(int(response)): + key = self._read_response(disable_decoding=disable_decoding) + resp_dict[key] = self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + response = resp_dict + # push response + elif byte == b">": + response = [ + self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + for _ in range(int(response)) + ] + res = self.push_handler_func(response) + if not push_request: + return self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + else: + return res + else: + raise InvalidResponse(f"Protocol Error: {raw!r}") + + if isinstance(response, bytes) and disable_decoding is False: + response = self.encoder.decode(response) + return response + + def set_push_handler(self, push_handler_func): + self.push_handler_func = push_handler_func + + +class _AsyncRESP3Parser(_AsyncRESPBase): + def __init__(self, socket_read_size): + super().__init__(socket_read_size) + self.push_handler_func = self.handle_push_response + + def handle_push_response(self, response): + logger = getLogger("push_response") + logger.info("Push response: " + str(response)) + return response + + async def read_response( + self, disable_decoding: bool = False, push_request: bool = False + ): + if self._chunks: + # augment parsing buffer with previously read data + self._buffer += b"".join(self._chunks) + self._chunks.clear() + self._pos = 0 + response = await self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + # Successfully parsing a response allows us to clear our parsing buffer + self._clear() + return response + + async def _read_response( + self, disable_decoding: bool = False, push_request: bool = False + ) -> Union[EncodableT, ResponseError, None]: + if not self._stream or not self.encoder: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + raw = await self._readline() + response: Any + byte, response = raw[:1], raw[1:] + + # if byte not in (b"-", b"+", b":", b"$", b"*"): + # raise InvalidResponse(f"Protocol Error: {raw!r}") + + # server returned an error + if byte in (b"-", b"!"): + if byte == b"!": + response = await self._read(int(response)) + response = response.decode("utf-8", errors="replace") + error = self.parse_error(response) + # if the 
error is a ConnectionError, raise immediately so the user + # is notified + if isinstance(error, ConnectionError): + self._clear() # Successful parse + raise error + # otherwise, we're dealing with a ResponseError that might belong + # inside a pipeline response. the connection's read_response() + # and/or the pipeline's execute() will raise this error if + # necessary, so just return the exception instance here. + return error + # single value + elif byte == b"+": + pass + # null value + elif byte == b"_": + return None + # int and big int values + elif byte in (b":", b"("): + return int(response) + # double value + elif byte == b",": + return float(response) + # bool value + elif byte == b"#": + return response == b"t" + # bulk response + elif byte == b"$": + response = await self._read(int(response)) + # verbatim string response + elif byte == b"=": + response = (await self._read(int(response)))[4:] + # array response + elif byte == b"*": + response = [ + (await self._read_response(disable_decoding=disable_decoding)) + for _ in range(int(response)) + ] + # set response + elif byte == b"~": + # redis can return unhashable types (like dict) in a set, + # so we need to first convert to a list, and then try to convert it to a set + response = [ + (await self._read_response(disable_decoding=disable_decoding)) + for _ in range(int(response)) + ] + try: + response = set(response) + except TypeError: + pass + # map response + elif byte == b"%": + # We cannot use a dict-comprehension to parse stream. + # Evaluation order of key:val expression in dict comprehension only + # became defined to be left-right in version 3.8 + resp_dict = {} + for _ in range(int(response)): + key = await self._read_response(disable_decoding=disable_decoding) + resp_dict[key] = await self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + response = resp_dict + # push response + elif byte == b">": + response = [ + ( + await self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + ) + for _ in range(int(response)) + ] + res = self.push_handler_func(response) + if not push_request: + return await self._read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + else: + return res + else: + raise InvalidResponse(f"Protocol Error: {raw!r}") + + if isinstance(response, bytes) and disable_decoding is False: + response = self.encoder.decode(response) + return response + + def set_push_handler(self, push_handler_func): + self.push_handler_func = push_handler_func diff --git a/templates/skills/spotify/dependencies/redis/_parsers/socket.py b/templates/skills/spotify/dependencies/redis/_parsers/socket.py new file mode 100644 index 00000000..8147243b --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/_parsers/socket.py @@ -0,0 +1,162 @@ +import errno +import io +import socket +from io import SEEK_END +from typing import Optional, Union + +from ..exceptions import ConnectionError, TimeoutError +from ..utils import SSL_AVAILABLE + +NONBLOCKING_EXCEPTION_ERROR_NUMBERS = {BlockingIOError: errno.EWOULDBLOCK} + +if SSL_AVAILABLE: + import ssl + + if hasattr(ssl, "SSLWantReadError"): + NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLWantReadError] = 2 + NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLWantWriteError] = 2 + else: + NONBLOCKING_EXCEPTION_ERROR_NUMBERS[ssl.SSLError] = 2 + +NONBLOCKING_EXCEPTIONS = tuple(NONBLOCKING_EXCEPTION_ERROR_NUMBERS.keys()) + +SERVER_CLOSED_CONNECTION_ERROR = "Connection closed by server." 
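The push-handler hooks above are what route RESP3 out-of-band pushes (the `>` frames) to a user callback instead of the default `getLogger("push_response")` logger. A minimal sketch of wiring a callback through the async PubSub added later in this diff; the script itself is illustrative rather than part of the vendored code, and note that the parser-level handler is only installed when hiredis is not in use:

```python
import asyncio

import redis.asyncio as redis


def on_push(message):
    # invoked by the RESP3 parser's push_handler_func for ">" push frames
    print("push:", message)


async def main():
    r = redis.Redis(protocol=3)  # RESP3 must be requested explicitly
    p = r.pubsub(push_handler_func=on_push)
    await p.subscribe("news")
    await p.get_message(timeout=1.0)  # push frames are routed to on_push
    await p.aclose()
    await r.aclose()


asyncio.run(main())
```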
+SENTINEL = object() + +SYM_CRLF = b"\r\n" + + +class SocketBuffer: + def __init__( + self, socket: socket.socket, socket_read_size: int, socket_timeout: float + ): + self._sock = socket + self.socket_read_size = socket_read_size + self.socket_timeout = socket_timeout + self._buffer = io.BytesIO() + + def unread_bytes(self) -> int: + """ + Remaining unread length of buffer + """ + pos = self._buffer.tell() + end = self._buffer.seek(0, SEEK_END) + self._buffer.seek(pos) + return end - pos + + def _read_from_socket( + self, + length: Optional[int] = None, + timeout: Union[float, object] = SENTINEL, + raise_on_timeout: Optional[bool] = True, + ) -> bool: + sock = self._sock + socket_read_size = self.socket_read_size + marker = 0 + custom_timeout = timeout is not SENTINEL + + buf = self._buffer + current_pos = buf.tell() + buf.seek(0, SEEK_END) + if custom_timeout: + sock.settimeout(timeout) + try: + while True: + data = self._sock.recv(socket_read_size) + # an empty string indicates the server shutdown the socket + if isinstance(data, bytes) and len(data) == 0: + raise ConnectionError(SERVER_CLOSED_CONNECTION_ERROR) + buf.write(data) + data_length = len(data) + marker += data_length + + if length is not None and length > marker: + continue + return True + except socket.timeout: + if raise_on_timeout: + raise TimeoutError("Timeout reading from socket") + return False + except NONBLOCKING_EXCEPTIONS as ex: + # if we're in nonblocking mode and the recv raises a + # blocking error, simply return False indicating that + # there's no data to be read. otherwise raise the + # original exception. + allowed = NONBLOCKING_EXCEPTION_ERROR_NUMBERS.get(ex.__class__, -1) + if not raise_on_timeout and ex.errno == allowed: + return False + raise ConnectionError(f"Error while reading from socket: {ex.args}") + finally: + buf.seek(current_pos) + if custom_timeout: + sock.settimeout(self.socket_timeout) + + def can_read(self, timeout: float) -> bool: + return bool(self.unread_bytes()) or self._read_from_socket( + timeout=timeout, raise_on_timeout=False + ) + + def read(self, length: int) -> bytes: + length = length + 2 # make sure to read the \r\n terminator + # BufferIO will return less than requested if buffer is short + data = self._buffer.read(length) + missing = length - len(data) + if missing: + # fill up the buffer and read the remainder + self._read_from_socket(missing) + data += self._buffer.read(missing) + return data[:-2] + + def readline(self) -> bytes: + buf = self._buffer + data = buf.readline() + while not data.endswith(SYM_CRLF): + # there's more data in the socket that we need + self._read_from_socket() + data += buf.readline() + + return data[:-2] + + def get_pos(self) -> int: + """ + Get current read position + """ + return self._buffer.tell() + + def rewind(self, pos: int) -> None: + """ + Rewind the buffer to a specific position, to re-start reading + """ + self._buffer.seek(pos) + + def purge(self) -> None: + """ + After a successful read, purge the read part of buffer + """ + unread = self.unread_bytes() + + # Only if we have read all of the buffer do we truncate, to + # reduce the amount of memory thrashing. This heuristic + # can be changed or removed later. 
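+ # NOTE: the early return below makes the "move unread data to the front" + # branch that follows unreachable; in practice purge() only truncates the + # buffer once every buffered byte has been read.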
+ if unread > 0: + return + + if unread > 0: + # move unread data to the front + view = self._buffer.getbuffer() + view[:unread] = view[-unread:] + self._buffer.truncate(unread) + self._buffer.seek(0) + + def close(self) -> None: + try: + self._buffer.close() + except Exception: + # issue #633 suggests the purge/close somehow raised a + # BadFileDescriptor error. Perhaps the client ran out of + # memory or something else? It's probably OK to ignore + # any error being raised from purge/close since we're + # removing the reference to the instance below. + pass + self._buffer = None + self._sock = None diff --git a/templates/skills/spotify/dependencies/redis/asyncio/__init__.py b/templates/skills/spotify/dependencies/redis/asyncio/__init__.py new file mode 100644 index 00000000..3545ab44 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/asyncio/__init__.py @@ -0,0 +1,64 @@ +from redis.asyncio.client import Redis, StrictRedis +from redis.asyncio.cluster import RedisCluster +from redis.asyncio.connection import ( + BlockingConnectionPool, + Connection, + ConnectionPool, + SSLConnection, + UnixDomainSocketConnection, +) +from redis.asyncio.sentinel import ( + Sentinel, + SentinelConnectionPool, + SentinelManagedConnection, + SentinelManagedSSLConnection, +) +from redis.asyncio.utils import from_url +from redis.backoff import default_backoff +from redis.exceptions import ( + AuthenticationError, + AuthenticationWrongNumberOfArgsError, + BusyLoadingError, + ChildDeadlockedError, + ConnectionError, + DataError, + InvalidResponse, + OutOfMemoryError, + PubSubError, + ReadOnlyError, + RedisError, + ResponseError, + TimeoutError, + WatchError, +) + +__all__ = [ + "AuthenticationError", + "AuthenticationWrongNumberOfArgsError", + "BlockingConnectionPool", + "BusyLoadingError", + "ChildDeadlockedError", + "Connection", + "ConnectionError", + "ConnectionPool", + "DataError", + "from_url", + "default_backoff", + "InvalidResponse", + "PubSubError", + "OutOfMemoryError", + "ReadOnlyError", + "Redis", + "RedisCluster", + "RedisError", + "ResponseError", + "Sentinel", + "SentinelConnectionPool", + "SentinelManagedConnection", + "SentinelManagedSSLConnection", + "SSLConnection", + "StrictRedis", + "TimeoutError", + "UnixDomainSocketConnection", + "WatchError", +] diff --git a/templates/skills/spotify/dependencies/redis/asyncio/client.py b/templates/skills/spotify/dependencies/redis/asyncio/client.py new file mode 100644 index 00000000..480a877d --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/asyncio/client.py @@ -0,0 +1,1549 @@ +import asyncio +import copy +import inspect +import re +import ssl +import warnings +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Awaitable, + Callable, + Dict, + Iterable, + List, + Mapping, + MutableMapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +from redis._parsers.helpers import ( + _RedisCallbacks, + _RedisCallbacksRESP2, + _RedisCallbacksRESP3, + bool_ok, +) +from redis.asyncio.connection import ( + Connection, + ConnectionPool, + SSLConnection, + UnixDomainSocketConnection, +) +from redis.asyncio.lock import Lock +from redis.asyncio.retry import Retry +from redis.client import ( + EMPTY_RESPONSE, + NEVER_DECODE, + AbstractRedis, + CaseInsensitiveDict, +) +from redis.commands import ( + AsyncCoreCommands, + AsyncRedisModuleCommands, + AsyncSentinelCommands, + list_or_args, +) +from redis.compat import Protocol, TypedDict +from redis.credentials import CredentialProvider +from redis.exceptions import 
( + ConnectionError, + ExecAbortError, + PubSubError, + RedisError, + ResponseError, + TimeoutError, + WatchError, +) +from redis.typing import ChannelT, EncodableT, KeyT +from redis.utils import ( + HIREDIS_AVAILABLE, + _set_info_logger, + deprecated_function, + get_lib_version, + safe_str, + str_if_bytes, +) + +PubSubHandler = Callable[[Dict[str, str]], Awaitable[None]] +_KeyT = TypeVar("_KeyT", bound=KeyT) +_ArgT = TypeVar("_ArgT", KeyT, EncodableT) +_RedisT = TypeVar("_RedisT", bound="Redis") +_NormalizeKeysT = TypeVar("_NormalizeKeysT", bound=Mapping[ChannelT, object]) +if TYPE_CHECKING: + from redis.commands.core import Script + + +class ResponseCallbackProtocol(Protocol): + def __call__(self, response: Any, **kwargs): + ... + + +class AsyncResponseCallbackProtocol(Protocol): + async def __call__(self, response: Any, **kwargs): + ... + + +ResponseCallbackT = Union[ResponseCallbackProtocol, AsyncResponseCallbackProtocol] + + +class Redis( + AbstractRedis, AsyncRedisModuleCommands, AsyncCoreCommands, AsyncSentinelCommands +): + """ + Implementation of the Redis protocol. + + This abstract class provides a Python interface to all Redis commands + and an implementation of the Redis protocol. + + Pipelines derive from this, implementing how + the commands are sent and received to the Redis server. Based on + configuration, an instance will either use a ConnectionPool, or + Connection object to talk to redis. + """ + + response_callbacks: MutableMapping[Union[str, bytes], ResponseCallbackT] + + @classmethod + def from_url( + cls, + url: str, + single_connection_client: bool = False, + auto_close_connection_pool: Optional[bool] = None, + **kwargs, + ): + """ + Return a Redis client object configured from the given URL + + For example:: + + redis://[[username]:[password]]@localhost:6379/0 + rediss://[[username]:[password]]@localhost:6379/0 + unix://[username@]/path/to/socket.sock?db=0[&password=password] + + Three URL schemes are supported: + + - `redis://` creates a TCP socket connection. See more at: + + - `rediss://` creates a SSL wrapped TCP socket connection. See more at: + + - ``unix://``: creates a Unix Domain Socket connection. + + The username, password, hostname, path and all querystring values + are passed through urllib.parse.unquote in order to replace any + percent-encoded values with their corresponding characters. + + There are several ways to specify a database number. The first value + found will be used: + + 1. A ``db`` querystring option, e.g. redis://localhost?db=0 + + 2. If using the redis:// or rediss:// schemes, the path argument + of the url, e.g. redis://localhost/0 + + 3. A ``db`` keyword argument to this function. + + If none of these options are specified, the default db=0 is used. + + All querystring options are cast to their appropriate Python types. + Boolean arguments can be specified with string values "True"/"False" + or "Yes"/"No". Values that cannot be properly cast cause a + ``ValueError`` to be raised. Once parsed, the querystring arguments + and keyword arguments are passed to the ``ConnectionPool``'s + class initializer. In the case of conflicting arguments, querystring + arguments always win. + + """ + connection_pool = ConnectionPool.from_url(url, **kwargs) + client = cls( + connection_pool=connection_pool, + single_connection_client=single_connection_client, + ) + if auto_close_connection_pool is not None: + warnings.warn( + DeprecationWarning( + '"auto_close_connection_pool" is deprecated ' + "since version 5.0.1. 
" + "Please create a ConnectionPool explicitly and " + "provide to the Redis() constructor instead." + ) + ) + else: + auto_close_connection_pool = True + client.auto_close_connection_pool = auto_close_connection_pool + return client + + @classmethod + def from_pool( + cls: Type["Redis"], + connection_pool: ConnectionPool, + ) -> "Redis": + """ + Return a Redis client from the given connection pool. + The Redis client will take ownership of the connection pool and + close it when the Redis client is closed. + """ + client = cls( + connection_pool=connection_pool, + ) + client.auto_close_connection_pool = True + return client + + def __init__( + self, + *, + host: str = "localhost", + port: int = 6379, + db: Union[str, int] = 0, + password: Optional[str] = None, + socket_timeout: Optional[float] = None, + socket_connect_timeout: Optional[float] = None, + socket_keepalive: Optional[bool] = None, + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, + connection_pool: Optional[ConnectionPool] = None, + unix_socket_path: Optional[str] = None, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + retry_on_timeout: bool = False, + retry_on_error: Optional[list] = None, + ssl: bool = False, + ssl_keyfile: Optional[str] = None, + ssl_certfile: Optional[str] = None, + ssl_cert_reqs: str = "required", + ssl_ca_certs: Optional[str] = None, + ssl_ca_data: Optional[str] = None, + ssl_check_hostname: bool = False, + ssl_min_version: Optional[ssl.TLSVersion] = None, + ssl_ciphers: Optional[str] = None, + max_connections: Optional[int] = None, + single_connection_client: bool = False, + health_check_interval: int = 0, + client_name: Optional[str] = None, + lib_name: Optional[str] = "redis-py", + lib_version: Optional[str] = get_lib_version(), + username: Optional[str] = None, + retry: Optional[Retry] = None, + auto_close_connection_pool: Optional[bool] = None, + redis_connect_func=None, + credential_provider: Optional[CredentialProvider] = None, + protocol: Optional[int] = 2, + ): + """ + Initialize a new Redis client. + To specify a retry policy for specific errors, first set + `retry_on_error` to a list of the error/s to retry on, then set + `retry` to a valid `Retry` object. + To retry on TimeoutError, `retry_on_timeout` can also be set to `True`. + """ + kwargs: Dict[str, Any] + # auto_close_connection_pool only has an effect if connection_pool is + # None. It is assumed that if connection_pool is not None, the user + # wants to manage the connection pool themselves. + if auto_close_connection_pool is not None: + warnings.warn( + DeprecationWarning( + '"auto_close_connection_pool" is deprecated ' + "since version 5.0.1. " + "Please create a ConnectionPool explicitly and " + "provide to the Redis() constructor instead." 
+ ) + ) + else: + auto_close_connection_pool = True + + if not connection_pool: + # Create internal connection pool, expected to be closed by Redis instance + if not retry_on_error: + retry_on_error = [] + if retry_on_timeout is True: + retry_on_error.append(TimeoutError) + kwargs = { + "db": db, + "username": username, + "password": password, + "credential_provider": credential_provider, + "socket_timeout": socket_timeout, + "encoding": encoding, + "encoding_errors": encoding_errors, + "decode_responses": decode_responses, + "retry_on_timeout": retry_on_timeout, + "retry_on_error": retry_on_error, + "retry": copy.deepcopy(retry), + "max_connections": max_connections, + "health_check_interval": health_check_interval, + "client_name": client_name, + "lib_name": lib_name, + "lib_version": lib_version, + "redis_connect_func": redis_connect_func, + "protocol": protocol, + } + # based on input, setup appropriate connection args + if unix_socket_path is not None: + kwargs.update( + { + "path": unix_socket_path, + "connection_class": UnixDomainSocketConnection, + } + ) + else: + # TCP specific options + kwargs.update( + { + "host": host, + "port": port, + "socket_connect_timeout": socket_connect_timeout, + "socket_keepalive": socket_keepalive, + "socket_keepalive_options": socket_keepalive_options, + } + ) + + if ssl: + kwargs.update( + { + "connection_class": SSLConnection, + "ssl_keyfile": ssl_keyfile, + "ssl_certfile": ssl_certfile, + "ssl_cert_reqs": ssl_cert_reqs, + "ssl_ca_certs": ssl_ca_certs, + "ssl_ca_data": ssl_ca_data, + "ssl_check_hostname": ssl_check_hostname, + "ssl_min_version": ssl_min_version, + "ssl_ciphers": ssl_ciphers, + } + ) + # This arg only used if no pool is passed in + self.auto_close_connection_pool = auto_close_connection_pool + connection_pool = ConnectionPool(**kwargs) + else: + # If a pool is passed in, do not close it + self.auto_close_connection_pool = False + + self.connection_pool = connection_pool + self.single_connection_client = single_connection_client + self.connection: Optional[Connection] = None + + self.response_callbacks = CaseInsensitiveDict(_RedisCallbacks) + + if self.connection_pool.connection_kwargs.get("protocol") in ["3", 3]: + self.response_callbacks.update(_RedisCallbacksRESP3) + else: + self.response_callbacks.update(_RedisCallbacksRESP2) + + # If using a single connection client, we need to lock creation-of and use-of + # the client in order to avoid race conditions such as using asyncio.gather + # on a set of redis commands + self._single_conn_lock = asyncio.Lock() + + def __repr__(self): + return f"{self.__class__.__name__}<{self.connection_pool!r}>" + + def __await__(self): + return self.initialize().__await__() + + async def initialize(self: _RedisT) -> _RedisT: + if self.single_connection_client: + async with self._single_conn_lock: + if self.connection is None: + self.connection = await self.connection_pool.get_connection("_") + return self + + def set_response_callback(self, command: str, callback: ResponseCallbackT): + """Set a custom Response Callback""" + self.response_callbacks[command] = callback + + def get_encoder(self): + """Get the connection pool's encoder""" + return self.connection_pool.get_encoder() + + def get_connection_kwargs(self): + """Get the connection's key-word arguments""" + return self.connection_pool.connection_kwargs + + def get_retry(self) -> Optional["Retry"]: + return self.get_connection_kwargs().get("retry") + + def set_retry(self, retry: "Retry") -> None: + self.get_connection_kwargs().update({"retry": 
retry}) + self.connection_pool.set_retry(retry) + + def load_external_module(self, funcname, func): + """ + This function can be used to add externally defined redis modules, + and their namespaces to the redis client. + + funcname - A string containing the name of the function to create + func - The function, being added to this class. + + ex: Assume that one has a custom redis module named foomod that + creates command named 'foo.dothing' and 'foo.anotherthing' in redis. + To load function functions into this namespace: + + from redis import Redis + from foomodule import F + r = Redis() + r.load_external_module("foo", F) + r.foo().dothing('your', 'arguments') + + For a concrete example see the reimport of the redisjson module in + tests/test_connection.py::test_loading_external_modules + """ + setattr(self, funcname, func) + + def pipeline( + self, transaction: bool = True, shard_hint: Optional[str] = None + ) -> "Pipeline": + """ + Return a new pipeline object that can queue multiple commands for + later execution. ``transaction`` indicates whether all commands + should be executed atomically. Apart from making a group of operations + atomic, pipelines are useful for reducing the back-and-forth overhead + between the client and server. + """ + return Pipeline( + self.connection_pool, self.response_callbacks, transaction, shard_hint + ) + + async def transaction( + self, + func: Callable[["Pipeline"], Union[Any, Awaitable[Any]]], + *watches: KeyT, + shard_hint: Optional[str] = None, + value_from_callable: bool = False, + watch_delay: Optional[float] = None, + ): + """ + Convenience method for executing the callable `func` as a transaction + while watching all keys specified in `watches`. The 'func' callable + should expect a single argument which is a Pipeline object. + """ + pipe: Pipeline + async with self.pipeline(True, shard_hint) as pipe: + while True: + try: + if watches: + await pipe.watch(*watches) + func_value = func(pipe) + if inspect.isawaitable(func_value): + func_value = await func_value + exec_value = await pipe.execute() + return func_value if value_from_callable else exec_value + except WatchError: + if watch_delay is not None and watch_delay > 0: + await asyncio.sleep(watch_delay) + continue + + def lock( + self, + name: KeyT, + timeout: Optional[float] = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: Optional[float] = None, + lock_class: Optional[Type[Lock]] = None, + thread_local: bool = True, + ) -> Lock: + """ + Return a new Lock object using key ``name`` that mimics + the behavior of threading.Lock. + + If specified, ``timeout`` indicates a maximum life for the lock. + By default, it will remain locked until release() is called. + + ``sleep`` indicates the amount of time to sleep per loop iteration + when the lock is in blocking mode and another client is currently + holding the lock. + + ``blocking`` indicates whether calling ``acquire`` should block until + the lock has been acquired or to fail immediately, causing ``acquire`` + to return False and the lock not being acquired. Defaults to True. + Note this value can be overridden by passing a ``blocking`` + argument to ``acquire``. + + ``blocking_timeout`` indicates the maximum amount of time in seconds to + spend trying to acquire the lock. A value of ``None`` indicates + continue trying forever. ``blocking_timeout`` can be specified as a + float or integer, both representing the number of seconds to wait. + + ``lock_class`` forces the specified lock implementation. 
Note that as + of redis-py 3.0, the only lock class we implement is ``Lock`` (which is + a Lua-based lock). So, it's unlikely you'll need this parameter, unless + you have created your own custom lock class. + + ``thread_local`` indicates whether the lock token is placed in + thread-local storage. By default, the token is placed in thread local + storage so that a thread only sees its token, not a token set by + another thread. Consider the following timeline: + + time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds. + thread-1 sets the token to "abc" + time: 1, thread-2 blocks trying to acquire `my-lock` using the + Lock instance. + time: 5, thread-1 has not yet completed. redis expires the lock + key. + time: 5, thread-2 acquired `my-lock` now that it's available. + thread-2 sets the token to "xyz" + time: 6, thread-1 finishes its work and calls release(). if the + token is *not* stored in thread local storage, then + thread-1 would see the token value as "xyz" and would be + able to successfully release the thread-2's lock. + + In some use cases it's necessary to disable thread local storage. For + example, if you have code where one thread acquires a lock and passes + that lock instance to a worker thread to release later. If thread + local storage isn't disabled in this case, the worker thread won't see + the token set by the thread that acquired the lock. Our assumption + is that these cases aren't common and as such default to using + thread local storage.""" + if lock_class is None: + lock_class = Lock + return lock_class( + self, + name, + timeout=timeout, + sleep=sleep, + blocking=blocking, + blocking_timeout=blocking_timeout, + thread_local=thread_local, + ) + + def pubsub(self, **kwargs) -> "PubSub": + """ + Return a Publish/Subscribe object. With this object, you can + subscribe to channels and listen for messages that get published to + them. + """ + return PubSub(self.connection_pool, **kwargs) + + def monitor(self) -> "Monitor": + return Monitor(self.connection_pool) + + def client(self) -> "Redis": + return self.__class__( + connection_pool=self.connection_pool, single_connection_client=True + ) + + async def __aenter__(self: _RedisT) -> _RedisT: + return await self.initialize() + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.aclose() + + _DEL_MESSAGE = "Unclosed Redis client" + + # passing _warnings and _grl as argument default since they may be gone + # by the time __del__ is called at shutdown + def __del__( + self, + _warn: Any = warnings.warn, + _grl: Any = asyncio.get_running_loop, + ) -> None: + if hasattr(self, "connection") and (self.connection is not None): + _warn(f"Unclosed client session {self!r}", ResourceWarning, source=self) + try: + context = {"client": self, "message": self._DEL_MESSAGE} + _grl().call_exception_handler(context) + except RuntimeError: + pass + self.connection._close() + + async def aclose(self, close_connection_pool: Optional[bool] = None) -> None: + """ + Closes Redis client connection + + :param close_connection_pool: decides whether to close the connection pool used + by this Redis client, overriding Redis.auto_close_connection_pool. By default, + let Redis.auto_close_connection_pool decide whether to close the connection + pool. 
+ """ + conn = self.connection + if conn: + self.connection = None + await self.connection_pool.release(conn) + if close_connection_pool or ( + close_connection_pool is None and self.auto_close_connection_pool + ): + await self.connection_pool.disconnect() + + @deprecated_function(version="5.0.1", reason="Use aclose() instead", name="close") + async def close(self, close_connection_pool: Optional[bool] = None) -> None: + """ + Alias for aclose(), for backwards compatibility + """ + await self.aclose(close_connection_pool) + + async def _send_command_parse_response(self, conn, command_name, *args, **options): + """ + Send a command and parse the response + """ + await conn.send_command(*args) + return await self.parse_response(conn, command_name, **options) + + async def _disconnect_raise(self, conn: Connection, error: Exception): + """ + Close the connection and raise an exception + if retry_on_error is not set or the error + is not one of the specified error types + """ + await conn.disconnect() + if ( + conn.retry_on_error is None + or isinstance(error, tuple(conn.retry_on_error)) is False + ): + raise error + + # COMMAND EXECUTION AND PROTOCOL PARSING + async def execute_command(self, *args, **options): + """Execute a command and return a parsed response""" + await self.initialize() + pool = self.connection_pool + command_name = args[0] + conn = self.connection or await pool.get_connection(command_name, **options) + + if self.single_connection_client: + await self._single_conn_lock.acquire() + try: + return await conn.retry.call_with_retry( + lambda: self._send_command_parse_response( + conn, command_name, *args, **options + ), + lambda error: self._disconnect_raise(conn, error), + ) + finally: + if self.single_connection_client: + self._single_conn_lock.release() + if not self.connection: + await pool.release(conn) + + async def parse_response( + self, connection: Connection, command_name: Union[str, bytes], **options + ): + """Parses a response from the Redis server""" + try: + if NEVER_DECODE in options: + response = await connection.read_response(disable_decoding=True) + options.pop(NEVER_DECODE) + else: + response = await connection.read_response() + except ResponseError: + if EMPTY_RESPONSE in options: + return options[EMPTY_RESPONSE] + raise + + if EMPTY_RESPONSE in options: + options.pop(EMPTY_RESPONSE) + + if command_name in self.response_callbacks: + # Mypy bug: https://github.com/python/mypy/issues/10977 + command_name = cast(str, command_name) + retval = self.response_callbacks[command_name](response, **options) + return await retval if inspect.isawaitable(retval) else retval + return response + + +StrictRedis = Redis + + +class MonitorCommandInfo(TypedDict): + time: float + db: int + client_address: str + client_port: str + client_type: str + command: str + + +class Monitor: + """ + Monitor is useful for handling the MONITOR command to the redis server. + next_command() method returns one command from monitor + listen() method yields commands from monitor. + """ + + monitor_re = re.compile(r"\[(\d+) (.*?)\] (.*)") + command_re = re.compile(r'"(.*?)(? 
MonitorCommandInfo: + """Parse the response from a monitor command""" + await self.connect() + response = await self.connection.read_response() + if isinstance(response, bytes): + response = self.connection.encoder.decode(response, force=True) + command_time, command_data = response.split(" ", 1) + m = self.monitor_re.match(command_data) + db_id, client_info, command = m.groups() + command = " ".join(self.command_re.findall(command)) + # Redis escapes double quotes because each piece of the command + # string is surrounded by double quotes. We don't have that + # requirement so remove the escaping and leave the quote. + command = command.replace('\\"', '"') + + if client_info == "lua": + client_address = "lua" + client_port = "" + client_type = "lua" + elif client_info.startswith("unix"): + client_address = "unix" + client_port = client_info[5:] + client_type = "unix" + else: + # use rsplit as ipv6 addresses contain colons + client_address, client_port = client_info.rsplit(":", 1) + client_type = "tcp" + return { + "time": float(command_time), + "db": int(db_id), + "client_address": client_address, + "client_port": client_port, + "client_type": client_type, + "command": command, + } + + async def listen(self) -> AsyncIterator[MonitorCommandInfo]: + """Listen for commands coming to the server.""" + while True: + yield await self.next_command() + + +class PubSub: + """ + PubSub provides publish, subscribe and listen support to Redis channels. + + After subscribing to one or more channels, the listen() method will block + until a message arrives on one of the subscribed channels. That message + will be returned and it's safe to start listening again. + """ + + PUBLISH_MESSAGE_TYPES = ("message", "pmessage") + UNSUBSCRIBE_MESSAGE_TYPES = ("unsubscribe", "punsubscribe") + HEALTH_CHECK_MESSAGE = "redis-py-health-check" + + def __init__( + self, + connection_pool: ConnectionPool, + shard_hint: Optional[str] = None, + ignore_subscribe_messages: bool = False, + encoder=None, + push_handler_func: Optional[Callable] = None, + ): + self.connection_pool = connection_pool + self.shard_hint = shard_hint + self.ignore_subscribe_messages = ignore_subscribe_messages + self.connection = None + # we need to know the encoding options for this connection in order + # to lookup channel and pattern names for callback handlers. + self.encoder = encoder + self.push_handler_func = push_handler_func + if self.encoder is None: + self.encoder = self.connection_pool.get_encoder() + if self.encoder.decode_responses: + self.health_check_response = [ + ["pong", self.HEALTH_CHECK_MESSAGE], + self.HEALTH_CHECK_MESSAGE, + ] + else: + self.health_check_response = [ + [b"pong", self.encoder.encode(self.HEALTH_CHECK_MESSAGE)], + self.encoder.encode(self.HEALTH_CHECK_MESSAGE), + ] + if self.push_handler_func is None: + _set_info_logger() + self.channels = {} + self.pending_unsubscribe_channels = set() + self.patterns = {} + self.pending_unsubscribe_patterns = set() + self._lock = asyncio.Lock() + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.aclose() + + def __del__(self): + if self.connection: + self.connection.deregister_connect_callback(self.on_connect) + + async def aclose(self): + # In case a connection property does not yet exist + # (due to a crash earlier in the Redis() constructor), return + # immediately as there is nothing to clean-up. 
+ if not hasattr(self, "connection"): + return + async with self._lock: + if self.connection: + await self.connection.disconnect() + self.connection.deregister_connect_callback(self.on_connect) + await self.connection_pool.release(self.connection) + self.connection = None + self.channels = {} + self.pending_unsubscribe_channels = set() + self.patterns = {} + self.pending_unsubscribe_patterns = set() + + @deprecated_function(version="5.0.1", reason="Use aclose() instead", name="close") + async def close(self) -> None: + """Alias for aclose(), for backwards compatibility""" + await self.aclose() + + @deprecated_function(version="5.0.1", reason="Use aclose() instead", name="reset") + async def reset(self) -> None: + """Alias for aclose(), for backwards compatibility""" + await self.aclose() + + async def on_connect(self, connection: Connection): + """Re-subscribe to any channels and patterns previously subscribed to""" + # NOTE: for python3, we can't pass bytestrings as keyword arguments + # so we need to decode channel/pattern names back to unicode strings + # before passing them to [p]subscribe. + self.pending_unsubscribe_channels.clear() + self.pending_unsubscribe_patterns.clear() + if self.channels: + channels = {} + for k, v in self.channels.items(): + channels[self.encoder.decode(k, force=True)] = v + await self.subscribe(**channels) + if self.patterns: + patterns = {} + for k, v in self.patterns.items(): + patterns[self.encoder.decode(k, force=True)] = v + await self.psubscribe(**patterns) + + @property + def subscribed(self): + """Indicates if there are subscriptions to any channels or patterns""" + return bool(self.channels or self.patterns) + + async def execute_command(self, *args: EncodableT): + """Execute a publish/subscribe command""" + + # NOTE: don't parse the response in this function -- it could pull a + # legitimate message off the stack if the connection is already + # subscribed to one or more channels + + await self.connect() + connection = self.connection + kwargs = {"check_health": not self.subscribed} + await self._execute(connection, connection.send_command, *args, **kwargs) + + async def connect(self): + """ + Ensure that the PubSub is connected + """ + if self.connection is None: + self.connection = await self.connection_pool.get_connection( + "pubsub", self.shard_hint + ) + # register a callback that re-subscribes to any channels we + # were listening to when we were disconnected + self.connection.register_connect_callback(self.on_connect) + else: + await self.connection.connect() + if self.push_handler_func is not None and not HIREDIS_AVAILABLE: + self.connection._parser.set_push_handler(self.push_handler_func) + + async def _disconnect_raise_connect(self, conn, error): + """ + Close the connection and raise an exception + if retry_on_error is not set or the error is not one + of the specified error types. Otherwise, try to + reconnect + """ + await conn.disconnect() + if ( + conn.retry_on_error is None + or isinstance(error, tuple(conn.retry_on_error)) is False + ): + raise error + await conn.connect() + + async def _execute(self, conn, command, *args, **kwargs): + """ + Connect manually upon disconnection. If the Redis server is down, + this will fail and raise a ConnectionError as desired. 
+ After reconnection, the ``on_connect`` callback should have been + called by the # connection to resubscribe us to any channels and + patterns we were previously listening to + """ + return await conn.retry.call_with_retry( + lambda: command(*args, **kwargs), + lambda error: self._disconnect_raise_connect(conn, error), + ) + + async def parse_response(self, block: bool = True, timeout: float = 0): + """Parse the response from a publish/subscribe command""" + conn = self.connection + if conn is None: + raise RuntimeError( + "pubsub connection not set: " + "did you forget to call subscribe() or psubscribe()?" + ) + + await self.check_health() + + if not conn.is_connected: + await conn.connect() + + read_timeout = None if block else timeout + response = await self._execute( + conn, + conn.read_response, + timeout=read_timeout, + disconnect_on_error=False, + push_request=True, + ) + + if conn.health_check_interval and response in self.health_check_response: + # ignore the health check message as user might not expect it + return None + return response + + async def check_health(self): + conn = self.connection + if conn is None: + raise RuntimeError( + "pubsub connection not set: " + "did you forget to call subscribe() or psubscribe()?" + ) + + if ( + conn.health_check_interval + and asyncio.get_running_loop().time() > conn.next_health_check + ): + await conn.send_command( + "PING", self.HEALTH_CHECK_MESSAGE, check_health=False + ) + + def _normalize_keys(self, data: _NormalizeKeysT) -> _NormalizeKeysT: + """ + normalize channel/pattern names to be either bytes or strings + based on whether responses are automatically decoded. this saves us + from coercing the value for each message coming in. + """ + encode = self.encoder.encode + decode = self.encoder.decode + return {decode(encode(k)): v for k, v in data.items()} # type: ignore[return-value] # noqa: E501 + + async def psubscribe(self, *args: ChannelT, **kwargs: PubSubHandler): + """ + Subscribe to channel patterns. Patterns supplied as keyword arguments + expect a pattern name as the key and a callable as the value. A + pattern's callable will be invoked automatically when a message is + received on that pattern rather than producing a message via + ``listen()``. + """ + parsed_args = list_or_args((args[0],), args[1:]) if args else args + new_patterns: Dict[ChannelT, PubSubHandler] = dict.fromkeys(parsed_args) + # Mypy bug: https://github.com/python/mypy/issues/10970 + new_patterns.update(kwargs) # type: ignore[arg-type] + ret_val = await self.execute_command("PSUBSCRIBE", *new_patterns.keys()) + # update the patterns dict AFTER we send the command. we don't want to + # subscribe twice to these patterns, once for the command and again + # for the reconnection. + new_patterns = self._normalize_keys(new_patterns) + self.patterns.update(new_patterns) + self.pending_unsubscribe_patterns.difference_update(new_patterns) + return ret_val + + def punsubscribe(self, *args: ChannelT) -> Awaitable: + """ + Unsubscribe from the supplied patterns. If empty, unsubscribe from + all patterns. + """ + patterns: Iterable[ChannelT] + if args: + parsed_args = list_or_args((args[0],), args[1:]) + patterns = self._normalize_keys(dict.fromkeys(parsed_args)).keys() + else: + parsed_args = [] + patterns = self.patterns + self.pending_unsubscribe_patterns.update(patterns) + return self.execute_command("PUNSUBSCRIBE", *parsed_args) + + async def subscribe(self, *args: ChannelT, **kwargs: Callable): + """ + Subscribe to channels. 
Channels supplied as keyword arguments expect + a channel name as the key and a callable as the value. A channel's + callable will be invoked automatically when a message is received on + that channel rather than producing a message via ``listen()`` or + ``get_message()``. + """ + parsed_args = list_or_args((args[0],), args[1:]) if args else () + new_channels = dict.fromkeys(parsed_args) + # Mypy bug: https://github.com/python/mypy/issues/10970 + new_channels.update(kwargs) # type: ignore[arg-type] + ret_val = await self.execute_command("SUBSCRIBE", *new_channels.keys()) + # update the channels dict AFTER we send the command. we don't want to + # subscribe twice to these channels, once for the command and again + # for the reconnection. + new_channels = self._normalize_keys(new_channels) + self.channels.update(new_channels) + self.pending_unsubscribe_channels.difference_update(new_channels) + return ret_val + + def unsubscribe(self, *args) -> Awaitable: + """ + Unsubscribe from the supplied channels. If empty, unsubscribe from + all channels + """ + if args: + parsed_args = list_or_args(args[0], args[1:]) + channels = self._normalize_keys(dict.fromkeys(parsed_args)) + else: + parsed_args = [] + channels = self.channels + self.pending_unsubscribe_channels.update(channels) + return self.execute_command("UNSUBSCRIBE", *parsed_args) + + async def listen(self) -> AsyncIterator: + """Listen for messages on channels this client has been subscribed to""" + while self.subscribed: + response = await self.handle_message(await self.parse_response(block=True)) + if response is not None: + yield response + + async def get_message( + self, ignore_subscribe_messages: bool = False, timeout: Optional[float] = 0.0 + ): + """ + Get the next message if one is available, otherwise None. + + If timeout is specified, the system will wait for `timeout` seconds + before returning. Timeout should be specified as a floating point + number or None to wait indefinitely. + """ + response = await self.parse_response(block=(timeout is None), timeout=timeout) + if response: + return await self.handle_message(response, ignore_subscribe_messages) + return None + + def ping(self, message=None) -> Awaitable: + """ + Ping the Redis server + """ + args = ["PING", message] if message is not None else ["PING"] + return self.execute_command(*args) + + async def handle_message(self, response, ignore_subscribe_messages=False): + """ + Parses a pub/sub message. If the channel or pattern was subscribed to + with a message handler, the handler is invoked instead of a parsed + message being returned. 
+ """ + if response is None: + return None + if isinstance(response, bytes): + response = [b"pong", response] if response != b"PONG" else [b"pong", b""] + message_type = str_if_bytes(response[0]) + if message_type == "pmessage": + message = { + "type": message_type, + "pattern": response[1], + "channel": response[2], + "data": response[3], + } + elif message_type == "pong": + message = { + "type": message_type, + "pattern": None, + "channel": None, + "data": response[1], + } + else: + message = { + "type": message_type, + "pattern": None, + "channel": response[1], + "data": response[2], + } + + # if this is an unsubscribe message, remove it from memory + if message_type in self.UNSUBSCRIBE_MESSAGE_TYPES: + if message_type == "punsubscribe": + pattern = response[1] + if pattern in self.pending_unsubscribe_patterns: + self.pending_unsubscribe_patterns.remove(pattern) + self.patterns.pop(pattern, None) + else: + channel = response[1] + if channel in self.pending_unsubscribe_channels: + self.pending_unsubscribe_channels.remove(channel) + self.channels.pop(channel, None) + + if message_type in self.PUBLISH_MESSAGE_TYPES: + # if there's a message handler, invoke it + if message_type == "pmessage": + handler = self.patterns.get(message["pattern"], None) + else: + handler = self.channels.get(message["channel"], None) + if handler: + if inspect.iscoroutinefunction(handler): + await handler(message) + else: + handler(message) + return None + elif message_type != "pong": + # this is a subscribe/unsubscribe message. ignore if we don't + # want them + if ignore_subscribe_messages or self.ignore_subscribe_messages: + return None + + return message + + async def run( + self, + *, + exception_handler: Optional["PSWorkerThreadExcHandlerT"] = None, + poll_timeout: float = 1.0, + ) -> None: + """Process pub/sub messages using registered callbacks. + + This is the equivalent of :py:meth:`redis.PubSub.run_in_thread` in + redis-py, but it is a coroutine. To launch it as a separate task, use + ``asyncio.create_task``: + + >>> task = asyncio.create_task(pubsub.run()) + + To shut it down, use asyncio cancellation: + + >>> task.cancel() + >>> await task + """ + for channel, handler in self.channels.items(): + if handler is None: + raise PubSubError(f"Channel: '{channel}' has no handler registered") + for pattern, handler in self.patterns.items(): + if handler is None: + raise PubSubError(f"Pattern: '{pattern}' has no handler registered") + + await self.connect() + while True: + try: + await self.get_message( + ignore_subscribe_messages=True, timeout=poll_timeout + ) + except asyncio.CancelledError: + raise + except BaseException as e: + if exception_handler is None: + raise + res = exception_handler(e, self) + if inspect.isawaitable(res): + await res + # Ensure that other tasks on the event loop get a chance to run + # if we didn't have to block for I/O anywhere. + await asyncio.sleep(0) + + +class PubsubWorkerExceptionHandler(Protocol): + def __call__(self, e: BaseException, pubsub: PubSub): + ... + + +class AsyncPubsubWorkerExceptionHandler(Protocol): + async def __call__(self, e: BaseException, pubsub: PubSub): + ... + + +PSWorkerThreadExcHandlerT = Union[ + PubsubWorkerExceptionHandler, AsyncPubsubWorkerExceptionHandler +] + + +CommandT = Tuple[Tuple[Union[str, bytes], ...], Mapping[str, Any]] +CommandStackT = List[CommandT] + + +class Pipeline(Redis): # lgtm [py/init-calls-subclass] + """ + Pipelines provide a way to transmit multiple commands to the Redis server + in one transmission. 
This is convenient for batch processing, such as + saving all the values in a list to Redis. + + All commands executed within a pipeline are wrapped with MULTI and EXEC + calls. This guarantees all commands executed in the pipeline will be + executed atomically. + + Any command raising an exception does *not* halt the execution of + subsequent commands in the pipeline. Instead, the exception is caught + and its instance is placed into the response list returned by execute(). + Code iterating over the response list should be able to deal with an + instance of an exception as a potential value. In general, these will be + ResponseError exceptions, such as those raised when issuing a command + on a key of a different datatype. + """ + + UNWATCH_COMMANDS = {"DISCARD", "EXEC", "UNWATCH"} + + def __init__( + self, + connection_pool: ConnectionPool, + response_callbacks: MutableMapping[Union[str, bytes], ResponseCallbackT], + transaction: bool, + shard_hint: Optional[str], + ): + self.connection_pool = connection_pool + self.connection = None + self.response_callbacks = response_callbacks + self.is_transaction = transaction + self.shard_hint = shard_hint + self.watching = False + self.command_stack: CommandStackT = [] + self.scripts: Set["Script"] = set() + self.explicit_transaction = False + + async def __aenter__(self: _RedisT) -> _RedisT: + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.reset() + + def __await__(self): + return self._async_self().__await__() + + _DEL_MESSAGE = "Unclosed Pipeline client" + + def __len__(self): + return len(self.command_stack) + + def __bool__(self): + """Pipeline instances should always evaluate to True""" + return True + + async def _async_self(self): + return self + + async def reset(self): + self.command_stack = [] + self.scripts = set() + # make sure to reset the connection state in the event that we were + # watching something + if self.watching and self.connection: + try: + # call this manually since our unwatch or + # immediate_execute_command methods can call reset() + await self.connection.send_command("UNWATCH") + await self.connection.read_response() + except ConnectionError: + # disconnect will also remove any previous WATCHes + if self.connection: + await self.connection.disconnect() + # clean up the other instance attributes + self.watching = False + self.explicit_transaction = False + # we can safely return the connection to the pool here since we're + # sure we're no longer WATCHing anything + if self.connection: + await self.connection_pool.release(self.connection) + self.connection = None + + async def aclose(self) -> None: + """Alias for reset(), a standard method name for cleanup""" + await self.reset() + + def multi(self): + """ + Start a transactional block of the pipeline after WATCH commands + are issued. End the transactional block with `execute`. 
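+
+        A typical optimistic-locking round trip (the key name is
+        illustrative):
+
+        >>> await pipe.watch("counter")
+        >>> value = int(await pipe.get("counter"))
+        >>> pipe.multi()
+        >>> pipe.set("counter", value + 1)
+        >>> await pipe.execute()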
+ """ + if self.explicit_transaction: + raise RedisError("Cannot issue nested calls to MULTI") + if self.command_stack: + raise RedisError( + "Commands without an initial WATCH have already been issued" + ) + self.explicit_transaction = True + + def execute_command( + self, *args, **kwargs + ) -> Union["Pipeline", Awaitable["Pipeline"]]: + if (self.watching or args[0] == "WATCH") and not self.explicit_transaction: + return self.immediate_execute_command(*args, **kwargs) + return self.pipeline_execute_command(*args, **kwargs) + + async def _disconnect_reset_raise(self, conn, error): + """ + Close the connection, reset watching state and + raise an exception if we were watching, + if retry_on_error is not set or the error is not one + of the specified error types. + """ + await conn.disconnect() + # if we were already watching a variable, the watch is no longer + # valid since this connection has died. raise a WatchError, which + # indicates the user should retry this transaction. + if self.watching: + await self.aclose() + raise WatchError( + "A ConnectionError occurred on while watching one or more keys" + ) + # if retry_on_error is not set or the error is not one + # of the specified error types, raise it + if ( + conn.retry_on_error is None + or isinstance(error, tuple(conn.retry_on_error)) is False + ): + await self.aclose() + raise + + async def immediate_execute_command(self, *args, **options): + """ + Execute a command immediately, but don't auto-retry on a + ConnectionError if we're already WATCHing a variable. Used when + issuing WATCH or subsequent commands retrieving their values but before + MULTI is called. + """ + command_name = args[0] + conn = self.connection + # if this is the first call, we need a connection + if not conn: + conn = await self.connection_pool.get_connection( + command_name, self.shard_hint + ) + self.connection = conn + + return await conn.retry.call_with_retry( + lambda: self._send_command_parse_response( + conn, command_name, *args, **options + ), + lambda error: self._disconnect_reset_raise(conn, error), + ) + + def pipeline_execute_command(self, *args, **options): + """ + Stage a command to be executed when execute() is next called + + Returns the current Pipeline object back so commands can be + chained together, such as: + + pipe = pipe.set('foo', 'bar').incr('baz').decr('bang') + + At some other point, you can then run: pipe.execute(), + which will execute all commands queued in the pipe. 
+ """ + self.command_stack.append((args, options)) + return self + + async def _execute_transaction( # noqa: C901 + self, connection: Connection, commands: CommandStackT, raise_on_error + ): + pre: CommandT = (("MULTI",), {}) + post: CommandT = (("EXEC",), {}) + cmds = (pre, *commands, post) + all_cmds = connection.pack_commands( + args for args, options in cmds if EMPTY_RESPONSE not in options + ) + await connection.send_packed_command(all_cmds) + errors = [] + + # parse off the response for MULTI + # NOTE: we need to handle ResponseErrors here and continue + # so that we read all the additional command messages from + # the socket + try: + await self.parse_response(connection, "_") + except ResponseError as err: + errors.append((0, err)) + + # and all the other commands + for i, command in enumerate(commands): + if EMPTY_RESPONSE in command[1]: + errors.append((i, command[1][EMPTY_RESPONSE])) + else: + try: + await self.parse_response(connection, "_") + except ResponseError as err: + self.annotate_exception(err, i + 1, command[0]) + errors.append((i, err)) + + # parse the EXEC. + try: + response = await self.parse_response(connection, "_") + except ExecAbortError as err: + if errors: + raise errors[0][1] from err + raise + + # EXEC clears any watched keys + self.watching = False + + if response is None: + raise WatchError("Watched variable changed.") from None + + # put any parse errors into the response + for i, e in errors: + response.insert(i, e) + + if len(response) != len(commands): + if self.connection: + await self.connection.disconnect() + raise ResponseError( + "Wrong number of response items from pipeline execution" + ) from None + + # find any errors in the response and raise if necessary + if raise_on_error: + self.raise_first_error(commands, response) + + # We have to run response callbacks manually + data = [] + for r, cmd in zip(response, commands): + if not isinstance(r, Exception): + args, options = cmd + command_name = args[0] + if command_name in self.response_callbacks: + r = self.response_callbacks[command_name](r, **options) + if inspect.isawaitable(r): + r = await r + data.append(r) + return data + + async def _execute_pipeline( + self, connection: Connection, commands: CommandStackT, raise_on_error: bool + ): + # build up all commands into a single request to increase network perf + all_cmds = connection.pack_commands([args for args, _ in commands]) + await connection.send_packed_command(all_cmds) + + response = [] + for args, options in commands: + try: + response.append( + await self.parse_response(connection, args[0], **options) + ) + except ResponseError as e: + response.append(e) + + if raise_on_error: + self.raise_first_error(commands, response) + return response + + def raise_first_error(self, commands: CommandStackT, response: Iterable[Any]): + for i, r in enumerate(response): + if isinstance(r, ResponseError): + self.annotate_exception(r, i + 1, commands[i][0]) + raise r + + def annotate_exception( + self, exception: Exception, number: int, command: Iterable[object] + ) -> None: + cmd = " ".join(map(safe_str, command)) + msg = f"Command # {number} ({cmd}) of pipeline caused error: {exception.args}" + exception.args = (msg,) + exception.args[1:] + + async def parse_response( + self, connection: Connection, command_name: Union[str, bytes], **options + ): + result = await super().parse_response(connection, command_name, **options) + if command_name in self.UNWATCH_COMMANDS: + self.watching = False + elif command_name == "WATCH": + self.watching = True + 
return result + + async def load_scripts(self): + # make sure all scripts that are about to be run on this pipeline exist + scripts = list(self.scripts) + immediate = self.immediate_execute_command + shas = [s.sha for s in scripts] + # we can't use the normal script_* methods because they would just + # get buffered in the pipeline. + exists = await immediate("SCRIPT EXISTS", *shas) + if not all(exists): + for s, exist in zip(scripts, exists): + if not exist: + s.sha = await immediate("SCRIPT LOAD", s.script) + + async def _disconnect_raise_reset(self, conn: Connection, error: Exception): + """ + Close the connection, raise an exception if we were watching, + and raise an exception if retry_on_error is not set or the + error is not one of the specified error types. + """ + await conn.disconnect() + # if we were watching a variable, the watch is no longer valid + # since this connection has died. raise a WatchError, which + # indicates the user should retry this transaction. + if self.watching: + raise WatchError( + "A ConnectionError occurred while watching one or more keys" + ) + # if retry_on_error is not set or the error is not one + # of the specified error types, raise it + if ( + conn.retry_on_error is None + or isinstance(error, tuple(conn.retry_on_error)) is False + ): + await self.reset() + raise + + async def execute(self, raise_on_error: bool = True): + """Execute all the commands in the current pipeline""" + stack = self.command_stack + if not stack and not self.watching: + return [] + if self.scripts: + await self.load_scripts() + if self.is_transaction or self.explicit_transaction: + execute = self._execute_transaction + else: + execute = self._execute_pipeline + + conn = self.connection + if not conn: + conn = await self.connection_pool.get_connection("MULTI", self.shard_hint) + # assign to self.connection so reset() releases the connection + # back to the pool after we're done + self.connection = conn + conn = cast(Connection, conn) + + try: + return await conn.retry.call_with_retry( + lambda: execute(conn, stack, raise_on_error), + lambda error: self._disconnect_raise_reset(conn, error), + ) + finally: + await self.reset() + + async def discard(self): + """Flushes all previously queued commands + See: https://redis.io/commands/DISCARD + """ + await self.execute_command("DISCARD") + + async def watch(self, *names: KeyT): + """Watches the values at keys ``names``""" + if self.explicit_transaction: + raise RedisError("Cannot issue a WATCH after a MULTI") + return await self.execute_command("WATCH", *names) + + async def unwatch(self): + """Unwatches all previously specified keys""" + return self.watching and await self.execute_command("UNWATCH") or True diff --git a/templates/skills/spotify/dependencies/redis/asyncio/cluster.py b/templates/skills/spotify/dependencies/redis/asyncio/cluster.py new file mode 100644 index 00000000..3bf147d7 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/asyncio/cluster.py @@ -0,0 +1,1625 @@ +import asyncio +import collections +import random +import socket +import ssl +import warnings +from typing import ( + Any, + Callable, + Deque, + Dict, + Generator, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, +) + +from redis._parsers import AsyncCommandsParser, Encoder +from redis._parsers.helpers import ( + _RedisCallbacks, + _RedisCallbacksRESP2, + _RedisCallbacksRESP3, +) +from redis.asyncio.client import ResponseCallbackT +from redis.asyncio.connection import Connection, DefaultParser, SSLConnection, parse_url
+from redis.asyncio.lock import Lock +from redis.asyncio.retry import Retry +from redis.backoff import default_backoff +from redis.client import EMPTY_RESPONSE, NEVER_DECODE, AbstractRedis +from redis.cluster import ( + PIPELINE_BLOCKED_COMMANDS, + PRIMARY, + REPLICA, + SLOT_ID, + AbstractRedisCluster, + LoadBalancer, + block_pipeline_command, + get_node_name, + parse_cluster_slots, +) +from redis.commands import READ_COMMANDS, AsyncRedisClusterCommands +from redis.crc import REDIS_CLUSTER_HASH_SLOTS, key_slot +from redis.credentials import CredentialProvider +from redis.exceptions import ( + AskError, + BusyLoadingError, + ClusterCrossSlotError, + ClusterDownError, + ClusterError, + ConnectionError, + DataError, + MasterDownError, + MaxConnectionsError, + MovedError, + RedisClusterException, + ResponseError, + SlotNotCoveredError, + TimeoutError, + TryAgainError, +) +from redis.typing import AnyKeyT, EncodableT, KeyT +from redis.utils import ( + deprecated_function, + dict_merge, + get_lib_version, + safe_str, + str_if_bytes, +) + +TargetNodesT = TypeVar( + "TargetNodesT", str, "ClusterNode", List["ClusterNode"], Dict[Any, "ClusterNode"] +) + + +class ClusterParser(DefaultParser): + EXCEPTION_CLASSES = dict_merge( + DefaultParser.EXCEPTION_CLASSES, + { + "ASK": AskError, + "CLUSTERDOWN": ClusterDownError, + "CROSSSLOT": ClusterCrossSlotError, + "MASTERDOWN": MasterDownError, + "MOVED": MovedError, + "TRYAGAIN": TryAgainError, + }, + ) + + +class RedisCluster(AbstractRedis, AbstractRedisCluster, AsyncRedisClusterCommands): + """ + Create a new RedisCluster client. + + Pass one of the following parameters: + + - `host` & `port` + - `startup_nodes` + + | Use ``await`` :meth:`initialize` to find cluster nodes & create connections. + | Use ``await`` :meth:`close` to disconnect connections & close client. + + Many commands support the target_nodes kwarg. It can be one of the + :attr:`NODE_FLAGS`: + + - :attr:`PRIMARIES` + - :attr:`REPLICAS` + - :attr:`ALL_NODES` + - :attr:`RANDOM` + - :attr:`DEFAULT_NODE` + + Note: This client is not thread/process/fork safe. + + :param host: + | Can be used to point to a startup node + :param port: + | Port used if **host** is provided + :param startup_nodes: + | :class:`~.ClusterNode` to use as a startup node + :param require_full_coverage: + | When set to ``False``: the client will not require a full coverage of + the slots. However, if not all slots are covered, and at least one node + has ``cluster-require-full-coverage`` set to ``yes``, the server will throw + a :class:`~.ClusterDownError` for some key-based commands. + | When set to ``True``: all slots must be covered to construct the cluster + client. If not all slots are covered, :class:`~.RedisClusterException` will be + thrown. + | See: + https://redis.io/docs/manual/scaling/#redis-cluster-configuration-parameters + :param read_from_replicas: + | Enable read from replicas in READONLY mode. You can read possibly stale data. + When set to true, read commands will be assigned between the primary and + its replicas in a Round-Robin manner. + :param reinitialize_steps: + | Specifies the number of MOVED errors that need to occur before reinitializing + the whole cluster topology. If a MOVED error occurs and the cluster does not + need to be reinitialized on this current error handling, only the MOVED slot + will be patched with the redirected node. + To reinitialize the cluster on every MOVED error, set reinitialize_steps to 1. + To avoid reinitializing the cluster on moved errors, set reinitialize_steps to + 0.
+ :param cluster_error_retry_attempts: + | Number of times to retry before raising an error when :class:`~.TimeoutError` + or :class:`~.ConnectionError` or :class:`~.ClusterDownError` are encountered + :param connection_error_retry_attempts: + | Number of times to retry before reinitializing when :class:`~.TimeoutError` + or :class:`~.ConnectionError` are encountered. + The default backoff strategy will be set if Retry object is not passed (see + default_backoff in backoff.py). To change it, pass a custom Retry object + using the "retry" keyword. + :param max_connections: + | Maximum number of connections per node. If there are no free connections & the + maximum number of connections are already created, a + :class:`~.MaxConnectionsError` is raised. This error may be retried as defined + by :attr:`connection_error_retry_attempts` + :param address_remap: + | An optional callable which, when provided with an internal network + address of a node, e.g. a `(host, port)` tuple, will return the address + where the node is reachable. This can be used to map the addresses at + which the nodes _think_ they are, to addresses at which a client may + reach them, such as when they sit behind a proxy. + + | Rest of the arguments will be passed to the + :class:`~redis.asyncio.connection.Connection` instances when created + + :raises RedisClusterException: + if any arguments are invalid or unknown. Eg: + + - `db` != 0 or None + - `path` argument for unix socket connection + - none of the `host`/`port` & `startup_nodes` were provided + + """ + + @classmethod + def from_url(cls, url: str, **kwargs: Any) -> "RedisCluster": + """ + Return a Redis client object configured from the given URL. + + For example:: + + redis://[[username]:[password]]@localhost:6379/0 + rediss://[[username]:[password]]@localhost:6379/0 + + Three URL schemes are supported: + + - `redis://` creates a TCP socket connection. See more at: + + - `rediss://` creates a SSL wrapped TCP socket connection. See more at: + + + The username, password, hostname, path and all querystring values are passed + through ``urllib.parse.unquote`` in order to replace any percent-encoded values + with their corresponding characters. + + All querystring options are cast to their appropriate Python types. Boolean + arguments can be specified with string values "True"/"False" or "Yes"/"No". + Values that cannot be properly cast cause a ``ValueError`` to be raised. Once + parsed, the querystring arguments and keyword arguments are passed to + :class:`~redis.asyncio.connection.Connection` when created. + In the case of conflicting arguments, querystring arguments are used. 
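+
+        A minimal sketch (the URL is illustrative):
+
+        >>> rc = RedisCluster.from_url("redis://localhost:6379/0")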
+ """ + kwargs.update(parse_url(url)) + if kwargs.pop("connection_class", None) is SSLConnection: + kwargs["ssl"] = True + return cls(**kwargs) + + __slots__ = ( + "_initialize", + "_lock", + "cluster_error_retry_attempts", + "command_flags", + "commands_parser", + "connection_error_retry_attempts", + "connection_kwargs", + "encoder", + "node_flags", + "nodes_manager", + "read_from_replicas", + "reinitialize_counter", + "reinitialize_steps", + "response_callbacks", + "result_callbacks", + ) + + def __init__( + self, + host: Optional[str] = None, + port: Union[str, int] = 6379, + # Cluster related kwargs + startup_nodes: Optional[List["ClusterNode"]] = None, + require_full_coverage: bool = True, + read_from_replicas: bool = False, + reinitialize_steps: int = 5, + cluster_error_retry_attempts: int = 3, + connection_error_retry_attempts: int = 3, + max_connections: int = 2**31, + # Client related kwargs + db: Union[str, int] = 0, + path: Optional[str] = None, + credential_provider: Optional[CredentialProvider] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_name: Optional[str] = None, + lib_name: Optional[str] = "redis-py", + lib_version: Optional[str] = get_lib_version(), + # Encoding related kwargs + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + # Connection related kwargs + health_check_interval: float = 0, + socket_connect_timeout: Optional[float] = None, + socket_keepalive: bool = False, + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, + socket_timeout: Optional[float] = None, + retry: Optional["Retry"] = None, + retry_on_error: Optional[List[Type[Exception]]] = None, + # SSL related kwargs + ssl: bool = False, + ssl_ca_certs: Optional[str] = None, + ssl_ca_data: Optional[str] = None, + ssl_cert_reqs: str = "required", + ssl_certfile: Optional[str] = None, + ssl_check_hostname: bool = False, + ssl_keyfile: Optional[str] = None, + ssl_min_version: Optional[ssl.TLSVersion] = None, + ssl_ciphers: Optional[str] = None, + protocol: Optional[int] = 2, + address_remap: Optional[Callable[[str, int], Tuple[str, int]]] = None, + ) -> None: + if db: + raise RedisClusterException( + "Argument 'db' must be 0 or None in cluster mode" + ) + + if path: + raise RedisClusterException( + "Unix domain socket is not supported in cluster mode" + ) + + if (not host or not port) and not startup_nodes: + raise RedisClusterException( + "RedisCluster requires at least one node to discover the cluster.\n" + "Please provide one of the following or use RedisCluster.from_url:\n" + ' - host and port: RedisCluster(host="localhost", port=6379)\n' + " - startup_nodes: RedisCluster(startup_nodes=[" + 'ClusterNode("localhost", 6379), ClusterNode("localhost", 6380)])' + ) + + kwargs: Dict[str, Any] = { + "max_connections": max_connections, + "connection_class": Connection, + "parser_class": ClusterParser, + # Client related kwargs + "credential_provider": credential_provider, + "username": username, + "password": password, + "client_name": client_name, + "lib_name": lib_name, + "lib_version": lib_version, + # Encoding related kwargs + "encoding": encoding, + "encoding_errors": encoding_errors, + "decode_responses": decode_responses, + # Connection related kwargs + "health_check_interval": health_check_interval, + "socket_connect_timeout": socket_connect_timeout, + "socket_keepalive": socket_keepalive, + "socket_keepalive_options": socket_keepalive_options, + "socket_timeout": socket_timeout, + "retry": retry, + 
"protocol": protocol, + } + + if ssl: + # SSL related kwargs + kwargs.update( + { + "connection_class": SSLConnection, + "ssl_ca_certs": ssl_ca_certs, + "ssl_ca_data": ssl_ca_data, + "ssl_cert_reqs": ssl_cert_reqs, + "ssl_certfile": ssl_certfile, + "ssl_check_hostname": ssl_check_hostname, + "ssl_keyfile": ssl_keyfile, + "ssl_min_version": ssl_min_version, + "ssl_ciphers": ssl_ciphers, + } + ) + + if read_from_replicas: + # Call our on_connect function to configure READONLY mode + kwargs["redis_connect_func"] = self.on_connect + + self.retry = retry + if retry or retry_on_error or connection_error_retry_attempts > 0: + # Set a retry object for all cluster nodes + self.retry = retry or Retry( + default_backoff(), connection_error_retry_attempts + ) + if not retry_on_error: + # Default errors for retrying + retry_on_error = [ConnectionError, TimeoutError] + self.retry.update_supported_errors(retry_on_error) + kwargs.update({"retry": self.retry}) + + kwargs["response_callbacks"] = _RedisCallbacks.copy() + if kwargs.get("protocol") in ["3", 3]: + kwargs["response_callbacks"].update(_RedisCallbacksRESP3) + else: + kwargs["response_callbacks"].update(_RedisCallbacksRESP2) + self.connection_kwargs = kwargs + + if startup_nodes: + passed_nodes = [] + for node in startup_nodes: + passed_nodes.append( + ClusterNode(node.host, node.port, **self.connection_kwargs) + ) + startup_nodes = passed_nodes + else: + startup_nodes = [] + if host and port: + startup_nodes.append(ClusterNode(host, port, **self.connection_kwargs)) + + self.nodes_manager = NodesManager( + startup_nodes, + require_full_coverage, + kwargs, + address_remap=address_remap, + ) + self.encoder = Encoder(encoding, encoding_errors, decode_responses) + self.read_from_replicas = read_from_replicas + self.reinitialize_steps = reinitialize_steps + self.cluster_error_retry_attempts = cluster_error_retry_attempts + self.connection_error_retry_attempts = connection_error_retry_attempts + self.reinitialize_counter = 0 + self.commands_parser = AsyncCommandsParser() + self.node_flags = self.__class__.NODE_FLAGS.copy() + self.command_flags = self.__class__.COMMAND_FLAGS.copy() + self.response_callbacks = kwargs["response_callbacks"] + self.result_callbacks = self.__class__.RESULT_CALLBACKS.copy() + self.result_callbacks[ + "CLUSTER SLOTS" + ] = lambda cmd, res, **kwargs: parse_cluster_slots( + list(res.values())[0], **kwargs + ) + + self._initialize = True + self._lock: Optional[asyncio.Lock] = None + + async def initialize(self) -> "RedisCluster": + """Get all nodes from startup nodes & creates connections if not initialized.""" + if self._initialize: + if not self._lock: + self._lock = asyncio.Lock() + async with self._lock: + if self._initialize: + try: + await self.nodes_manager.initialize() + await self.commands_parser.initialize( + self.nodes_manager.default_node + ) + self._initialize = False + except BaseException: + await self.nodes_manager.aclose() + await self.nodes_manager.aclose("startup_nodes") + raise + return self + + async def aclose(self) -> None: + """Close all connections & client if initialized.""" + if not self._initialize: + if not self._lock: + self._lock = asyncio.Lock() + async with self._lock: + if not self._initialize: + self._initialize = True + await self.nodes_manager.aclose() + await self.nodes_manager.aclose("startup_nodes") + + @deprecated_function(version="5.0.0", reason="Use aclose() instead", name="close") + async def close(self) -> None: + """alias for aclose() for backwards compatibility""" + await self.aclose() 
+ + async def __aenter__(self) -> "RedisCluster": + return await self.initialize() + + async def __aexit__(self, exc_type: None, exc_value: None, traceback: None) -> None: + await self.aclose() + + def __await__(self) -> Generator[Any, None, "RedisCluster"]: + return self.initialize().__await__() + + _DEL_MESSAGE = "Unclosed RedisCluster client" + + def __del__( + self, + _warn: Any = warnings.warn, + _grl: Any = asyncio.get_running_loop, + ) -> None: + if hasattr(self, "_initialize") and not self._initialize: + _warn(f"{self._DEL_MESSAGE} {self!r}", ResourceWarning, source=self) + try: + context = {"client": self, "message": self._DEL_MESSAGE} + _grl().call_exception_handler(context) + except RuntimeError: + pass + + async def on_connect(self, connection: Connection) -> None: + await connection.on_connect() + + # Sending READONLY command to server to configure connection as + # readonly. Since each cluster node may change its server type due + # to a failover, we should establish a READONLY connection + # regardless of the server type. If this is a primary connection, + # READONLY would not affect executing write commands. + await connection.send_command("READONLY") + if str_if_bytes(await connection.read_response()) != "OK": + raise ConnectionError("READONLY command failed") + + def get_nodes(self) -> List["ClusterNode"]: + """Get all nodes of the cluster.""" + return list(self.nodes_manager.nodes_cache.values()) + + def get_primaries(self) -> List["ClusterNode"]: + """Get the primary nodes of the cluster.""" + return self.nodes_manager.get_nodes_by_server_type(PRIMARY) + + def get_replicas(self) -> List["ClusterNode"]: + """Get the replica nodes of the cluster.""" + return self.nodes_manager.get_nodes_by_server_type(REPLICA) + + def get_random_node(self) -> "ClusterNode": + """Get a random node of the cluster.""" + return random.choice(list(self.nodes_manager.nodes_cache.values())) + + def get_default_node(self) -> "ClusterNode": + """Get the default node of the client.""" + return self.nodes_manager.default_node + + def set_default_node(self, node: "ClusterNode") -> None: + """ + Set the default node of the client. + + :raises DataError: if None is passed or node does not exist in cluster. + """ + if not node or not self.get_node(node_name=node.name): + raise DataError("The requested node does not exist in the cluster.") + + self.nodes_manager.default_node = node + + def get_node( + self, + host: Optional[str] = None, + port: Optional[int] = None, + node_name: Optional[str] = None, + ) -> Optional["ClusterNode"]: + """Get node by (host, port) or node_name.""" + return self.nodes_manager.get_node(host, port, node_name) + + def get_node_from_key( + self, key: str, replica: bool = False + ) -> Optional["ClusterNode"]: + """ + Get the cluster node corresponding to the provided key. + + :param key: + :param replica: + | Indicates if a replica should be returned + | + None will be returned if no replica holds this key + + :raises SlotNotCoveredError: if the key is not covered by any slot. + """ + slot = self.keyslot(key) + slot_cache = self.nodes_manager.slots_cache.get(slot) + if not slot_cache: + raise SlotNotCoveredError(f'Slot "{slot}" is not covered by the cluster.') + + if replica: + if len(self.nodes_manager.slots_cache[slot]) < 2: + return None + node_idx = 1 + else: + node_idx = 0 + + return slot_cache[node_idx] + + def keyslot(self, key: EncodableT) -> int: + """ + Find the keyslot for a given key.
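+
+        For example (the slot is derived from a CRC16 hash of the key):
+
+        >>> slot = rc.keyslot("foo")  # an int in range(16384)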
+ + See: https://redis.io/docs/manual/scaling/#redis-cluster-data-sharding + """ + return key_slot(self.encoder.encode(key)) + + def get_encoder(self) -> Encoder: + """Get the encoder object of the client.""" + return self.encoder + + def get_connection_kwargs(self) -> Dict[str, Optional[Any]]: + """Get the kwargs passed to :class:`~redis.asyncio.connection.Connection`.""" + return self.connection_kwargs + + def get_retry(self) -> Optional["Retry"]: + return self.retry + + def set_retry(self, retry: "Retry") -> None: + self.retry = retry + for node in self.get_nodes(): + node.connection_kwargs.update({"retry": retry}) + for conn in node._connections: + conn.retry = retry + + def set_response_callback(self, command: str, callback: ResponseCallbackT) -> None: + """Set a custom response callback.""" + self.response_callbacks[command] = callback + + async def _determine_nodes( + self, command: str, *args: Any, node_flag: Optional[str] = None + ) -> List["ClusterNode"]: + # Determine which nodes the command should be executed on. + # Returns a list of target nodes. + if not node_flag: + # get the nodes group for this command if it was predefined + node_flag = self.command_flags.get(command) + + if node_flag in self.node_flags: + if node_flag == self.__class__.DEFAULT_NODE: + # return the cluster's default node + return [self.nodes_manager.default_node] + if node_flag == self.__class__.PRIMARIES: + # return all primaries + return self.nodes_manager.get_nodes_by_server_type(PRIMARY) + if node_flag == self.__class__.REPLICAS: + # return all replicas + return self.nodes_manager.get_nodes_by_server_type(REPLICA) + if node_flag == self.__class__.ALL_NODES: + # return all nodes + return list(self.nodes_manager.nodes_cache.values()) + if node_flag == self.__class__.RANDOM: + # return a random node + return [random.choice(list(self.nodes_manager.nodes_cache.values()))] + + # get the node that holds the key's slot + return [ + self.nodes_manager.get_node_from_slot( + await self._determine_slot(command, *args), + self.read_from_replicas and command in READ_COMMANDS, + ) + ] + + async def _determine_slot(self, command: str, *args: Any) -> int: + if self.command_flags.get(command) == SLOT_ID: + # The command contains the slot ID + return int(args[0]) + + # Get the keys in the command + + # EVAL and EVALSHA are common enough that it's wasteful to go to the + # redis server to parse the keys. Besides, there is a bug in redis<7.0 + # where `self._get_command_keys()` fails anyway. So, we special case + # EVAL/EVALSHA. + # - issue: https://github.com/redis/redis/issues/9493 + # - fix: https://github.com/redis/redis/pull/9733 + if command.upper() in ("EVAL", "EVALSHA"): + # command syntax: EVAL "script body" num_keys ... + if len(args) < 2: + raise RedisClusterException( + f"Invalid args in command: {command, *args}" + ) + keys = args[2 : 2 + int(args[1])] + # if there are 0 keys, that means the script can be run on any node + # so we can just return a random slot + if not keys: + return random.randrange(0, REDIS_CLUSTER_HASH_SLOTS) + else: + keys = await self.commands_parser.get_keys(command, *args) + if not keys: + # FCALL can call a function with 0 keys, that means the function + # can be run on any node so we can just return a random slot + if command.upper() in ("FCALL", "FCALL_RO"): + return random.randrange(0, REDIS_CLUSTER_HASH_SLOTS) + raise RedisClusterException( + "No way to dispatch this command to Redis Cluster. " + "Missing key.\nYou can execute the command by specifying " + f"target nodes.\nCommand: {args}" + )
+ + # single key command + if len(keys) == 1: + return self.keyslot(keys[0]) + + # multi-key command; we need to make sure all keys are mapped to + # the same slot + slots = {self.keyslot(key) for key in keys} + if len(slots) != 1: + raise RedisClusterException( + f"{command} - all keys must map to the same key slot" + ) + + return slots.pop() + + def _is_node_flag(self, target_nodes: Any) -> bool: + return isinstance(target_nodes, str) and target_nodes in self.node_flags + + def _parse_target_nodes(self, target_nodes: Any) -> List["ClusterNode"]: + if isinstance(target_nodes, list): + nodes = target_nodes + elif isinstance(target_nodes, ClusterNode): + # Supports passing a single ClusterNode as a variable + nodes = [target_nodes] + elif isinstance(target_nodes, dict): + # Supports dictionaries of the format {node_name: node}. + # It enables executing commands on multiple nodes as follows: + # rc.cluster_save_config(rc.get_primaries()) + nodes = list(target_nodes.values()) + else: + raise TypeError( + "target_nodes type can be one of the following: " + "node_flag (PRIMARIES, REPLICAS, RANDOM, ALL_NODES), " + "ClusterNode, list, or dict. " + f"The passed type is {type(target_nodes)}" + ) + return nodes + + async def execute_command(self, *args: EncodableT, **kwargs: Any) -> Any: + """ + Execute a raw command on the appropriate cluster node or target_nodes. + + It will retry the command as specified by :attr:`cluster_error_retry_attempts` & + then raise an exception. + + :param args: + | Raw command args + :param kwargs: + + - target_nodes: :attr:`NODE_FLAGS` or :class:`~.ClusterNode` + or List[:class:`~.ClusterNode`] or Dict[Any, :class:`~.ClusterNode`] + - Rest of the kwargs are passed to the Redis connection + + :raises RedisClusterException: if target_nodes is not provided & the command + can't be mapped to a slot + """ + command = args[0] + target_nodes = [] + target_nodes_specified = False + retry_attempts = self.cluster_error_retry_attempts + + passed_targets = kwargs.pop("target_nodes", None) + if passed_targets and not self._is_node_flag(passed_targets): + target_nodes = self._parse_target_nodes(passed_targets) + target_nodes_specified = True + retry_attempts = 0 + + # Add one for the first execution + execute_attempts = 1 + retry_attempts + for _ in range(execute_attempts): + if self._initialize: + await self.initialize() + if ( + len(target_nodes) == 1 + and target_nodes[0] == self.get_default_node() + ): + # Replace the default cluster node + self.replace_default_node() + try: + if not target_nodes_specified: + # Determine the nodes to execute the command on + target_nodes = await self._determine_nodes( + *args, node_flag=passed_targets + ) + if not target_nodes: + raise RedisClusterException( + f"No targets were found to execute {args} command on" + ) + + if len(target_nodes) == 1: + # Return the processed result + ret = await self._execute_command(target_nodes[0], *args, **kwargs) + if command in self.result_callbacks: + return self.result_callbacks[command]( + command, {target_nodes[0].name: ret}, **kwargs + ) + return ret + else: + keys = [node.name for node in target_nodes] + values = await asyncio.gather( + *( + asyncio.create_task( + self._execute_command(node, *args, **kwargs) + ) + for node in target_nodes + ) + ) + if command in self.result_callbacks: + return self.result_callbacks[command]( + command, dict(zip(keys, values)), **kwargs + ) + return dict(zip(keys, values))
+ except Exception as e: + if retry_attempts > 0 and type(e) in self.__class__.ERRORS_ALLOW_RETRY: + # The nodes and slots caches should be reinitialized. + # Try again with the new cluster setup. + retry_attempts -= 1 + continue + else: + # raise the exception + raise e + + async def _execute_command( + self, target_node: "ClusterNode", *args: Union[KeyT, EncodableT], **kwargs: Any + ) -> Any: + asking = moved = False + redirect_addr = None + ttl = self.RedisClusterRequestTTL + + while ttl > 0: + ttl -= 1 + try: + if asking: + target_node = self.get_node(node_name=redirect_addr) + await target_node.execute_command("ASKING") + asking = False + elif moved: + # MOVED occurred and the slots cache was updated, + # refresh the target node + slot = await self._determine_slot(*args) + target_node = self.nodes_manager.get_node_from_slot( + slot, self.read_from_replicas and args[0] in READ_COMMANDS + ) + moved = False + + return await target_node.execute_command(*args, **kwargs) + except (BusyLoadingError, MaxConnectionsError): + raise + except (ConnectionError, TimeoutError): + # Connection retries are being handled in the node's + # Retry object. + # Remove the failed node from the startup nodes before we try + # to reinitialize the cluster + self.nodes_manager.startup_nodes.pop(target_node.name, None) + # Force a hard reinitialization of the node/slots setup + # and try again with the new setup + await self.aclose() + raise + except ClusterDownError: + # ClusterDownError can occur during a failover and to get + # self-healed, we will try to reinitialize the cluster layout + # and retry executing the command + await self.aclose() + await asyncio.sleep(0.25) + raise + except MovedError as e: + # First, we will try to patch the slots/nodes cache with the + # redirected node output and try again. If MovedError exceeds + # 'reinitialize_steps' number of times, we will force + # reinitializing the tables, and then try again. + # 'reinitialize_steps' counter will increase faster when + # the same client object is shared between multiple threads. To + # reduce the frequency you can set this variable in the + # RedisCluster constructor. + self.reinitialize_counter += 1 + if ( + self.reinitialize_steps + and self.reinitialize_counter % self.reinitialize_steps == 0 + ): + await self.aclose() + # Reset the counter + self.reinitialize_counter = 0 + else: + self.nodes_manager._moved_exception = e + moved = True + except AskError as e: + redirect_addr = get_node_name(host=e.host, port=e.port) + asking = True + except TryAgainError: + if ttl < self.RedisClusterRequestTTL / 2: + await asyncio.sleep(0.05) + + raise ClusterError("TTL exhausted.") + + def pipeline( + self, transaction: Optional[Any] = None, shard_hint: Optional[Any] = None + ) -> "ClusterPipeline": + """ + Create & return a new :class:`~.ClusterPipeline` object. + + Cluster implementation of pipeline does not support transaction or shard_hint.
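+
+        A short usage sketch (key name illustrative); commands are buffered
+        and sent together when ``execute()`` is awaited:
+
+        >>> async with rc.pipeline() as pipe:
+        ...     results = await pipe.set("A", 1).get("A").execute()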
+ + :raises RedisClusterException: if transaction or shard_hint are truthy values + """ + if shard_hint: + raise RedisClusterException("shard_hint is deprecated in cluster mode") + + if transaction: + raise RedisClusterException("transaction is deprecated in cluster mode") + + return ClusterPipeline(self) + + def lock( + self, + name: KeyT, + timeout: Optional[float] = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: Optional[float] = None, + lock_class: Optional[Type[Lock]] = None, + thread_local: bool = True, + ) -> Lock: + """ + Return a new Lock object using key ``name`` that mimics + the behavior of threading.Lock. + + If specified, ``timeout`` indicates a maximum life for the lock. + By default, it will remain locked until release() is called. + + ``sleep`` indicates the amount of time to sleep per loop iteration + when the lock is in blocking mode and another client is currently + holding the lock. + + ``blocking`` indicates whether calling ``acquire`` should block until + the lock has been acquired or to fail immediately, causing ``acquire`` + to return False and the lock not being acquired. Defaults to True. + Note this value can be overridden by passing a ``blocking`` + argument to ``acquire``. + + ``blocking_timeout`` indicates the maximum amount of time in seconds to + spend trying to acquire the lock. A value of ``None`` indicates + continue trying forever. ``blocking_timeout`` can be specified as a + float or integer, both representing the number of seconds to wait. + + ``lock_class`` forces the specified lock implementation. Note that as + of redis-py 3.0, the only lock class we implement is ``Lock`` (which is + a Lua-based lock). So, it's unlikely you'll need this parameter, unless + you have created your own custom lock class. + + ``thread_local`` indicates whether the lock token is placed in + thread-local storage. By default, the token is placed in thread local + storage so that a thread only sees its token, not a token set by + another thread. Consider the following timeline: + + time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds. + thread-1 sets the token to "abc" + time: 1, thread-2 blocks trying to acquire `my-lock` using the + Lock instance. + time: 5, thread-1 has not yet completed. redis expires the lock + key. + time: 5, thread-2 acquired `my-lock` now that it's available. + thread-2 sets the token to "xyz" + time: 6, thread-1 finishes its work and calls release(). if the + token is *not* stored in thread local storage, then + thread-1 would see the token value as "xyz" and would be + able to successfully release the thread-2's lock. + + In some use cases it's necessary to disable thread local storage. For + example, if you have code where one thread acquires a lock and passes + that lock instance to a worker thread to release later. If thread + local storage isn't disabled in this case, the worker thread won't see + the token set by the thread that acquired the lock. Our assumption + is that these cases aren't common and as such default to using + thread local storage.""" + if lock_class is None: + lock_class = Lock + return lock_class( + self, + name, + timeout=timeout, + sleep=sleep, + blocking=blocking, + blocking_timeout=blocking_timeout, + thread_local=thread_local, + ) + + +class ClusterNode: + """ + Create a new ClusterNode. + + Each ClusterNode manages multiple :class:`~redis.asyncio.connection.Connection` + objects for the (host, port). 
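+
+    Instances are normally created by the client itself, but they can also
+    be passed in as startup nodes (host/port illustrative):
+
+    >>> startup_nodes = [ClusterNode("localhost", 6379)]
+    >>> rc = RedisCluster(startup_nodes=startup_nodes)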
+ """ + + __slots__ = ( + "_connections", + "_free", + "connection_class", + "connection_kwargs", + "host", + "max_connections", + "name", + "port", + "response_callbacks", + "server_type", + ) + + def __init__( + self, + host: str, + port: Union[str, int], + server_type: Optional[str] = None, + *, + max_connections: int = 2**31, + connection_class: Type[Connection] = Connection, + **connection_kwargs: Any, + ) -> None: + if host == "localhost": + host = socket.gethostbyname(host) + + connection_kwargs["host"] = host + connection_kwargs["port"] = port + self.host = host + self.port = port + self.name = get_node_name(host, port) + self.server_type = server_type + + self.max_connections = max_connections + self.connection_class = connection_class + self.connection_kwargs = connection_kwargs + self.response_callbacks = connection_kwargs.pop("response_callbacks", {}) + + self._connections: List[Connection] = [] + self._free: Deque[Connection] = collections.deque(maxlen=self.max_connections) + + def __repr__(self) -> str: + return ( + f"[host={self.host}, port={self.port}, " + f"name={self.name}, server_type={self.server_type}]" + ) + + def __eq__(self, obj: Any) -> bool: + return isinstance(obj, ClusterNode) and obj.name == self.name + + _DEL_MESSAGE = "Unclosed ClusterNode object" + + def __del__( + self, + _warn: Any = warnings.warn, + _grl: Any = asyncio.get_running_loop, + ) -> None: + for connection in self._connections: + if connection.is_connected: + _warn(f"{self._DEL_MESSAGE} {self!r}", ResourceWarning, source=self) + + try: + context = {"client": self, "message": self._DEL_MESSAGE} + _grl().call_exception_handler(context) + except RuntimeError: + pass + break + + async def disconnect(self) -> None: + ret = await asyncio.gather( + *( + asyncio.create_task(connection.disconnect()) + for connection in self._connections + ), + return_exceptions=True, + ) + exc = next((res for res in ret if isinstance(res, Exception)), None) + if exc: + raise exc + + def acquire_connection(self) -> Connection: + try: + return self._free.popleft() + except IndexError: + if len(self._connections) < self.max_connections: + connection = self.connection_class(**self.connection_kwargs) + self._connections.append(connection) + return connection + + raise MaxConnectionsError() + + async def parse_response( + self, connection: Connection, command: str, **kwargs: Any + ) -> Any: + try: + if NEVER_DECODE in kwargs: + response = await connection.read_response(disable_decoding=True) + kwargs.pop(NEVER_DECODE) + else: + response = await connection.read_response() + except ResponseError: + if EMPTY_RESPONSE in kwargs: + return kwargs[EMPTY_RESPONSE] + raise + + if EMPTY_RESPONSE in kwargs: + kwargs.pop(EMPTY_RESPONSE) + + # Return response + if command in self.response_callbacks: + return self.response_callbacks[command](response, **kwargs) + + return response + + async def execute_command(self, *args: Any, **kwargs: Any) -> Any: + # Acquire connection + connection = self.acquire_connection() + + # Execute command + await connection.send_packed_command(connection.pack_command(*args), False) + + # Read response + try: + return await self.parse_response(connection, args[0], **kwargs) + finally: + # Release connection + self._free.append(connection) + + async def execute_pipeline(self, commands: List["PipelineCommand"]) -> bool: + # Acquire connection + connection = self.acquire_connection() + + # Execute command + await connection.send_packed_command( + connection.pack_commands(cmd.args for cmd in commands), False + ) + + 
# Read responses + ret = False + for cmd in commands: + try: + cmd.result = await self.parse_response( + connection, cmd.args[0], **cmd.kwargs + ) + except Exception as e: + cmd.result = e + ret = True + + # Release connection + self._free.append(connection) + + return ret + + +class NodesManager: + __slots__ = ( + "_moved_exception", + "connection_kwargs", + "default_node", + "nodes_cache", + "read_load_balancer", + "require_full_coverage", + "slots_cache", + "startup_nodes", + "address_remap", + ) + + def __init__( + self, + startup_nodes: List["ClusterNode"], + require_full_coverage: bool, + connection_kwargs: Dict[str, Any], + address_remap: Optional[Callable[[str, int], Tuple[str, int]]] = None, + ) -> None: + self.startup_nodes = {node.name: node for node in startup_nodes} + self.require_full_coverage = require_full_coverage + self.connection_kwargs = connection_kwargs + self.address_remap = address_remap + + self.default_node: "ClusterNode" = None + self.nodes_cache: Dict[str, "ClusterNode"] = {} + self.slots_cache: Dict[int, List["ClusterNode"]] = {} + self.read_load_balancer = LoadBalancer() + self._moved_exception: MovedError = None + + def get_node( + self, + host: Optional[str] = None, + port: Optional[int] = None, + node_name: Optional[str] = None, + ) -> Optional["ClusterNode"]: + if host and port: + # the user passed host and port + if host == "localhost": + host = socket.gethostbyname(host) + return self.nodes_cache.get(get_node_name(host=host, port=port)) + elif node_name: + return self.nodes_cache.get(node_name) + else: + raise DataError( + "get_node requires one of the following: " + "1. node name " + "2. host and port" + ) + + def set_nodes( + self, + old: Dict[str, "ClusterNode"], + new: Dict[str, "ClusterNode"], + remove_old: bool = False, + ) -> None: + if remove_old: + for name in list(old.keys()): + if name not in new: + task = asyncio.create_task(old.pop(name).disconnect()) # noqa + + for name, node in new.items(): + if name in old: + if old[name] is node: + continue + task = asyncio.create_task(old[name].disconnect()) # noqa + old[name] = node + + def _update_moved_slots(self) -> None: + e = self._moved_exception + redirected_node = self.get_node(host=e.host, port=e.port) + if redirected_node: + # The node already exists + if redirected_node.server_type != PRIMARY: + # Update the node's server type + redirected_node.server_type = PRIMARY + else: + # This is a new node, we will add it to the nodes cache + redirected_node = ClusterNode( + e.host, e.port, PRIMARY, **self.connection_kwargs + ) + self.set_nodes(self.nodes_cache, {redirected_node.name: redirected_node}) + if redirected_node in self.slots_cache[e.slot_id]: + # The MOVED error resulted from a failover, and the new slot owner + # had previously been a replica. + old_primary = self.slots_cache[e.slot_id][0] + # Update the old primary to be a replica and add it to the end of + # the slot's node list + old_primary.server_type = REPLICA + self.slots_cache[e.slot_id].append(old_primary) + # Remove the old replica, which is now a primary, from the slot's + # node list + self.slots_cache[e.slot_id].remove(redirected_node) + # Override the old primary with the new one + self.slots_cache[e.slot_id][0] = redirected_node + if self.default_node == old_primary: + # Update the default node with the new primary + self.default_node = redirected_node + else: + # The new slot owner is a new server, or a server from a different + # shard. 
We need to remove all current nodes from the slot's list + # (including replications) and add just the new node. + self.slots_cache[e.slot_id] = [redirected_node] + # Reset moved_exception + self._moved_exception = None + + def get_node_from_slot( + self, slot: int, read_from_replicas: bool = False + ) -> "ClusterNode": + if self._moved_exception: + self._update_moved_slots() + + try: + if read_from_replicas: + # get the server index in a Round-Robin manner + primary_name = self.slots_cache[slot][0].name + node_idx = self.read_load_balancer.get_server_index( + primary_name, len(self.slots_cache[slot]) + ) + return self.slots_cache[slot][node_idx] + return self.slots_cache[slot][0] + except (IndexError, TypeError): + raise SlotNotCoveredError( + f'Slot "{slot}" not covered by the cluster. ' + f'"require_full_coverage={self.require_full_coverage}"' + ) + + def get_nodes_by_server_type(self, server_type: str) -> List["ClusterNode"]: + return [ + node + for node in self.nodes_cache.values() + if node.server_type == server_type + ] + + async def initialize(self) -> None: + self.read_load_balancer.reset() + tmp_nodes_cache: Dict[str, "ClusterNode"] = {} + tmp_slots: Dict[int, List["ClusterNode"]] = {} + disagreements = [] + startup_nodes_reachable = False + fully_covered = False + exception = None + for startup_node in self.startup_nodes.values(): + try: + # Make sure cluster mode is enabled on this node + if not (await startup_node.execute_command("INFO")).get( + "cluster_enabled" + ): + raise RedisClusterException( + "Cluster mode is not enabled on this node" + ) + cluster_slots = await startup_node.execute_command("CLUSTER SLOTS") + startup_nodes_reachable = True + except Exception as e: + # Try the next startup node. + # The exception is saved and raised only if we have no more nodes. + exception = e + continue + + # CLUSTER SLOTS command results in the following output: + # [[slot_section[from_slot,to_slot,master,replica1,...,replicaN]]] + # where each node contains the following list: [IP, port, node_id] + # Therefore, cluster_slots[0][2][0] will be the IP address of the + # primary node of the first slot section. 
+ # If there's only one server in the cluster, its ``host`` is '' + # Fix it to the host in startup_nodes + if ( + len(cluster_slots) == 1 + and not cluster_slots[0][2][0] + and len(self.startup_nodes) == 1 + ): + cluster_slots[0][2][0] = startup_node.host + + for slot in cluster_slots: + for i in range(2, len(slot)): + slot[i] = [str_if_bytes(val) for val in slot[i]] + primary_node = slot[2] + host = primary_node[0] + if host == "": + host = startup_node.host + port = int(primary_node[1]) + host, port = self.remap_host_port(host, port) + + target_node = tmp_nodes_cache.get(get_node_name(host, port)) + if not target_node: + target_node = ClusterNode( + host, port, PRIMARY, **self.connection_kwargs + ) + # add this node to the nodes cache + tmp_nodes_cache[target_node.name] = target_node + + for i in range(int(slot[0]), int(slot[1]) + 1): + if i not in tmp_slots: + tmp_slots[i] = [] + tmp_slots[i].append(target_node) + replica_nodes = [slot[j] for j in range(3, len(slot))] + + for replica_node in replica_nodes: + host = replica_node[0] + port = replica_node[1] + host, port = self.remap_host_port(host, port) + + target_replica_node = tmp_nodes_cache.get( + get_node_name(host, port) + ) + if not target_replica_node: + target_replica_node = ClusterNode( + host, port, REPLICA, **self.connection_kwargs + ) + tmp_slots[i].append(target_replica_node) + # add this node to the nodes cache + tmp_nodes_cache[ + target_replica_node.name + ] = target_replica_node + else: + # Validate that 2 nodes want to use the same slot cache + # setup + tmp_slot = tmp_slots[i][0] + if tmp_slot.name != target_node.name: + disagreements.append( + f"{tmp_slot.name} vs {target_node.name} on slot: {i}" + ) + + if len(disagreements) > 5: + raise RedisClusterException( + f"startup_nodes could not agree on a valid " + f'slots cache: {", ".join(disagreements)}' + ) + + # Validate if all slots are covered or if we should try the next startup node + fully_covered = True + for i in range(REDIS_CLUSTER_HASH_SLOTS): + if i not in tmp_slots: + fully_covered = False + break + if fully_covered: + break + + if not startup_nodes_reachable: + raise RedisClusterException( + f"Cannot connect to Redis Cluster. Please provide at least " + f"one reachable node: {str(exception)}" + ) from exception + + # Check if the slots are not fully covered + if not fully_covered and self.require_full_coverage: + # Despite the requirement that the slots be covered, there + # isn't a full coverage + raise RedisClusterException( + f"Not all slots are covered after querying all startup_nodes. " + f"{len(tmp_slots)} of {REDIS_CLUSTER_HASH_SLOTS} " + f"covered..." + ) + + # Set the tmp variables to the real variables + self.slots_cache = tmp_slots + self.set_nodes(self.nodes_cache, tmp_nodes_cache, remove_old=True) + # Populate the startup nodes with all discovered nodes + self.set_nodes(self.startup_nodes, self.nodes_cache, remove_old=True) + + # Set the default node + self.default_node = self.get_nodes_by_server_type(PRIMARY)[0] + # If initialize was called after a MovedError, clear it + self._moved_exception = None + + async def aclose(self, attr: str = "nodes_cache") -> None: + self.default_node = None + await asyncio.gather( + *( + asyncio.create_task(node.disconnect()) + for node in getattr(self, attr).values() + ) + ) + + def remap_host_port(self, host: str, port: int) -> Tuple[str, int]: + """ + Remap the host and port returned from the cluster to a different + internal value. Useful if the client is not connecting directly + to the cluster.
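+
+        A sketch of such a callable (the mapping itself is illustrative);
+        note that it receives a single ``(host, port)`` tuple, as passed
+        below:
+
+        >>> def remap(address):
+        ...     host, port = address
+        ...     return ("10.0.0.1", port)
+        >>> rc = RedisCluster("localhost", 6379, address_remap=remap)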
+ """ + if self.address_remap: + return self.address_remap((host, port)) + return host, port + + +class ClusterPipeline(AbstractRedis, AbstractRedisCluster, AsyncRedisClusterCommands): + """ + Create a new ClusterPipeline object. + + Usage:: + + result = await ( + rc.pipeline() + .set("A", 1) + .get("A") + .hset("K", "F", "V") + .hgetall("K") + .mset_nonatomic({"A": 2, "B": 3}) + .get("A") + .get("B") + .delete("A", "B", "K") + .execute() + ) + # result = [True, "1", 1, {"F": "V"}, True, True, "2", "3", 1, 1, 1] + + Note: For commands `DELETE`, `EXISTS`, `TOUCH`, `UNLINK`, `mset_nonatomic`, which + are split across multiple nodes, you'll get multiple results for them in the array. + + Retryable errors: + - :class:`~.ClusterDownError` + - :class:`~.ConnectionError` + - :class:`~.TimeoutError` + + Redirection errors: + - :class:`~.TryAgainError` + - :class:`~.MovedError` + - :class:`~.AskError` + + :param client: + | Existing :class:`~.RedisCluster` client + """ + + __slots__ = ("_command_stack", "_client") + + def __init__(self, client: RedisCluster) -> None: + self._client = client + + self._command_stack: List["PipelineCommand"] = [] + + async def initialize(self) -> "ClusterPipeline": + if self._client._initialize: + await self._client.initialize() + self._command_stack = [] + return self + + async def __aenter__(self) -> "ClusterPipeline": + return await self.initialize() + + async def __aexit__(self, exc_type: None, exc_value: None, traceback: None) -> None: + self._command_stack = [] + + def __await__(self) -> Generator[Any, None, "ClusterPipeline"]: + return self.initialize().__await__() + + def __enter__(self) -> "ClusterPipeline": + self._command_stack = [] + return self + + def __exit__(self, exc_type: None, exc_value: None, traceback: None) -> None: + self._command_stack = [] + + def __bool__(self) -> bool: + return bool(self._command_stack) + + def __len__(self) -> int: + return len(self._command_stack) + + def execute_command( + self, *args: Union[KeyT, EncodableT], **kwargs: Any + ) -> "ClusterPipeline": + """ + Append a raw command to the pipeline. + + :param args: + | Raw command args + :param kwargs: + + - target_nodes: :attr:`NODE_FLAGS` or :class:`~.ClusterNode` + or List[:class:`~.ClusterNode`] or Dict[Any, :class:`~.ClusterNode`] + - Rest of the kwargs are passed to the Redis connection + """ + self._command_stack.append( + PipelineCommand(len(self._command_stack), *args, **kwargs) + ) + return self + + async def execute( + self, raise_on_error: bool = True, allow_redirections: bool = True + ) -> List[Any]: + """ + Execute the pipeline. + + It will retry the commands as specified by :attr:`cluster_error_retry_attempts` + & then raise an exception. + + :param raise_on_error: + | Raise the first error if there are any errors + :param allow_redirections: + | Whether to retry each failed command individually in case of redirection + errors + + :raises RedisClusterException: if target_nodes is not provided & the command + can't be mapped to a slot + """ + if not self._command_stack: + return [] + + try: + for _ in range(self._client.cluster_error_retry_attempts): + if self._client._initialize: + await self._client.initialize() + + try: + return await self._execute( + self._client, + self._command_stack, + raise_on_error=raise_on_error, + allow_redirections=allow_redirections, + ) + except BaseException as e: + if type(e) in self.__class__.ERRORS_ALLOW_RETRY: + # Try again with the new cluster setup. 
+ exception = e + await self._client.aclose() + await asyncio.sleep(0.25) + else: + # All other errors should be raised. + raise + + # If it fails the configured number of times then raise an exception + raise exception + finally: + self._command_stack = [] + + async def _execute( + self, + client: "RedisCluster", + stack: List["PipelineCommand"], + raise_on_error: bool = True, + allow_redirections: bool = True, + ) -> List[Any]: + todo = [ + cmd for cmd in stack if not cmd.result or isinstance(cmd.result, Exception) + ] + + nodes = {} + for cmd in todo: + passed_targets = cmd.kwargs.pop("target_nodes", None) + if passed_targets and not client._is_node_flag(passed_targets): + target_nodes = client._parse_target_nodes(passed_targets) + else: + target_nodes = await client._determine_nodes( + *cmd.args, node_flag=passed_targets + ) + if not target_nodes: + raise RedisClusterException( + f"No targets were found to execute {cmd.args} command on" + ) + if len(target_nodes) > 1: + raise RedisClusterException(f"Too many targets for command {cmd.args}") + node = target_nodes[0] + if node.name not in nodes: + nodes[node.name] = (node, []) + nodes[node.name][1].append(cmd) + + errors = await asyncio.gather( + *( + asyncio.create_task(node[0].execute_pipeline(node[1])) + for node in nodes.values() + ) + ) + + if any(errors): + if allow_redirections: + # send each errored command individually + for cmd in todo: + if isinstance(cmd.result, (TryAgainError, MovedError, AskError)): + try: + cmd.result = await client.execute_command( + *cmd.args, **cmd.kwargs + ) + except Exception as e: + cmd.result = e + + if raise_on_error: + for cmd in todo: + result = cmd.result + if isinstance(result, Exception): + command = " ".join(map(safe_str, cmd.args)) + msg = ( + f"Command # {cmd.position + 1} ({command}) of pipeline " + f"caused error: {result.args}" + ) + result.args = (msg,) + result.args[1:] + raise result + + default_node = nodes.get(client.get_default_node().name) + if default_node is not None: + # This pipeline execution used the default node, check if we need + # to replace it. + # Note: when the error is raised we'll reset the default node in the + # caller function. 
+ for cmd in default_node[1]: + # Check if it has a command that failed with a relevant + # exception + if type(cmd.result) in self.__class__.ERRORS_ALLOW_RETRY: + client.replace_default_node() + break + + return [cmd.result for cmd in stack] + + def _split_command_across_slots( + self, command: str, *keys: KeyT + ) -> "ClusterPipeline": + for slot_keys in self._client._partition_keys_by_slot(keys).values(): + self.execute_command(command, *slot_keys) + + return self + + def mset_nonatomic( + self, mapping: Mapping[AnyKeyT, EncodableT] + ) -> "ClusterPipeline": + encoder = self._client.encoder + + slots_pairs = {} + for pair in mapping.items(): + slot = key_slot(encoder.encode(pair[0])) + slots_pairs.setdefault(slot, []).extend(pair) + + for pairs in slots_pairs.values(): + self.execute_command("MSET", *pairs) + + return self + + +for command in PIPELINE_BLOCKED_COMMANDS: + command = command.replace(" ", "_").lower() + if command == "mset_nonatomic": + continue + + setattr(ClusterPipeline, command, block_pipeline_command(command)) + + +class PipelineCommand: + def __init__(self, position: int, *args: Any, **kwargs: Any) -> None: + self.args = args + self.kwargs = kwargs + self.position = position + self.result: Union[Any, Exception] = None + + def __repr__(self) -> str: + return f"[{self.position}] {self.args} ({self.kwargs})" diff --git a/templates/skills/spotify/dependencies/redis/asyncio/connection.py b/templates/skills/spotify/dependencies/redis/asyncio/connection.py new file mode 100644 index 00000000..0074bce5 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/asyncio/connection.py @@ -0,0 +1,1242 @@ +import asyncio +import copy +import enum +import inspect +import socket +import ssl +import sys +import warnings +import weakref +from abc import abstractmethod +from itertools import chain +from types import MappingProxyType +from typing import ( + Any, + Callable, + Iterable, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) +from urllib.parse import ParseResult, parse_qs, unquote, urlparse + +# the functionality is available in 3.11.x but has a major issue before +# 3.11.3. See https://github.com/redis/redis-py/issues/2633 +if sys.version_info >= (3, 11, 3): + from asyncio import timeout as async_timeout +else: + from async_timeout import timeout as async_timeout + +from redis.asyncio.retry import Retry +from redis.backoff import NoBackoff +from redis.compat import Protocol, TypedDict +from redis.connection import DEFAULT_RESP_VERSION +from redis.credentials import CredentialProvider, UsernamePasswordCredentialProvider +from redis.exceptions import ( + AuthenticationError, + AuthenticationWrongNumberOfArgsError, + ConnectionError, + DataError, + RedisError, + ResponseError, + TimeoutError, +) +from redis.typing import EncodableT +from redis.utils import HIREDIS_AVAILABLE, get_lib_version, str_if_bytes + +from .._parsers import ( + BaseParser, + Encoder, + _AsyncHiredisParser, + _AsyncRESP2Parser, + _AsyncRESP3Parser, +) + +SYM_STAR = b"*" +SYM_DOLLAR = b"$" +SYM_CRLF = b"\r\n" +SYM_LF = b"\n" +SYM_EMPTY = b"" + + +class _Sentinel(enum.Enum): + sentinel = object() + + +SENTINEL = _Sentinel.sentinel + + +DefaultParser: Type[Union[_AsyncRESP2Parser, _AsyncRESP3Parser, _AsyncHiredisParser]] +if HIREDIS_AVAILABLE: + DefaultParser = _AsyncHiredisParser +else: + DefaultParser = _AsyncRESP2Parser + + +class ConnectCallbackProtocol(Protocol): + def __call__(self, connection: "AbstractConnection"): + ... 
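+
+# For example (illustrative only), a connect callback matching these
+# protocols can be either synchronous or asynchronous:
+#
+#     async def resubscribe(connection: "AbstractConnection") -> None:
+#         ...  # e.g. re-issue SUBSCRIBE commands after a reconnect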
+ + +class AsyncConnectCallbackProtocol(Protocol): + async def __call__(self, connection: "AbstractConnection"): + ... + + +ConnectCallbackT = Union[ConnectCallbackProtocol, AsyncConnectCallbackProtocol] + + +class AbstractConnection: + """Manages communication to and from a Redis server""" + + __slots__ = ( + "db", + "username", + "client_name", + "lib_name", + "lib_version", + "credential_provider", + "password", + "socket_timeout", + "socket_connect_timeout", + "redis_connect_func", + "retry_on_timeout", + "retry_on_error", + "health_check_interval", + "next_health_check", + "last_active_at", + "encoder", + "ssl_context", + "protocol", + "_reader", + "_writer", + "_parser", + "_connect_callbacks", + "_buffer_cutoff", + "_lock", + "_socket_read_size", + "__dict__", + ) + + def __init__( + self, + *, + db: Union[str, int] = 0, + password: Optional[str] = None, + socket_timeout: Optional[float] = None, + socket_connect_timeout: Optional[float] = None, + retry_on_timeout: bool = False, + retry_on_error: Union[list, _Sentinel] = SENTINEL, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class: Type[BaseParser] = DefaultParser, + socket_read_size: int = 65536, + health_check_interval: float = 0, + client_name: Optional[str] = None, + lib_name: Optional[str] = "redis-py", + lib_version: Optional[str] = get_lib_version(), + username: Optional[str] = None, + retry: Optional[Retry] = None, + redis_connect_func: Optional[ConnectCallbackT] = None, + encoder_class: Type[Encoder] = Encoder, + credential_provider: Optional[CredentialProvider] = None, + protocol: Optional[int] = 2, + ): + if (username or password) and credential_provider is not None: + raise DataError( + "'username' and 'password' cannot be passed along with 'credential_" + "provider'. Please provide only one of the following arguments: \n" + "1. 'password' and (optional) 'username'\n" + "2. 
'credential_provider'" + ) + self.db = db + self.client_name = client_name + self.lib_name = lib_name + self.lib_version = lib_version + self.credential_provider = credential_provider + self.password = password + self.username = username + self.socket_timeout = socket_timeout + if socket_connect_timeout is None: + socket_connect_timeout = socket_timeout + self.socket_connect_timeout = socket_connect_timeout + self.retry_on_timeout = retry_on_timeout + if retry_on_error is SENTINEL: + retry_on_error = [] + if retry_on_timeout: + retry_on_error.append(TimeoutError) + retry_on_error.append(socket.timeout) + retry_on_error.append(asyncio.TimeoutError) + self.retry_on_error = retry_on_error + if retry or retry_on_error: + if not retry: + self.retry = Retry(NoBackoff(), 1) + else: + # deep-copy the Retry object as it is mutable + self.retry = copy.deepcopy(retry) + # Update the retry's supported errors with the specified errors + self.retry.update_supported_errors(retry_on_error) + else: + self.retry = Retry(NoBackoff(), 0) + self.health_check_interval = health_check_interval + self.next_health_check: float = -1 + self.encoder = encoder_class(encoding, encoding_errors, decode_responses) + self.redis_connect_func = redis_connect_func + self._reader: Optional[asyncio.StreamReader] = None + self._writer: Optional[asyncio.StreamWriter] = None + self._socket_read_size = socket_read_size + self.set_parser(parser_class) + self._connect_callbacks: List[weakref.WeakMethod[ConnectCallbackT]] = [] + self._buffer_cutoff = 6000 + try: + p = int(protocol) + except TypeError: + p = DEFAULT_RESP_VERSION + except ValueError: + raise ConnectionError("protocol must be an integer") + finally: + if p < 2 or p > 3: + raise ConnectionError("protocol must be either 2 or 3") + self.protocol = protocol + + def __del__(self, _warnings: Any = warnings): + # For some reason, the individual streams don't get properly garbage + # collected and therefore produce no resource warnings. We add one + # here, in the same style as those from the stdlib. + if getattr(self, "_writer", None): + _warnings.warn( + f"unclosed Connection {self!r}", ResourceWarning, source=self + ) + self._close() + + def _close(self): + """ + Internal method to silently close the connection without waiting + """ + if self._writer: + self._writer.close() + self._writer = self._reader = None + + def __repr__(self): + repr_args = ",".join((f"{k}={v}" for k, v in self.repr_pieces())) + return f"{self.__class__.__name__}<{repr_args}>" + + @abstractmethod + def repr_pieces(self): + pass + + @property + def is_connected(self): + return self._reader is not None and self._writer is not None + + def register_connect_callback(self, callback): + """ + Register a callback to be called when the connection is established either + initially or reconnected. This allows listeners to issue commands that + are ephemeral to the connection, for example pub/sub subscription or + key tracking. The callback must be a _method_ and will be kept as + a weak reference. + """ + wm = weakref.WeakMethod(callback) + if wm not in self._connect_callbacks: + self._connect_callbacks.append(wm) + + def deregister_connect_callback(self, callback): + """ + De-register a previously registered callback. It will no-longer receive + notifications on connection events. Calling this is not required when the + listener goes away, since the callbacks are kept as weak methods. 
+ """ + try: + self._connect_callbacks.remove(weakref.WeakMethod(callback)) + except ValueError: + pass + + def set_parser(self, parser_class: Type[BaseParser]) -> None: + """ + Creates a new instance of parser_class with socket size: + _socket_read_size and assigns it to the parser for the connection + :param parser_class: The required parser class + """ + self._parser = parser_class(socket_read_size=self._socket_read_size) + + async def connect(self): + """Connects to the Redis server if not already connected""" + if self.is_connected: + return + try: + await self.retry.call_with_retry( + lambda: self._connect(), lambda error: self.disconnect() + ) + except asyncio.CancelledError: + raise # in 3.7 and earlier, this is an Exception, not BaseException + except (socket.timeout, asyncio.TimeoutError): + raise TimeoutError("Timeout connecting to server") + except OSError as e: + raise ConnectionError(self._error_message(e)) + except Exception as exc: + raise ConnectionError(exc) from exc + + try: + if not self.redis_connect_func: + # Use the default on_connect function + await self.on_connect() + else: + # Use the passed function redis_connect_func + await self.redis_connect_func(self) if asyncio.iscoroutinefunction( + self.redis_connect_func + ) else self.redis_connect_func(self) + except RedisError: + # clean up after any error in on_connect + await self.disconnect() + raise + + # run any user callbacks. right now the only internal callback + # is for pubsub channel/pattern resubscription + # first, remove any dead weakrefs + self._connect_callbacks = [ref for ref in self._connect_callbacks if ref()] + for ref in self._connect_callbacks: + callback = ref() + task = callback(self) + if task and inspect.isawaitable(task): + await task + + @abstractmethod + async def _connect(self): + pass + + @abstractmethod + def _host_error(self) -> str: + pass + + @abstractmethod + def _error_message(self, exception: BaseException) -> str: + pass + + async def on_connect(self) -> None: + """Initialize the connection, authenticate and select a database""" + self._parser.on_connect(self) + parser = self._parser + + auth_args = None + # if credential provider or username and/or password are set, authenticate + if self.credential_provider or (self.username or self.password): + cred_provider = ( + self.credential_provider + or UsernamePasswordCredentialProvider(self.username, self.password) + ) + auth_args = cred_provider.get_credentials() + # if resp version is specified and we have auth args, + # we need to send them via HELLO + if auth_args and self.protocol not in [2, "2"]: + if isinstance(self._parser, _AsyncRESP2Parser): + self.set_parser(_AsyncRESP3Parser) + # update cluster exception classes + self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES + self._parser.on_connect(self) + if len(auth_args) == 1: + auth_args = ["default", auth_args[0]] + await self.send_command("HELLO", self.protocol, "AUTH", *auth_args) + response = await self.read_response() + if response.get(b"proto") != int(self.protocol) and response.get( + "proto" + ) != int(self.protocol): + raise ConnectionError("Invalid RESP version") + # avoid checking health here -- PING will fail if we try + # to check the health prior to the AUTH + elif auth_args: + await self.send_command("AUTH", *auth_args, check_health=False) + + try: + auth_response = await self.read_response() + except AuthenticationWrongNumberOfArgsError: + # a username and password were specified but the Redis + # server seems to be < 6.0.0 which expects a single password 
+ # arg. retry auth with just the password. + # https://github.com/andymccurdy/redis-py/issues/1274 + await self.send_command("AUTH", auth_args[-1], check_health=False) + auth_response = await self.read_response() + + if str_if_bytes(auth_response) != "OK": + raise AuthenticationError("Invalid Username or Password") + + # if resp version is specified, switch to it + elif self.protocol not in [2, "2"]: + if isinstance(self._parser, _AsyncRESP2Parser): + self.set_parser(_AsyncRESP3Parser) + # update cluster exception classes + self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES + self._parser.on_connect(self) + await self.send_command("HELLO", self.protocol) + response = await self.read_response() + # if response.get(b"proto") != self.protocol and response.get( + # "proto" + # ) != self.protocol: + # raise ConnectionError("Invalid RESP version") + + # if a client_name is given, set it + if self.client_name: + await self.send_command("CLIENT", "SETNAME", self.client_name) + if str_if_bytes(await self.read_response()) != "OK": + raise ConnectionError("Error setting client name") + + # set the library name and version, pipeline for lower startup latency + if self.lib_name: + await self.send_command("CLIENT", "SETINFO", "LIB-NAME", self.lib_name) + if self.lib_version: + await self.send_command("CLIENT", "SETINFO", "LIB-VER", self.lib_version) + # if a database is specified, switch to it. Also pipeline this + if self.db: + await self.send_command("SELECT", self.db) + + # read responses from pipeline + for _ in (sent for sent in (self.lib_name, self.lib_version) if sent): + try: + await self.read_response() + except ResponseError: + pass + + if self.db: + if str_if_bytes(await self.read_response()) != "OK": + raise ConnectionError("Invalid Database") + + async def disconnect(self, nowait: bool = False) -> None: + """Disconnects from the Redis server""" + try: + async with async_timeout(self.socket_connect_timeout): + self._parser.on_disconnect() + if not self.is_connected: + return + try: + self._writer.close() # type: ignore[union-attr] + # wait for close to finish, except when handling errors and + # forcefully disconnecting. 
+ if not nowait: + await self._writer.wait_closed() # type: ignore[union-attr] + except OSError: + pass + finally: + self._reader = None + self._writer = None + except asyncio.TimeoutError: + raise TimeoutError( + f"Timed out closing connection after {self.socket_connect_timeout}" + ) from None + + async def _send_ping(self): + """Send PING, expect PONG in return""" + await self.send_command("PING", check_health=False) + if str_if_bytes(await self.read_response()) != "PONG": + raise ConnectionError("Bad response from PING health check") + + async def _ping_failed(self, error): + """Function to call when PING fails""" + await self.disconnect() + + async def check_health(self): + """Check the health of the connection with a PING/PONG""" + if ( + self.health_check_interval + and asyncio.get_running_loop().time() > self.next_health_check + ): + await self.retry.call_with_retry(self._send_ping, self._ping_failed) + + async def _send_packed_command(self, command: Iterable[bytes]) -> None: + self._writer.writelines(command) + await self._writer.drain() + + async def send_packed_command( + self, command: Union[bytes, str, Iterable[bytes]], check_health: bool = True + ) -> None: + if not self.is_connected: + await self.connect() + elif check_health: + await self.check_health() + + try: + if isinstance(command, str): + command = command.encode() + if isinstance(command, bytes): + command = [command] + if self.socket_timeout: + await asyncio.wait_for( + self._send_packed_command(command), self.socket_timeout + ) + else: + self._writer.writelines(command) + await self._writer.drain() + except asyncio.TimeoutError: + await self.disconnect(nowait=True) + raise TimeoutError("Timeout writing to socket") from None + except OSError as e: + await self.disconnect(nowait=True) + if len(e.args) == 1: + err_no, errmsg = "UNKNOWN", e.args[0] + else: + err_no = e.args[0] + errmsg = e.args[1] + raise ConnectionError( + f"Error {err_no} while writing to socket. {errmsg}." + ) from e + except BaseException: + # BaseExceptions can be raised when a socket send operation is not + # finished, e.g. due to a timeout. Ideally, a caller could then re-try + # to send un-sent data. However, the send_packed_command() API + # does not support it so there is no point in keeping the connection open. 
+ await self.disconnect(nowait=True) + raise + + async def send_command(self, *args: Any, **kwargs: Any) -> None: + """Pack and send a command to the Redis server""" + await self.send_packed_command( + self.pack_command(*args), check_health=kwargs.get("check_health", True) + ) + + async def can_read_destructive(self): + """Poll the socket to see if there's data that can be read.""" + try: + return await self._parser.can_read_destructive() + except OSError as e: + await self.disconnect(nowait=True) + host_error = self._host_error() + raise ConnectionError(f"Error while reading from {host_error}: {e.args}") + + async def read_response( + self, + disable_decoding: bool = False, + timeout: Optional[float] = None, + *, + disconnect_on_error: bool = True, + push_request: Optional[bool] = False, + ): + """Read the response from a previously sent command""" + read_timeout = timeout if timeout is not None else self.socket_timeout + host_error = self._host_error() + try: + if ( + read_timeout is not None + and self.protocol in ["3", 3] + and not HIREDIS_AVAILABLE + ): + async with async_timeout(read_timeout): + response = await self._parser.read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + elif read_timeout is not None: + async with async_timeout(read_timeout): + response = await self._parser.read_response( + disable_decoding=disable_decoding + ) + elif self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: + response = await self._parser.read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + else: + response = await self._parser.read_response( + disable_decoding=disable_decoding + ) + except asyncio.TimeoutError: + if timeout is not None: + # user requested timeout, return None. Operation can be retried + return None + # it was a self.socket_timeout error. + if disconnect_on_error: + await self.disconnect(nowait=True) + raise TimeoutError(f"Timeout reading from {host_error}") + except OSError as e: + if disconnect_on_error: + await self.disconnect(nowait=True) + raise ConnectionError(f"Error while reading from {host_error} : {e.args}") + except BaseException: + # Also by default close in case of BaseException. A lot of code + # relies on this behaviour when doing Command/Response pairs. + # See #1128. + if disconnect_on_error: + await self.disconnect(nowait=True) + raise + + if self.health_check_interval: + next_time = asyncio.get_running_loop().time() + self.health_check_interval + self.next_health_check = next_time + + if isinstance(response, ResponseError): + raise response from None + return response + + def pack_command(self, *args: EncodableT) -> List[bytes]: + """Pack a series of arguments into the Redis protocol""" + output = [] + # the client might have included 1 or more literal arguments in + # the command name, e.g., 'CONFIG GET'. The Redis server expects these + # arguments to be sent separately, so split the first argument + # manually. These arguments should be bytestrings so that they are + # not encoded. 
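+        # For example, pack_command("CONFIG GET", "maxmemory") sends the
+        # parts as (b"CONFIG", b"GET", "maxmemory").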
+ assert not isinstance(args[0], float) + if isinstance(args[0], str): + args = tuple(args[0].encode().split()) + args[1:] + elif b" " in args[0]: + args = tuple(args[0].split()) + args[1:] + + buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF)) + + buffer_cutoff = self._buffer_cutoff + for arg in map(self.encoder.encode, args): + # to avoid large string mallocs, chunk the command into the + # output list if we're sending large values or memoryviews + arg_length = len(arg) + if ( + len(buff) > buffer_cutoff + or arg_length > buffer_cutoff + or isinstance(arg, memoryview) + ): + buff = SYM_EMPTY.join( + (buff, SYM_DOLLAR, str(arg_length).encode(), SYM_CRLF) + ) + output.append(buff) + output.append(arg) + buff = SYM_CRLF + else: + buff = SYM_EMPTY.join( + ( + buff, + SYM_DOLLAR, + str(arg_length).encode(), + SYM_CRLF, + arg, + SYM_CRLF, + ) + ) + output.append(buff) + return output + + def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> List[bytes]: + """Pack multiple commands into the Redis protocol""" + output: List[bytes] = [] + pieces: List[bytes] = [] + buffer_length = 0 + buffer_cutoff = self._buffer_cutoff + + for cmd in commands: + for chunk in self.pack_command(*cmd): + chunklen = len(chunk) + if ( + buffer_length > buffer_cutoff + or chunklen > buffer_cutoff + or isinstance(chunk, memoryview) + ): + if pieces: + output.append(SYM_EMPTY.join(pieces)) + buffer_length = 0 + pieces = [] + + if chunklen > buffer_cutoff or isinstance(chunk, memoryview): + output.append(chunk) + else: + pieces.append(chunk) + buffer_length += chunklen + + if pieces: + output.append(SYM_EMPTY.join(pieces)) + return output + + +class Connection(AbstractConnection): + "Manages TCP communication to and from a Redis server" + + def __init__( + self, + *, + host: str = "localhost", + port: Union[str, int] = 6379, + socket_keepalive: bool = False, + socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None, + socket_type: int = 0, + **kwargs, + ): + self.host = host + self.port = int(port) + self.socket_keepalive = socket_keepalive + self.socket_keepalive_options = socket_keepalive_options or {} + self.socket_type = socket_type + super().__init__(**kwargs) + + def repr_pieces(self): + pieces = [("host", self.host), ("port", self.port), ("db", self.db)] + if self.client_name: + pieces.append(("client_name", self.client_name)) + return pieces + + def _connection_arguments(self) -> Mapping: + return {"host": self.host, "port": self.port} + + async def _connect(self): + """Create a TCP socket connection""" + async with async_timeout(self.socket_connect_timeout): + reader, writer = await asyncio.open_connection( + **self._connection_arguments() + ) + self._reader = reader + self._writer = writer + sock = writer.transport.get_extra_info("socket") + if sock: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + try: + # TCP_KEEPALIVE + if self.socket_keepalive: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + for k, v in self.socket_keepalive_options.items(): + sock.setsockopt(socket.SOL_TCP, k, v) + + except (OSError, TypeError): + # `socket_keepalive_options` might contain invalid options + # causing an error. Do not leave the connection open. 
+                writer.close()
+                raise
+
+    def _host_error(self) -> str:
+        return f"{self.host}:{self.port}"
+
+    def _error_message(self, exception: BaseException) -> str:
+        # args for socket.error can either be (errno, "message")
+        # or just "message"
+
+        host_error = self._host_error()
+
+        if not exception.args:
+            # asyncio has a bug where on Connection reset by peer, the
+            # exception is not instantiated, so args is empty. This is the
+            # workaround.
+            # See: https://github.com/redis/redis-py/issues/2237
+            # See: https://github.com/python/cpython/issues/94061
+            return f"Error connecting to {host_error}. Connection reset by peer"
+        elif len(exception.args) == 1:
+            return f"Error connecting to {host_error}. {exception.args[0]}."
+        else:
+            return (
+                f"Error {exception.args[0]} connecting to {host_error}. "
+                f"{exception.args[1]}."
+            )
+
+
+class SSLConnection(Connection):
+    """Manages SSL connections to and from the Redis server(s).
+    This class extends the Connection class, adding SSL functionality, and making
+    use of ssl.SSLContext (https://docs.python.org/3/library/ssl.html#ssl.SSLContext)
+    """
+
+    def __init__(
+        self,
+        ssl_keyfile: Optional[str] = None,
+        ssl_certfile: Optional[str] = None,
+        ssl_cert_reqs: str = "required",
+        ssl_ca_certs: Optional[str] = None,
+        ssl_ca_data: Optional[str] = None,
+        ssl_check_hostname: bool = False,
+        ssl_min_version: Optional[ssl.TLSVersion] = None,
+        ssl_ciphers: Optional[str] = None,
+        **kwargs,
+    ):
+        self.ssl_context: RedisSSLContext = RedisSSLContext(
+            keyfile=ssl_keyfile,
+            certfile=ssl_certfile,
+            cert_reqs=ssl_cert_reqs,
+            ca_certs=ssl_ca_certs,
+            ca_data=ssl_ca_data,
+            check_hostname=ssl_check_hostname,
+            min_version=ssl_min_version,
+            ciphers=ssl_ciphers,
+        )
+        super().__init__(**kwargs)
+
+    def _connection_arguments(self) -> Mapping:
+        kwargs = super()._connection_arguments()
+        kwargs["ssl"] = self.ssl_context.get()
+        return kwargs
+
+    @property
+    def keyfile(self):
+        return self.ssl_context.keyfile
+
+    @property
+    def certfile(self):
+        return self.ssl_context.certfile
+
+    @property
+    def cert_reqs(self):
+        return self.ssl_context.cert_reqs
+
+    @property
+    def ca_certs(self):
+        return self.ssl_context.ca_certs
+
+    @property
+    def ca_data(self):
+        return self.ssl_context.ca_data
+
+    @property
+    def check_hostname(self):
+        return self.ssl_context.check_hostname
+
+    @property
+    def min_version(self):
+        return self.ssl_context.min_version
+
+
+class RedisSSLContext:
+    __slots__ = (
+        "keyfile",
+        "certfile",
+        "cert_reqs",
+        "ca_certs",
+        "ca_data",
+        "context",
+        "check_hostname",
+        "min_version",
+        "ciphers",
+    )
+
+    def __init__(
+        self,
+        keyfile: Optional[str] = None,
+        certfile: Optional[str] = None,
+        cert_reqs: Optional[str] = None,
+        ca_certs: Optional[str] = None,
+        ca_data: Optional[str] = None,
+        check_hostname: bool = False,
+        min_version: Optional[ssl.TLSVersion] = None,
+        ciphers: Optional[str] = None,
+    ):
+        self.keyfile = keyfile
+        self.certfile = certfile
+        if cert_reqs is None:
+            self.cert_reqs = ssl.CERT_NONE
+        elif isinstance(cert_reqs, str):
+            CERT_REQS = {
+                "none": ssl.CERT_NONE,
+                "optional": ssl.CERT_OPTIONAL,
+                "required": ssl.CERT_REQUIRED,
+            }
+            if cert_reqs not in CERT_REQS:
+                raise RedisError(
+                    f"Invalid SSL Certificate Requirements Flag: {cert_reqs}"
+                )
+            self.cert_reqs = CERT_REQS[cert_reqs]
+        self.ca_certs = ca_certs
+        self.ca_data = ca_data
+        self.check_hostname = check_hostname
+        self.min_version = min_version
+        self.ciphers = ciphers
+        self.context: Optional[ssl.SSLContext] = None
+
+    def get(self) -> 
ssl.SSLContext: + if not self.context: + context = ssl.create_default_context() + context.check_hostname = self.check_hostname + context.verify_mode = self.cert_reqs + if self.certfile and self.keyfile: + context.load_cert_chain(certfile=self.certfile, keyfile=self.keyfile) + if self.ca_certs or self.ca_data: + context.load_verify_locations(cafile=self.ca_certs, cadata=self.ca_data) + if self.min_version is not None: + context.minimum_version = self.min_version + if self.ciphers is not None: + context.set_ciphers(self.ciphers) + self.context = context + return self.context + + +class UnixDomainSocketConnection(AbstractConnection): + "Manages UDS communication to and from a Redis server" + + def __init__(self, *, path: str = "", **kwargs): + self.path = path + super().__init__(**kwargs) + + def repr_pieces(self) -> Iterable[Tuple[str, Union[str, int]]]: + pieces = [("path", self.path), ("db", self.db)] + if self.client_name: + pieces.append(("client_name", self.client_name)) + return pieces + + async def _connect(self): + async with async_timeout(self.socket_connect_timeout): + reader, writer = await asyncio.open_unix_connection(path=self.path) + self._reader = reader + self._writer = writer + await self.on_connect() + + def _host_error(self) -> str: + return self.path + + def _error_message(self, exception: BaseException) -> str: + # args for socket.error can either be (errno, "message") + # or just "message" + host_error = self._host_error() + if len(exception.args) == 1: + return ( + f"Error connecting to unix socket: {host_error}. {exception.args[0]}." + ) + else: + return ( + f"Error {exception.args[0]} connecting to unix socket: " + f"{host_error}. {exception.args[1]}." + ) + + +FALSE_STRINGS = ("0", "F", "FALSE", "N", "NO") + + +def to_bool(value) -> Optional[bool]: + if value is None or value == "": + return None + if isinstance(value, str) and value.upper() in FALSE_STRINGS: + return False + return bool(value) + + +URL_QUERY_ARGUMENT_PARSERS: Mapping[str, Callable[..., object]] = MappingProxyType( + { + "db": int, + "socket_timeout": float, + "socket_connect_timeout": float, + "socket_keepalive": to_bool, + "retry_on_timeout": to_bool, + "max_connections": int, + "health_check_interval": int, + "ssl_check_hostname": to_bool, + "timeout": float, + } +) + + +class ConnectKwargs(TypedDict, total=False): + username: str + password: str + connection_class: Type[AbstractConnection] + host: str + port: int + db: int + path: str + + +def parse_url(url: str) -> ConnectKwargs: + parsed: ParseResult = urlparse(url) + kwargs: ConnectKwargs = {} + + for name, value_list in parse_qs(parsed.query).items(): + if value_list and len(value_list) > 0: + value = unquote(value_list[0]) + parser = URL_QUERY_ARGUMENT_PARSERS.get(name) + if parser: + try: + kwargs[name] = parser(value) + except (TypeError, ValueError): + raise ValueError(f"Invalid value for `{name}` in connection URL.") + else: + kwargs[name] = value + + if parsed.username: + kwargs["username"] = unquote(parsed.username) + if parsed.password: + kwargs["password"] = unquote(parsed.password) + + # We only support redis://, rediss:// and unix:// schemes. 
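+    # For example (illustrative):
+    #   parse_url("redis://user:secret@example.com:6380/2?socket_timeout=5")
+    #   -> {"socket_timeout": 5.0, "username": "user", "password": "secret",
+    #       "host": "example.com", "port": 6380, "db": 2}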
+ if parsed.scheme == "unix": + if parsed.path: + kwargs["path"] = unquote(parsed.path) + kwargs["connection_class"] = UnixDomainSocketConnection + + elif parsed.scheme in ("redis", "rediss"): + if parsed.hostname: + kwargs["host"] = unquote(parsed.hostname) + if parsed.port: + kwargs["port"] = int(parsed.port) + + # If there's a path argument, use it as the db argument if a + # querystring value wasn't specified + if parsed.path and "db" not in kwargs: + try: + kwargs["db"] = int(unquote(parsed.path).replace("/", "")) + except (AttributeError, ValueError): + pass + + if parsed.scheme == "rediss": + kwargs["connection_class"] = SSLConnection + else: + valid_schemes = "redis://, rediss://, unix://" + raise ValueError( + f"Redis URL must specify one of the following schemes ({valid_schemes})" + ) + + return kwargs + + +_CP = TypeVar("_CP", bound="ConnectionPool") + + +class ConnectionPool: + """ + Create a connection pool. ``If max_connections`` is set, then this + object raises :py:class:`~redis.ConnectionError` when the pool's + limit is reached. + + By default, TCP connections are created unless ``connection_class`` + is specified. Use :py:class:`~redis.UnixDomainSocketConnection` for + unix sockets. + + Any additional keyword arguments are passed to the constructor of + ``connection_class``. + """ + + @classmethod + def from_url(cls: Type[_CP], url: str, **kwargs) -> _CP: + """ + Return a connection pool configured from the given URL. + + For example:: + + redis://[[username]:[password]]@localhost:6379/0 + rediss://[[username]:[password]]@localhost:6379/0 + unix://[username@]/path/to/socket.sock?db=0[&password=password] + + Three URL schemes are supported: + + - `redis://` creates a TCP socket connection. See more at: + + - `rediss://` creates a SSL wrapped TCP socket connection. See more at: + + - ``unix://``: creates a Unix Domain Socket connection. + + The username, password, hostname, path and all querystring values + are passed through urllib.parse.unquote in order to replace any + percent-encoded values with their corresponding characters. + + There are several ways to specify a database number. The first value + found will be used: + + 1. A ``db`` querystring option, e.g. redis://localhost?db=0 + + 2. If using the redis:// or rediss:// schemes, the path argument + of the url, e.g. redis://localhost/0 + + 3. A ``db`` keyword argument to this function. + + If none of these options are specified, the default db=0 is used. + + All querystring options are cast to their appropriate Python types. + Boolean arguments can be specified with string values "True"/"False" + or "Yes"/"No". Values that cannot be properly cast cause a + ``ValueError`` to be raised. Once parsed, the querystring arguments + and keyword arguments are passed to the ``ConnectionPool``'s + class initializer. In the case of conflicting arguments, querystring + arguments always win. 
+ """ + url_options = parse_url(url) + kwargs.update(url_options) + return cls(**kwargs) + + def __init__( + self, + connection_class: Type[AbstractConnection] = Connection, + max_connections: Optional[int] = None, + **connection_kwargs, + ): + max_connections = max_connections or 2**31 + if not isinstance(max_connections, int) or max_connections < 0: + raise ValueError('"max_connections" must be a positive integer') + + self.connection_class = connection_class + self.connection_kwargs = connection_kwargs + self.max_connections = max_connections + + self._available_connections: List[AbstractConnection] = [] + self._in_use_connections: Set[AbstractConnection] = set() + self.encoder_class = self.connection_kwargs.get("encoder_class", Encoder) + + def __repr__(self): + return ( + f"{self.__class__.__name__}" + f"<{self.connection_class(**self.connection_kwargs)!r}>" + ) + + def reset(self): + self._available_connections = [] + self._in_use_connections = weakref.WeakSet() + + def can_get_connection(self) -> bool: + """Return True if a connection can be retrieved from the pool.""" + return ( + self._available_connections + or len(self._in_use_connections) < self.max_connections + ) + + async def get_connection(self, command_name, *keys, **options): + """Get a connected connection from the pool""" + connection = self.get_available_connection() + try: + await self.ensure_connection(connection) + except BaseException: + await self.release(connection) + raise + + return connection + + def get_available_connection(self): + """Get a connection from the pool, without making sure it is connected""" + try: + connection = self._available_connections.pop() + except IndexError: + if len(self._in_use_connections) >= self.max_connections: + raise ConnectionError("Too many connections") from None + connection = self.make_connection() + self._in_use_connections.add(connection) + return connection + + def get_encoder(self): + """Return an encoder based on encoding settings""" + kwargs = self.connection_kwargs + return self.encoder_class( + encoding=kwargs.get("encoding", "utf-8"), + encoding_errors=kwargs.get("encoding_errors", "strict"), + decode_responses=kwargs.get("decode_responses", False), + ) + + def make_connection(self): + """Create a new connection. Can be overridden by child classes.""" + return self.connection_class(**self.connection_kwargs) + + async def ensure_connection(self, connection: AbstractConnection): + """Ensure that the connection object is connected and valid""" + await connection.connect() + # connections that the pool provides should be ready to send + # a command. if not, the connection was either returned to the + # pool before all data has been read or the socket has been + # closed. either way, reconnect and verify everything is good. + try: + if await connection.can_read_destructive(): + raise ConnectionError("Connection has data") from None + except (ConnectionError, OSError): + await connection.disconnect() + await connection.connect() + if await connection.can_read_destructive(): + raise ConnectionError("Connection not ready") from None + + async def release(self, connection: AbstractConnection): + """Releases the connection back to the pool""" + # Connections should always be returned to the correct pool, + # not doing so is an error that will cause an exception here. 
+        self._in_use_connections.remove(connection)
+        self._available_connections.append(connection)
+
+    async def disconnect(self, inuse_connections: bool = True):
+        """
+        Disconnects connections in the pool
+
+        If ``inuse_connections`` is True, disconnect connections that are
+        currently in use, potentially by other tasks. Otherwise only disconnect
+        connections that are idle in the pool.
+        """
+        if inuse_connections:
+            connections: Iterable[AbstractConnection] = chain(
+                self._available_connections, self._in_use_connections
+            )
+        else:
+            connections = self._available_connections
+        resp = await asyncio.gather(
+            *(connection.disconnect() for connection in connections),
+            return_exceptions=True,
+        )
+        exc = next((r for r in resp if isinstance(r, BaseException)), None)
+        if exc:
+            raise exc
+
+    async def aclose(self) -> None:
+        """Close the pool, disconnecting all connections"""
+        await self.disconnect()
+
+    def set_retry(self, retry: "Retry") -> None:
+        for conn in self._available_connections:
+            conn.retry = retry
+        for conn in self._in_use_connections:
+            conn.retry = retry
+
+
+class BlockingConnectionPool(ConnectionPool):
+    """
+    A blocking connection pool::
+
+    >>> from redis.asyncio import Redis, BlockingConnectionPool
+    >>> client = Redis.from_pool(BlockingConnectionPool())
+
+    It performs the same function as the default
+    :py:class:`~redis.asyncio.ConnectionPool` implementation, in that it
+    maintains a pool of reusable connections that can be shared by
+    multiple async redis clients.
+
+    The difference is that, in the event that a client tries to get a
+    connection from the pool when all of the connections are in use, rather
+    than raising a :py:class:`~redis.ConnectionError` (as the default
+    :py:class:`~redis.asyncio.ConnectionPool` implementation does), it
+    blocks the current `Task` for a specified number of seconds until
+    a connection becomes available.
+
+    Use ``max_connections`` to increase / decrease the pool size::
+
+    >>> pool = BlockingConnectionPool(max_connections=10)
+
+    Use ``timeout`` to tell it either how many seconds to wait for a connection
+    to become available, or to block forever:
+
+    >>> # Block forever.
+    >>> pool = BlockingConnectionPool(timeout=None)
+
+    >>> # Raise a ``ConnectionError`` after five seconds if a connection is
+    >>> # not available.
+    >>> pool = BlockingConnectionPool(timeout=5)
+    """
+
+    def __init__(
+        self,
+        max_connections: int = 50,
+        timeout: Optional[int] = 20,
+        connection_class: Type[AbstractConnection] = Connection,
+        queue_class: Type[asyncio.Queue] = asyncio.LifoQueue,  # deprecated
+        **connection_kwargs,
+    ):
+        super().__init__(
+            connection_class=connection_class,
+            max_connections=max_connections,
+            **connection_kwargs,
+        )
+        self._condition = asyncio.Condition()
+        self.timeout = timeout
+
+    async def get_connection(self, command_name, *keys, **options):
+        """Gets a connection from the pool, blocking until one is available"""
+        try:
+            async with self._condition:
+                async with async_timeout(self.timeout):
+                    await self._condition.wait_for(self.can_get_connection)
+                    connection = super().get_available_connection()
+        except asyncio.TimeoutError as err:
+            raise ConnectionError("No connection available.") from err
+
+        # We now perform the connection check outside of the lock.
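+        # (connecting may block on I/O, so it is done outside the condition
+        # to let other tasks acquire and release connections meanwhile)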
+ try: + await self.ensure_connection(connection) + return connection + except BaseException: + await self.release(connection) + raise + + async def release(self, connection: AbstractConnection): + """Releases the connection back to the pool.""" + async with self._condition: + await super().release(connection) + self._condition.notify() diff --git a/templates/skills/spotify/dependencies/redis/asyncio/lock.py b/templates/skills/spotify/dependencies/redis/asyncio/lock.py new file mode 100644 index 00000000..e1d11a88 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/asyncio/lock.py @@ -0,0 +1,313 @@ +import asyncio +import threading +import uuid +from types import SimpleNamespace +from typing import TYPE_CHECKING, Awaitable, Optional, Union + +from redis.exceptions import LockError, LockNotOwnedError + +if TYPE_CHECKING: + from redis.asyncio import Redis, RedisCluster + + +class Lock: + """ + A shared, distributed Lock. Using Redis for locking allows the Lock + to be shared across processes and/or machines. + + It's left to the user to resolve deadlock issues and make sure + multiple clients play nicely together. + """ + + lua_release = None + lua_extend = None + lua_reacquire = None + + # KEYS[1] - lock name + # ARGV[1] - token + # return 1 if the lock was released, otherwise 0 + LUA_RELEASE_SCRIPT = """ + local token = redis.call('get', KEYS[1]) + if not token or token ~= ARGV[1] then + return 0 + end + redis.call('del', KEYS[1]) + return 1 + """ + + # KEYS[1] - lock name + # ARGV[1] - token + # ARGV[2] - additional milliseconds + # ARGV[3] - "0" if the additional time should be added to the lock's + # existing ttl or "1" if the existing ttl should be replaced + # return 1 if the locks time was extended, otherwise 0 + LUA_EXTEND_SCRIPT = """ + local token = redis.call('get', KEYS[1]) + if not token or token ~= ARGV[1] then + return 0 + end + local expiration = redis.call('pttl', KEYS[1]) + if not expiration then + expiration = 0 + end + if expiration < 0 then + return 0 + end + + local newttl = ARGV[2] + if ARGV[3] == "0" then + newttl = ARGV[2] + expiration + end + redis.call('pexpire', KEYS[1], newttl) + return 1 + """ + + # KEYS[1] - lock name + # ARGV[1] - token + # ARGV[2] - milliseconds + # return 1 if the locks time was reacquired, otherwise 0 + LUA_REACQUIRE_SCRIPT = """ + local token = redis.call('get', KEYS[1]) + if not token or token ~= ARGV[1] then + return 0 + end + redis.call('pexpire', KEYS[1], ARGV[2]) + return 1 + """ + + def __init__( + self, + redis: Union["Redis", "RedisCluster"], + name: Union[str, bytes, memoryview], + timeout: Optional[float] = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: Optional[float] = None, + thread_local: bool = True, + ): + """ + Create a new Lock instance named ``name`` using the Redis client + supplied by ``redis``. + + ``timeout`` indicates a maximum life for the lock in seconds. + By default, it will remain locked until release() is called. + ``timeout`` can be specified as a float or integer, both representing + the number of seconds to wait. + + ``sleep`` indicates the amount of time to sleep in seconds per loop + iteration when the lock is in blocking mode and another client is + currently holding the lock. + + ``blocking`` indicates whether calling ``acquire`` should block until + the lock has been acquired or to fail immediately, causing ``acquire`` + to return False and the lock not being acquired. Defaults to True. 
+ Note this value can be overridden by passing a ``blocking`` + argument to ``acquire``. + + ``blocking_timeout`` indicates the maximum amount of time in seconds to + spend trying to acquire the lock. A value of ``None`` indicates + continue trying forever. ``blocking_timeout`` can be specified as a + float or integer, both representing the number of seconds to wait. + + ``thread_local`` indicates whether the lock token is placed in + thread-local storage. By default, the token is placed in thread local + storage so that a thread only sees its token, not a token set by + another thread. Consider the following timeline: + + time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds. + thread-1 sets the token to "abc" + time: 1, thread-2 blocks trying to acquire `my-lock` using the + Lock instance. + time: 5, thread-1 has not yet completed. redis expires the lock + key. + time: 5, thread-2 acquired `my-lock` now that it's available. + thread-2 sets the token to "xyz" + time: 6, thread-1 finishes its work and calls release(). if the + token is *not* stored in thread local storage, then + thread-1 would see the token value as "xyz" and would be + able to successfully release the thread-2's lock. + + In some use cases it's necessary to disable thread local storage. For + example, if you have code where one thread acquires a lock and passes + that lock instance to a worker thread to release later. If thread + local storage isn't disabled in this case, the worker thread won't see + the token set by the thread that acquired the lock. Our assumption + is that these cases aren't common and as such default to using + thread local storage. + """ + self.redis = redis + self.name = name + self.timeout = timeout + self.sleep = sleep + self.blocking = blocking + self.blocking_timeout = blocking_timeout + self.thread_local = bool(thread_local) + self.local = threading.local() if self.thread_local else SimpleNamespace() + self.local.token = None + self.register_scripts() + + def register_scripts(self): + cls = self.__class__ + client = self.redis + if cls.lua_release is None: + cls.lua_release = client.register_script(cls.LUA_RELEASE_SCRIPT) + if cls.lua_extend is None: + cls.lua_extend = client.register_script(cls.LUA_EXTEND_SCRIPT) + if cls.lua_reacquire is None: + cls.lua_reacquire = client.register_script(cls.LUA_REACQUIRE_SCRIPT) + + async def __aenter__(self): + if await self.acquire(): + return self + raise LockError("Unable to acquire lock within the time specified") + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.release() + + async def acquire( + self, + blocking: Optional[bool] = None, + blocking_timeout: Optional[float] = None, + token: Optional[Union[str, bytes]] = None, + ): + """ + Use Redis to hold a shared, distributed lock named ``name``. + Returns True once the lock is acquired. + + If ``blocking`` is False, always return immediately. If the lock + was acquired, return True, otherwise return False. + + ``blocking_timeout`` specifies the maximum number of seconds to + wait trying to acquire the lock. + + ``token`` specifies the token value to be used. If provided, token + must be a bytes object or a string that can be encoded to a bytes + object with the default encoding. If a token isn't specified, a UUID + will be generated. 
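+
+        A short illustrative sketch, assuming ``client`` is a connected
+        ``redis.asyncio.Redis`` instance::
+
+            lock = client.lock("resource", timeout=10)
+            if await lock.acquire(blocking=True, blocking_timeout=2):
+                try:
+                    ...  # critical section
+                finally:
+                    await lock.release()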
+ """ + sleep = self.sleep + if token is None: + token = uuid.uuid1().hex.encode() + else: + try: + encoder = self.redis.connection_pool.get_encoder() + except AttributeError: + # Cluster + encoder = self.redis.get_encoder() + token = encoder.encode(token) + if blocking is None: + blocking = self.blocking + if blocking_timeout is None: + blocking_timeout = self.blocking_timeout + stop_trying_at = None + if blocking_timeout is not None: + stop_trying_at = asyncio.get_running_loop().time() + blocking_timeout + while True: + if await self.do_acquire(token): + self.local.token = token + return True + if not blocking: + return False + next_try_at = asyncio.get_running_loop().time() + sleep + if stop_trying_at is not None and next_try_at > stop_trying_at: + return False + await asyncio.sleep(sleep) + + async def do_acquire(self, token: Union[str, bytes]) -> bool: + if self.timeout: + # convert to milliseconds + timeout = int(self.timeout * 1000) + else: + timeout = None + if await self.redis.set(self.name, token, nx=True, px=timeout): + return True + return False + + async def locked(self) -> bool: + """ + Returns True if this key is locked by any process, otherwise False. + """ + return await self.redis.get(self.name) is not None + + async def owned(self) -> bool: + """ + Returns True if this key is locked by this lock, otherwise False. + """ + stored_token = await self.redis.get(self.name) + # need to always compare bytes to bytes + # TODO: this can be simplified when the context manager is finished + if stored_token and not isinstance(stored_token, bytes): + try: + encoder = self.redis.connection_pool.get_encoder() + except AttributeError: + # Cluster + encoder = self.redis.get_encoder() + stored_token = encoder.encode(stored_token) + return self.local.token is not None and stored_token == self.local.token + + def release(self) -> Awaitable[None]: + """Releases the already acquired lock""" + expected_token = self.local.token + if expected_token is None: + raise LockError("Cannot release an unlocked lock") + self.local.token = None + return self.do_release(expected_token) + + async def do_release(self, expected_token: bytes) -> None: + if not bool( + await self.lua_release( + keys=[self.name], args=[expected_token], client=self.redis + ) + ): + raise LockNotOwnedError("Cannot release a lock that's no longer owned") + + def extend( + self, additional_time: float, replace_ttl: bool = False + ) -> Awaitable[bool]: + """ + Adds more time to an already acquired lock. + + ``additional_time`` can be specified as an integer or a float, both + representing the number of seconds to add. + + ``replace_ttl`` if False (the default), add `additional_time` to + the lock's existing ttl. If True, replace the lock's ttl with + `additional_time`. + """ + if self.local.token is None: + raise LockError("Cannot extend an unlocked lock") + if self.timeout is None: + raise LockError("Cannot extend a lock with no timeout") + return self.do_extend(additional_time, replace_ttl) + + async def do_extend(self, additional_time, replace_ttl) -> bool: + additional_time = int(additional_time * 1000) + if not bool( + await self.lua_extend( + keys=[self.name], + args=[self.local.token, additional_time, replace_ttl and "1" or "0"], + client=self.redis, + ) + ): + raise LockNotOwnedError("Cannot extend a lock that's no longer owned") + return True + + def reacquire(self) -> Awaitable[bool]: + """ + Resets a TTL of an already acquired lock back to a timeout value. 
+ """ + if self.local.token is None: + raise LockError("Cannot reacquire an unlocked lock") + if self.timeout is None: + raise LockError("Cannot reacquire a lock with no timeout") + return self.do_reacquire() + + async def do_reacquire(self) -> bool: + timeout = int(self.timeout * 1000) + if not bool( + await self.lua_reacquire( + keys=[self.name], args=[self.local.token, timeout], client=self.redis + ) + ): + raise LockNotOwnedError("Cannot reacquire a lock that's no longer owned") + return True diff --git a/templates/skills/spotify/dependencies/redis/asyncio/retry.py b/templates/skills/spotify/dependencies/redis/asyncio/retry.py new file mode 100644 index 00000000..7c5e3b0e --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/asyncio/retry.py @@ -0,0 +1,67 @@ +from asyncio import sleep +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Tuple, Type, TypeVar + +from redis.exceptions import ConnectionError, RedisError, TimeoutError + +if TYPE_CHECKING: + from redis.backoff import AbstractBackoff + + +T = TypeVar("T") + + +class Retry: + """Retry a specific number of times after a failure""" + + __slots__ = "_backoff", "_retries", "_supported_errors" + + def __init__( + self, + backoff: "AbstractBackoff", + retries: int, + supported_errors: Tuple[Type[RedisError], ...] = ( + ConnectionError, + TimeoutError, + ), + ): + """ + Initialize a `Retry` object with a `Backoff` object + that retries a maximum of `retries` times. + `retries` can be negative to retry forever. + You can specify the types of supported errors which trigger + a retry with the `supported_errors` parameter. + """ + self._backoff = backoff + self._retries = retries + self._supported_errors = supported_errors + + def update_supported_errors(self, specified_errors: list): + """ + Updates the supported errors with the specified error types + """ + self._supported_errors = tuple( + set(self._supported_errors + tuple(specified_errors)) + ) + + async def call_with_retry( + self, do: Callable[[], Awaitable[T]], fail: Callable[[RedisError], Any] + ) -> T: + """ + Execute an operation that might fail and returns its result, or + raise the exception that was thrown depending on the `Backoff` object. + `do`: the operation to call. Expects no argument. 
+        `fail`: the failure handler, expects the last error that was thrown
+        """
+        self._backoff.reset()
+        failures = 0
+        while True:
+            try:
+                return await do()
+            except self._supported_errors as error:
+                failures += 1
+                await fail(error)
+                if self._retries >= 0 and failures > self._retries:
+                    raise error
+                backoff = self._backoff.compute(failures)
+                if backoff > 0:
+                    await sleep(backoff)
diff --git a/templates/skills/spotify/dependencies/redis/asyncio/sentinel.py b/templates/skills/spotify/dependencies/redis/asyncio/sentinel.py
new file mode 100644
index 00000000..6834fb19
--- /dev/null
+++ b/templates/skills/spotify/dependencies/redis/asyncio/sentinel.py
@@ -0,0 +1,375 @@
+import asyncio
+import random
+import weakref
+from typing import AsyncIterator, Iterable, Mapping, Optional, Sequence, Tuple, Type
+
+from redis.asyncio.client import Redis
+from redis.asyncio.connection import (
+    Connection,
+    ConnectionPool,
+    EncodableT,
+    SSLConnection,
+)
+from redis.commands import AsyncSentinelCommands
+from redis.exceptions import ConnectionError, ReadOnlyError, ResponseError, TimeoutError
+from redis.utils import str_if_bytes
+
+
+class MasterNotFoundError(ConnectionError):
+    pass
+
+
+class SlaveNotFoundError(ConnectionError):
+    pass
+
+
+class SentinelManagedConnection(Connection):
+    def __init__(self, **kwargs):
+        self.connection_pool = kwargs.pop("connection_pool")
+        super().__init__(**kwargs)
+
+    def __repr__(self):
+        pool = self.connection_pool
+        s = f"{self.__class__.__name__}<service={pool.service_name}"
+        if self.host:
+            s += f",host={self.host},port={self.port}"
+        return s + ">"
+
+    async def connect_to(self, address):
+        self.host, self.port = address
+        await super().connect()
+        if self.connection_pool.check_connection:
+            await self.send_command("PING")
+            if str_if_bytes(await self.read_response()) != "PONG":
+                raise ConnectionError("PING failed")
+
+    async def _connect_retry(self):
+        if self._reader:
+            return  # already connected
+        if self.connection_pool.is_master:
+            await self.connect_to(await self.connection_pool.get_master_address())
+        else:
+            async for slave in self.connection_pool.rotate_slaves():
+                try:
+                    return await self.connect_to(slave)
+                except ConnectionError:
+                    continue
+            raise SlaveNotFoundError  # should never be reached
+
+    async def connect(self):
+        return await self.retry.call_with_retry(
+            self._connect_retry,
+            lambda error: asyncio.sleep(0),
+        )
+
+    async def read_response(
+        self,
+        disable_decoding: bool = False,
+        timeout: Optional[float] = None,
+        *,
+        disconnect_on_error: bool = True,
+        push_request: Optional[bool] = False,
+    ):
+        try:
+            return await super().read_response(
+                disable_decoding=disable_decoding,
+                timeout=timeout,
+                disconnect_on_error=disconnect_on_error,
+                push_request=push_request,
+            )
+        except ReadOnlyError:
+            if self.connection_pool.is_master:
+                # When talking to a master, a ReadOnlyError likely indicates
+                # that the previous master we're still connected to has been
+                # demoted to a slave and there's a new master.
+                # Calling disconnect will force the connection to re-query
+                # sentinel during the next connect() attempt.
+                await self.disconnect()
+                raise ConnectionError("The previous master is now a slave")
+            raise
+
+
+class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection):
+    pass
+
+
+class SentinelConnectionPool(ConnectionPool):
+    """
+    Sentinel backed connection pool.
+
+    If ``check_connection`` flag is set to True, SentinelManagedConnection
+    sends a PING command right after establishing the connection.
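+
+    An illustrative sketch, assuming ``sentinel`` is a :class:`Sentinel`
+    instance::
+
+        pool = SentinelConnectionPool(
+            "mymaster", sentinel, is_master=True, check_connection=True
+        )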
+ """ + + def __init__(self, service_name, sentinel_manager, **kwargs): + kwargs["connection_class"] = kwargs.get( + "connection_class", + SentinelManagedSSLConnection + if kwargs.pop("ssl", False) + else SentinelManagedConnection, + ) + self.is_master = kwargs.pop("is_master", True) + self.check_connection = kwargs.pop("check_connection", False) + super().__init__(**kwargs) + self.connection_kwargs["connection_pool"] = weakref.proxy(self) + self.service_name = service_name + self.sentinel_manager = sentinel_manager + self.master_address = None + self.slave_rr_counter = None + + def __repr__(self): + return ( + f"{self.__class__.__name__}" + f"" + ) + + def reset(self): + super().reset() + self.master_address = None + self.slave_rr_counter = None + + def owns_connection(self, connection: Connection): + check = not self.is_master or ( + self.is_master and self.master_address == (connection.host, connection.port) + ) + return check and super().owns_connection(connection) + + async def get_master_address(self): + master_address = await self.sentinel_manager.discover_master(self.service_name) + if self.is_master: + if self.master_address != master_address: + self.master_address = master_address + # disconnect any idle connections so that they reconnect + # to the new master the next time that they are used. + await self.disconnect(inuse_connections=False) + return master_address + + async def rotate_slaves(self) -> AsyncIterator: + """Round-robin slave balancer""" + slaves = await self.sentinel_manager.discover_slaves(self.service_name) + if slaves: + if self.slave_rr_counter is None: + self.slave_rr_counter = random.randint(0, len(slaves) - 1) + for _ in range(len(slaves)): + self.slave_rr_counter = (self.slave_rr_counter + 1) % len(slaves) + slave = slaves[self.slave_rr_counter] + yield slave + # Fallback to the master connection + try: + yield await self.get_master_address() + except MasterNotFoundError: + pass + raise SlaveNotFoundError(f"No slave found for {self.service_name!r}") + + +class Sentinel(AsyncSentinelCommands): + """ + Redis Sentinel cluster client + + >>> from redis.sentinel import Sentinel + >>> sentinel = Sentinel([('localhost', 26379)], socket_timeout=0.1) + >>> master = sentinel.master_for('mymaster', socket_timeout=0.1) + >>> await master.set('foo', 'bar') + >>> slave = sentinel.slave_for('mymaster', socket_timeout=0.1) + >>> await slave.get('foo') + b'bar' + + ``sentinels`` is a list of sentinel nodes. Each node is represented by + a pair (hostname, port). + + ``min_other_sentinels`` defined a minimum number of peers for a sentinel. + When querying a sentinel, if it doesn't meet this threshold, responses + from that sentinel won't be considered valid. + + ``sentinel_kwargs`` is a dictionary of connection arguments used when + connecting to sentinel instances. Any argument that can be passed to + a normal Redis connection can be specified here. If ``sentinel_kwargs`` is + not specified, any socket_timeout and socket_keepalive options specified + in ``connection_kwargs`` will be used. + + ``connection_kwargs`` are keyword arguments that will be used when + establishing a connection to a Redis server. 
+ """ + + def __init__( + self, + sentinels, + min_other_sentinels=0, + sentinel_kwargs=None, + **connection_kwargs, + ): + # if sentinel_kwargs isn't defined, use the socket_* options from + # connection_kwargs + if sentinel_kwargs is None: + sentinel_kwargs = { + k: v for k, v in connection_kwargs.items() if k.startswith("socket_") + } + self.sentinel_kwargs = sentinel_kwargs + + self.sentinels = [ + Redis(host=hostname, port=port, **self.sentinel_kwargs) + for hostname, port in sentinels + ] + self.min_other_sentinels = min_other_sentinels + self.connection_kwargs = connection_kwargs + + async def execute_command(self, *args, **kwargs): + """ + Execute Sentinel command in sentinel nodes. + once - If set to True, then execute the resulting command on a single + node at random, rather than across the entire sentinel cluster. + """ + once = bool(kwargs.get("once", False)) + if "once" in kwargs.keys(): + kwargs.pop("once") + + if once: + await random.choice(self.sentinels).execute_command(*args, **kwargs) + else: + tasks = [ + asyncio.Task(sentinel.execute_command(*args, **kwargs)) + for sentinel in self.sentinels + ] + await asyncio.gather(*tasks) + return True + + def __repr__(self): + sentinel_addresses = [] + for sentinel in self.sentinels: + sentinel_addresses.append( + f"{sentinel.connection_pool.connection_kwargs['host']}:" + f"{sentinel.connection_pool.connection_kwargs['port']}" + ) + return f"{self.__class__.__name__}" + + def check_master_state(self, state: dict, service_name: str) -> bool: + if not state["is_master"] or state["is_sdown"] or state["is_odown"]: + return False + # Check if our sentinel doesn't see other nodes + if state["num-other-sentinels"] < self.min_other_sentinels: + return False + return True + + async def discover_master(self, service_name: str): + """ + Asks sentinel servers for the Redis master's address corresponding + to the service labeled ``service_name``. + + Returns a pair (address, port) or raises MasterNotFoundError if no + master is found. 
+ """ + collected_errors = list() + for sentinel_no, sentinel in enumerate(self.sentinels): + try: + masters = await sentinel.sentinel_masters() + except (ConnectionError, TimeoutError) as e: + collected_errors.append(f"{sentinel} - {e!r}") + continue + state = masters.get(service_name) + if state and self.check_master_state(state, service_name): + # Put this sentinel at the top of the list + self.sentinels[0], self.sentinels[sentinel_no] = ( + sentinel, + self.sentinels[0], + ) + return state["ip"], state["port"] + + error_info = "" + if len(collected_errors) > 0: + error_info = f" : {', '.join(collected_errors)}" + raise MasterNotFoundError(f"No master found for {service_name!r}{error_info}") + + def filter_slaves( + self, slaves: Iterable[Mapping] + ) -> Sequence[Tuple[EncodableT, EncodableT]]: + """Remove slaves that are in an ODOWN or SDOWN state""" + slaves_alive = [] + for slave in slaves: + if slave["is_odown"] or slave["is_sdown"]: + continue + slaves_alive.append((slave["ip"], slave["port"])) + return slaves_alive + + async def discover_slaves( + self, service_name: str + ) -> Sequence[Tuple[EncodableT, EncodableT]]: + """Returns a list of alive slaves for service ``service_name``""" + for sentinel in self.sentinels: + try: + slaves = await sentinel.sentinel_slaves(service_name) + except (ConnectionError, ResponseError, TimeoutError): + continue + slaves = self.filter_slaves(slaves) + if slaves: + return slaves + return [] + + def master_for( + self, + service_name: str, + redis_class: Type[Redis] = Redis, + connection_pool_class: Type[SentinelConnectionPool] = SentinelConnectionPool, + **kwargs, + ): + """ + Returns a redis client instance for the ``service_name`` master. + + A :py:class:`~redis.sentinel.SentinelConnectionPool` class is + used to retrieve the master's address before establishing a new + connection. + + NOTE: If the master's address has changed, any cached connections to + the old master are closed. + + By default clients will be a :py:class:`~redis.Redis` instance. + Specify a different class to the ``redis_class`` argument if you + desire something different. + + The ``connection_pool_class`` specifies the connection pool to + use. The :py:class:`~redis.sentinel.SentinelConnectionPool` + will be used by default. + + All other keyword arguments are merged with any connection_kwargs + passed to this class and passed to the connection pool as keyword + arguments to be used to initialize Redis connections. + """ + kwargs["is_master"] = True + connection_kwargs = dict(self.connection_kwargs) + connection_kwargs.update(kwargs) + + connection_pool = connection_pool_class(service_name, self, **connection_kwargs) + # The Redis object "owns" the pool + return redis_class.from_pool(connection_pool) + + def slave_for( + self, + service_name: str, + redis_class: Type[Redis] = Redis, + connection_pool_class: Type[SentinelConnectionPool] = SentinelConnectionPool, + **kwargs, + ): + """ + Returns redis client instance for the ``service_name`` slave(s). + + A SentinelConnectionPool class is used to retrieve the slave's + address before establishing a new connection. + + By default clients will be a :py:class:`~redis.Redis` instance. + Specify a different class to the ``redis_class`` argument if you + desire something different. + + The ``connection_pool_class`` specifies the connection pool to use. + The SentinelConnectionPool will be used by default. 
+ + All other keyword arguments are merged with any connection_kwargs + passed to this class and passed to the connection pool as keyword + arguments to be used to initialize Redis connections. + """ + kwargs["is_master"] = False + connection_kwargs = dict(self.connection_kwargs) + connection_kwargs.update(kwargs) + + connection_pool = connection_pool_class(service_name, self, **connection_kwargs) + # The Redis object "owns" the pool + return redis_class.from_pool(connection_pool) diff --git a/templates/skills/spotify/dependencies/redis/asyncio/utils.py b/templates/skills/spotify/dependencies/redis/asyncio/utils.py new file mode 100644 index 00000000..5a55b36a --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/asyncio/utils.py @@ -0,0 +1,28 @@ +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from redis.asyncio.client import Pipeline, Redis + + +def from_url(url, **kwargs): + """ + Returns an active Redis client generated from the given database URL. + + Will attempt to extract the database id from the path url fragment, if + none is provided. + """ + from redis.asyncio.client import Redis + + return Redis.from_url(url, **kwargs) + + +class pipeline: + def __init__(self, redis_obj: "Redis"): + self.p: "Pipeline" = redis_obj.pipeline() + + async def __aenter__(self) -> "Pipeline": + return self.p + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.p.execute() + del self.p diff --git a/templates/skills/spotify/dependencies/redis/backoff.py b/templates/skills/spotify/dependencies/redis/backoff.py new file mode 100644 index 00000000..c62e760b --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/backoff.py @@ -0,0 +1,114 @@ +import random +from abc import ABC, abstractmethod + +# Maximum backoff between each retry in seconds +DEFAULT_CAP = 0.512 +# Minimum backoff between each retry in seconds +DEFAULT_BASE = 0.008 + + +class AbstractBackoff(ABC): + """Backoff interface""" + + def reset(self): + """ + Reset internal state before an operation. 
+ `reset` is called once at the beginning of + every call to `Retry.call_with_retry` + """ + pass + + @abstractmethod + def compute(self, failures): + """Compute backoff in seconds upon failure""" + pass + + +class ConstantBackoff(AbstractBackoff): + """Constant backoff upon failure""" + + def __init__(self, backoff): + """`backoff`: backoff time in seconds""" + self._backoff = backoff + + def compute(self, failures): + return self._backoff + + +class NoBackoff(ConstantBackoff): + """No backoff upon failure""" + + def __init__(self): + super().__init__(0) + + +class ExponentialBackoff(AbstractBackoff): + """Exponential backoff upon failure""" + + def __init__(self, cap=DEFAULT_CAP, base=DEFAULT_BASE): + """ + `cap`: maximum backoff time in seconds + `base`: base backoff time in seconds + """ + self._cap = cap + self._base = base + + def compute(self, failures): + return min(self._cap, self._base * 2**failures) + + +class FullJitterBackoff(AbstractBackoff): + """Full jitter backoff upon failure""" + + def __init__(self, cap=DEFAULT_CAP, base=DEFAULT_BASE): + """ + `cap`: maximum backoff time in seconds + `base`: base backoff time in seconds + """ + self._cap = cap + self._base = base + + def compute(self, failures): + return random.uniform(0, min(self._cap, self._base * 2**failures)) + + +class EqualJitterBackoff(AbstractBackoff): + """Equal jitter backoff upon failure""" + + def __init__(self, cap=DEFAULT_CAP, base=DEFAULT_BASE): + """ + `cap`: maximum backoff time in seconds + `base`: base backoff time in seconds + """ + self._cap = cap + self._base = base + + def compute(self, failures): + temp = min(self._cap, self._base * 2**failures) / 2 + return temp + random.uniform(0, temp) + + +class DecorrelatedJitterBackoff(AbstractBackoff): + """Decorrelated jitter backoff upon failure""" + + def __init__(self, cap=DEFAULT_CAP, base=DEFAULT_BASE): + """ + `cap`: maximum backoff time in seconds + `base`: base backoff time in seconds + """ + self._cap = cap + self._base = base + self._previous_backoff = 0 + + def reset(self): + self._previous_backoff = 0 + + def compute(self, failures): + max_backoff = max(self._base, self._previous_backoff * 3) + temp = random.uniform(self._base, max_backoff) + self._previous_backoff = min(self._cap, temp) + return self._previous_backoff + + +def default_backoff(): + return EqualJitterBackoff() diff --git a/templates/skills/spotify/dependencies/redis/client.py b/templates/skills/spotify/dependencies/redis/client.py new file mode 100644 index 00000000..5a12f40e --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/client.py @@ -0,0 +1,1522 @@ +import copy +import re +import threading +import time +import warnings +from itertools import chain +from typing import Any, Callable, Dict, List, Optional, Type, Union + +from redis._parsers.encoders import Encoder +from redis._parsers.helpers import ( + _RedisCallbacks, + _RedisCallbacksRESP2, + _RedisCallbacksRESP3, + bool_ok, +) +from redis.commands import ( + CoreCommands, + RedisModuleCommands, + SentinelCommands, + list_or_args, +) +from redis.connection import ( + AbstractConnection, + ConnectionPool, + SSLConnection, + UnixDomainSocketConnection, +) +from redis.credentials import CredentialProvider +from redis.exceptions import ( + ConnectionError, + ExecAbortError, + PubSubError, + RedisError, + ResponseError, + TimeoutError, + WatchError, +) +from redis.lock import Lock +from redis.retry import Retry +from redis.utils import ( + HIREDIS_AVAILABLE, + _set_info_logger, + get_lib_version, + safe_str, + 
str_if_bytes,
+)
+
+SYM_EMPTY = b""
+EMPTY_RESPONSE = "EMPTY_RESPONSE"
+
+# some responses (ie. dump) are binary, and just meant to never be decoded
+NEVER_DECODE = "NEVER_DECODE"
+
+
+class CaseInsensitiveDict(dict):
+    "Case insensitive dict implementation. Assumes string keys only."
+
+    def __init__(self, data: Dict[str, str]) -> None:
+        for k, v in data.items():
+            self[k.upper()] = v
+
+    def __contains__(self, k):
+        return super().__contains__(k.upper())
+
+    def __delitem__(self, k):
+        super().__delitem__(k.upper())
+
+    def __getitem__(self, k):
+        return super().__getitem__(k.upper())
+
+    def get(self, k, default=None):
+        return super().get(k.upper(), default)
+
+    def __setitem__(self, k, v):
+        super().__setitem__(k.upper(), v)
+
+    def update(self, data):
+        data = CaseInsensitiveDict(data)
+        super().update(data)
+
+
+class AbstractRedis:
+    pass
+
+
+class Redis(RedisModuleCommands, CoreCommands, SentinelCommands):
+    """
+    Implementation of the Redis protocol.
+
+    This abstract class provides a Python interface to all Redis commands
+    and an implementation of the Redis protocol.
+
+    Pipelines derive from this, implementing how
+    the commands are sent to and received from the Redis server. Based on
+    configuration, an instance will either use a ConnectionPool, or
+    Connection object to talk to redis.
+
+    It is not safe to pass PubSub or Pipeline objects between threads.
+    """
+
+    @classmethod
+    def from_url(cls, url: str, **kwargs) -> "Redis":
+        """
+        Return a Redis client object configured from the given URL
+
+        For example::
+
+            redis://[[username]:[password]]@localhost:6379/0
+            rediss://[[username]:[password]]@localhost:6379/0
+            unix://[username@]/path/to/socket.sock?db=0[&password=password]
+
+        Three URL schemes are supported:
+
+        - `redis://` creates a TCP socket connection. See more at:
+          <https://www.iana.org/assignments/uri-schemes/prov/redis>
+        - `rediss://` creates an SSL wrapped TCP socket connection. See more at:
+          <https://www.iana.org/assignments/uri-schemes/prov/rediss>
+        - ``unix://``: creates a Unix Domain Socket connection.
+
+        The username, password, hostname, path and all querystring values
+        are passed through urllib.parse.unquote in order to replace any
+        percent-encoded values with their corresponding characters.
+
+        There are several ways to specify a database number. The first value
+        found will be used:
+
+        1. A ``db`` querystring option, e.g. redis://localhost?db=0
+        2. If using the redis:// or rediss:// schemes, the path argument
+           of the url, e.g. redis://localhost/0
+        3. A ``db`` keyword argument to this function.
+
+        If none of these options are specified, the default db=0 is used.
+
+        All querystring options are cast to their appropriate Python types.
+        Boolean arguments can be specified with string values "True"/"False"
+        or "Yes"/"No". Values that cannot be properly cast cause a
+        ``ValueError`` to be raised. Once parsed, the querystring arguments
+        and keyword arguments are passed to the ``ConnectionPool``'s
+        class initializer. In the case of conflicting arguments, querystring
+        arguments always win.
+
+        """
+        single_connection_client = kwargs.pop("single_connection_client", False)
+        connection_pool = ConnectionPool.from_url(url, **kwargs)
+        client = cls(
+            connection_pool=connection_pool,
+            single_connection_client=single_connection_client,
+        )
+        client.auto_close_connection_pool = True
+        return client
+
+    @classmethod
+    def from_pool(
+        cls: Type["Redis"],
+        connection_pool: ConnectionPool,
+    ) -> "Redis":
+        """
+        Return a Redis client from the given connection pool.
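A short sketch of the URL forms `from_url` accepts; the DSNs, hostnames, and credentials are illustrative only.

```python
from redis import Redis

# db taken from the path component (db=0)
r = Redis.from_url("redis://localhost:6379/0", decode_responses=True)

# querystring options are parsed and type-cast; ?db=2 wins over the db kwarg
r2 = Redis.from_url("redis://localhost:6379?db=2", db=1)

# TLS variant, with placeholder credentials embedded in the netloc
r3 = Redis.from_url("rediss://user:secret@redis.example.com:6380/0")
```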
+ The Redis client will take ownership of the connection pool and + close it when the Redis client is closed. + """ + client = cls( + connection_pool=connection_pool, + ) + client.auto_close_connection_pool = True + return client + + def __init__( + self, + host="localhost", + port=6379, + db=0, + password=None, + socket_timeout=None, + socket_connect_timeout=None, + socket_keepalive=None, + socket_keepalive_options=None, + connection_pool=None, + unix_socket_path=None, + encoding="utf-8", + encoding_errors="strict", + charset=None, + errors=None, + decode_responses=False, + retry_on_timeout=False, + retry_on_error=None, + ssl=False, + ssl_keyfile=None, + ssl_certfile=None, + ssl_cert_reqs="required", + ssl_ca_certs=None, + ssl_ca_path=None, + ssl_ca_data=None, + ssl_check_hostname=False, + ssl_password=None, + ssl_validate_ocsp=False, + ssl_validate_ocsp_stapled=False, + ssl_ocsp_context=None, + ssl_ocsp_expected_cert=None, + ssl_min_version=None, + ssl_ciphers=None, + max_connections=None, + single_connection_client=False, + health_check_interval=0, + client_name=None, + lib_name="redis-py", + lib_version=get_lib_version(), + username=None, + retry=None, + redis_connect_func=None, + credential_provider: Optional[CredentialProvider] = None, + protocol: Optional[int] = 2, + ) -> None: + """ + Initialize a new Redis client. + To specify a retry policy for specific errors, first set + `retry_on_error` to a list of the error/s to retry on, then set + `retry` to a valid `Retry` object. + To retry on TimeoutError, `retry_on_timeout` can also be set to `True`. + + Args: + + single_connection_client: + if `True`, connection pool is not used. In that case `Redis` + instance use is not thread safe. + """ + if not connection_pool: + if charset is not None: + warnings.warn( + DeprecationWarning( + '"charset" is deprecated. Use "encoding" instead' + ) + ) + encoding = charset + if errors is not None: + warnings.warn( + DeprecationWarning( + '"errors" is deprecated. 
Use "encoding_errors" instead' + ) + ) + encoding_errors = errors + if not retry_on_error: + retry_on_error = [] + if retry_on_timeout is True: + retry_on_error.append(TimeoutError) + kwargs = { + "db": db, + "username": username, + "password": password, + "socket_timeout": socket_timeout, + "encoding": encoding, + "encoding_errors": encoding_errors, + "decode_responses": decode_responses, + "retry_on_error": retry_on_error, + "retry": copy.deepcopy(retry), + "max_connections": max_connections, + "health_check_interval": health_check_interval, + "client_name": client_name, + "lib_name": lib_name, + "lib_version": lib_version, + "redis_connect_func": redis_connect_func, + "credential_provider": credential_provider, + "protocol": protocol, + } + # based on input, setup appropriate connection args + if unix_socket_path is not None: + kwargs.update( + { + "path": unix_socket_path, + "connection_class": UnixDomainSocketConnection, + } + ) + else: + # TCP specific options + kwargs.update( + { + "host": host, + "port": port, + "socket_connect_timeout": socket_connect_timeout, + "socket_keepalive": socket_keepalive, + "socket_keepalive_options": socket_keepalive_options, + } + ) + + if ssl: + kwargs.update( + { + "connection_class": SSLConnection, + "ssl_keyfile": ssl_keyfile, + "ssl_certfile": ssl_certfile, + "ssl_cert_reqs": ssl_cert_reqs, + "ssl_ca_certs": ssl_ca_certs, + "ssl_ca_data": ssl_ca_data, + "ssl_check_hostname": ssl_check_hostname, + "ssl_password": ssl_password, + "ssl_ca_path": ssl_ca_path, + "ssl_validate_ocsp_stapled": ssl_validate_ocsp_stapled, + "ssl_validate_ocsp": ssl_validate_ocsp, + "ssl_ocsp_context": ssl_ocsp_context, + "ssl_ocsp_expected_cert": ssl_ocsp_expected_cert, + "ssl_min_version": ssl_min_version, + "ssl_ciphers": ssl_ciphers, + } + ) + connection_pool = ConnectionPool(**kwargs) + self.auto_close_connection_pool = True + else: + self.auto_close_connection_pool = False + + self.connection_pool = connection_pool + self.connection = None + if single_connection_client: + self.connection = self.connection_pool.get_connection("_") + + self.response_callbacks = CaseInsensitiveDict(_RedisCallbacks) + + if self.connection_pool.connection_kwargs.get("protocol") in ["3", 3]: + self.response_callbacks.update(_RedisCallbacksRESP3) + else: + self.response_callbacks.update(_RedisCallbacksRESP2) + + def __repr__(self) -> str: + return f"{type(self).__name__}<{repr(self.connection_pool)}>" + + def get_encoder(self) -> "Encoder": + """Get the connection pool's encoder""" + return self.connection_pool.get_encoder() + + def get_connection_kwargs(self) -> Dict: + """Get the connection's key-word arguments""" + return self.connection_pool.connection_kwargs + + def get_retry(self) -> Optional["Retry"]: + return self.get_connection_kwargs().get("retry") + + def set_retry(self, retry: "Retry") -> None: + self.get_connection_kwargs().update({"retry": retry}) + self.connection_pool.set_retry(retry) + + def set_response_callback(self, command: str, callback: Callable) -> None: + """Set a custom Response Callback""" + self.response_callbacks[command] = callback + + def load_external_module(self, funcname, func) -> None: + """ + This function can be used to add externally defined redis modules, + and their namespaces to the redis client. + + funcname - A string containing the name of the function to create + func - The function, being added to this class. + + ex: Assume that one has a custom redis module named foomod that + creates command named 'foo.dothing' and 'foo.anotherthing' in redis. 
+ To load function functions into this namespace: + + from redis import Redis + from foomodule import F + r = Redis() + r.load_external_module("foo", F) + r.foo().dothing('your', 'arguments') + + For a concrete example see the reimport of the redisjson module in + tests/test_connection.py::test_loading_external_modules + """ + setattr(self, funcname, func) + + def pipeline(self, transaction=True, shard_hint=None) -> "Pipeline": + """ + Return a new pipeline object that can queue multiple commands for + later execution. ``transaction`` indicates whether all commands + should be executed atomically. Apart from making a group of operations + atomic, pipelines are useful for reducing the back-and-forth overhead + between the client and server. + """ + return Pipeline( + self.connection_pool, self.response_callbacks, transaction, shard_hint + ) + + def transaction( + self, func: Callable[["Pipeline"], None], *watches, **kwargs + ) -> None: + """ + Convenience method for executing the callable `func` as a transaction + while watching all keys specified in `watches`. The 'func' callable + should expect a single argument which is a Pipeline object. + """ + shard_hint = kwargs.pop("shard_hint", None) + value_from_callable = kwargs.pop("value_from_callable", False) + watch_delay = kwargs.pop("watch_delay", None) + with self.pipeline(True, shard_hint) as pipe: + while True: + try: + if watches: + pipe.watch(*watches) + func_value = func(pipe) + exec_value = pipe.execute() + return func_value if value_from_callable else exec_value + except WatchError: + if watch_delay is not None and watch_delay > 0: + time.sleep(watch_delay) + continue + + def lock( + self, + name: str, + timeout: Optional[float] = None, + sleep: float = 0.1, + blocking: bool = True, + blocking_timeout: Optional[float] = None, + lock_class: Union[None, Any] = None, + thread_local: bool = True, + ): + """ + Return a new Lock object using key ``name`` that mimics + the behavior of threading.Lock. + + If specified, ``timeout`` indicates a maximum life for the lock. + By default, it will remain locked until release() is called. + + ``sleep`` indicates the amount of time to sleep per loop iteration + when the lock is in blocking mode and another client is currently + holding the lock. + + ``blocking`` indicates whether calling ``acquire`` should block until + the lock has been acquired or to fail immediately, causing ``acquire`` + to return False and the lock not being acquired. Defaults to True. + Note this value can be overridden by passing a ``blocking`` + argument to ``acquire``. + + ``blocking_timeout`` indicates the maximum amount of time in seconds to + spend trying to acquire the lock. A value of ``None`` indicates + continue trying forever. ``blocking_timeout`` can be specified as a + float or integer, both representing the number of seconds to wait. + + ``lock_class`` forces the specified lock implementation. Note that as + of redis-py 3.0, the only lock class we implement is ``Lock`` (which is + a Lua-based lock). So, it's unlikely you'll need this parameter, unless + you have created your own custom lock class. + + ``thread_local`` indicates whether the lock token is placed in + thread-local storage. By default, the token is placed in thread local + storage so that a thread only sees its token, not a token set by + another thread. Consider the following timeline: + + time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds. 
+ thread-1 sets the token to "abc" + time: 1, thread-2 blocks trying to acquire `my-lock` using the + Lock instance. + time: 5, thread-1 has not yet completed. redis expires the lock + key. + time: 5, thread-2 acquired `my-lock` now that it's available. + thread-2 sets the token to "xyz" + time: 6, thread-1 finishes its work and calls release(). if the + token is *not* stored in thread local storage, then + thread-1 would see the token value as "xyz" and would be + able to successfully release the thread-2's lock. + + In some use cases it's necessary to disable thread local storage. For + example, if you have code where one thread acquires a lock and passes + that lock instance to a worker thread to release later. If thread + local storage isn't disabled in this case, the worker thread won't see + the token set by the thread that acquired the lock. Our assumption + is that these cases aren't common and as such default to using + thread local storage.""" + if lock_class is None: + lock_class = Lock + return lock_class( + self, + name, + timeout=timeout, + sleep=sleep, + blocking=blocking, + blocking_timeout=blocking_timeout, + thread_local=thread_local, + ) + + def pubsub(self, **kwargs): + """ + Return a Publish/Subscribe object. With this object, you can + subscribe to channels and listen for messages that get published to + them. + """ + return PubSub(self.connection_pool, **kwargs) + + def monitor(self): + return Monitor(self.connection_pool) + + def client(self): + return self.__class__( + connection_pool=self.connection_pool, single_connection_client=True + ) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def __del__(self): + self.close() + + def close(self): + # In case a connection property does not yet exist + # (due to a crash earlier in the Redis() constructor), return + # immediately as there is nothing to clean-up. 
+        if not hasattr(self, "connection"):
+            return
+
+        conn = self.connection
+        if conn:
+            self.connection = None
+            self.connection_pool.release(conn)
+
+        if self.auto_close_connection_pool:
+            self.connection_pool.disconnect()
+
+    def _send_command_parse_response(self, conn, command_name, *args, **options):
+        """
+        Send a command and parse the response
+        """
+        conn.send_command(*args)
+        return self.parse_response(conn, command_name, **options)
+
+    def _disconnect_raise(self, conn, error):
+        """
+        Close the connection and raise an exception
+        if retry_on_error is not set or the error
+        is not one of the specified error types
+        """
+        conn.disconnect()
+        if (
+            conn.retry_on_error is None
+            or isinstance(error, tuple(conn.retry_on_error)) is False
+        ):
+            raise error
+
+    # COMMAND EXECUTION AND PROTOCOL PARSING
+    def execute_command(self, *args, **options):
+        """Execute a command and return a parsed response"""
+        pool = self.connection_pool
+        command_name = args[0]
+        conn = self.connection or pool.get_connection(command_name, **options)
+
+        try:
+            return conn.retry.call_with_retry(
+                lambda: self._send_command_parse_response(
+                    conn, command_name, *args, **options
+                ),
+                lambda error: self._disconnect_raise(conn, error),
+            )
+        finally:
+            if not self.connection:
+                pool.release(conn)
+
+    def parse_response(self, connection, command_name, **options):
+        """Parses a response from the Redis server"""
+        try:
+            if NEVER_DECODE in options:
+                response = connection.read_response(disable_decoding=True)
+                options.pop(NEVER_DECODE)
+            else:
+                response = connection.read_response()
+        except ResponseError:
+            if EMPTY_RESPONSE in options:
+                return options[EMPTY_RESPONSE]
+            raise
+
+        if EMPTY_RESPONSE in options:
+            options.pop(EMPTY_RESPONSE)
+
+        if command_name in self.response_callbacks:
+            return self.response_callbacks[command_name](response, **options)
+        return response
+
+
+StrictRedis = Redis
+
+
+class Monitor:
+    """
+    Monitor is useful for handling the MONITOR command to the redis server.
+    next_command() method returns one command from monitor
+    listen() method yields commands from monitor.
+    """
+
+    monitor_re = re.compile(r"\[(\d+) (.*?)\] (.*)")
+    command_re = re.compile(r'"(.*?)(?<!\\)"')
+
+    def __init__(self, connection_pool):
+        self.connection_pool = connection_pool
+        self.connection = self.connection_pool.get_connection("MONITOR")
+
+    def __enter__(self):
+        self.connection.send_command("MONITOR")
+        # check that monitor returns 'OK', but don't return it to user
+        response = self.connection.read_response()
+        if not bool_ok(response):
+            raise RedisError(f"MONITOR failed: {response}")
+        return self
+
+    def __exit__(self, *args):
+        self.connection.disconnect()
+        self.connection_pool.release(self.connection)
+
+    def next_command(self):
+        """Parse the response from a monitor command"""
+        response = self.connection.read_response()
+        if isinstance(response, bytes):
+            response = self.connection.encoder.decode(response, force=True)
+        command_time, command_data = response.split(" ", 1)
+        m = self.monitor_re.match(command_data)
+        db_id, client_info, command = m.groups()
+        command = " ".join(self.command_re.findall(command))
+        # Redis escapes double quotes because each piece of the command
+        # string is surrounded by double quotes. We don't have that
+        # requirement so remove the escaping and leave the quote.
+        command = command.replace('\\"', '"')
+
+        if client_info == "lua":
+            client_address = "lua"
+            client_port = ""
+            client_type = "lua"
+        elif client_info.startswith("unix"):
+            client_address = "unix"
+            client_port = client_info[5:]
+            client_type = "unix"
+        else:
+            # use rsplit as ipv6 addresses contain colons
+            client_address, client_port = client_info.rsplit(":", 1)
+            client_type = "tcp"
+        return {
+            "time": float(command_time),
+            "db": int(db_id),
+            "client_address": client_address,
+            "client_port": client_port,
+            "client_type": client_type,
+            "command": command,
+        }
+
+    def listen(self):
+        """Listen for commands coming to the server."""
+        while True:
+            yield self.next_command()
+
+
+class PubSub:
+    """
+    PubSub provides publish, subscribe and listen support to Redis channels.
+
+    After subscribing to one or more channels, the listen() method will block
+    until a message arrives on one of the subscribed channels. That message
+    will be returned and it's safe to start listening again.
+    """
+
+    PUBLISH_MESSAGE_TYPES = ("message", "pmessage", "smessage")
+    UNSUBSCRIBE_MESSAGE_TYPES = ("unsubscribe", "punsubscribe", "sunsubscribe")
+    HEALTH_CHECK_MESSAGE = "redis-py-health-check"
+
+    def __init__(
+        self,
+        connection_pool,
+        shard_hint=None,
+        ignore_subscribe_messages=False,
+        encoder=None,
+        push_handler_func=None,
+    ):
+        self.connection_pool = connection_pool
+        self.shard_hint = shard_hint
+        self.ignore_subscribe_messages = ignore_subscribe_messages
+        self.connection = None
+        self.subscribed_event = threading.Event()
+        # we need to know the encoding options for this connection in order
+        # to lookup channel and pattern names for callback handlers.
+        self.encoder = encoder
+        self.push_handler_func = push_handler_func
+        if self.encoder is None:
+            self.encoder = self.connection_pool.get_encoder()
+        self.health_check_response_b = self.encoder.encode(self.HEALTH_CHECK_MESSAGE)
+        if self.encoder.decode_responses:
+            self.health_check_response = ["pong", self.HEALTH_CHECK_MESSAGE]
+        else:
+            self.health_check_response = [b"pong", self.health_check_response_b]
+        if self.push_handler_func is None:
+            _set_info_logger()
+        self.reset()
+
+    def __enter__(self) -> "PubSub":
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
+        self.reset()
+
+    def __del__(self) -> None:
+        try:
+            # if this object went out of scope prior to shutting down
+            # subscriptions, close the connection manually before
+            # returning it to the connection pool
+            self.reset()
+        except Exception:
+            pass
+
+    def reset(self) -> None:
+        if self.connection:
+            self.connection.disconnect()
+            self.connection.deregister_connect_callback(self.on_connect)
+            self.connection_pool.release(self.connection)
+            self.connection = None
+        self.health_check_response_counter = 0
+        self.channels = {}
+        self.pending_unsubscribe_channels = set()
+        self.shard_channels = {}
+        self.pending_unsubscribe_shard_channels = set()
+        self.patterns = {}
+        self.pending_unsubscribe_patterns = set()
+        self.subscribed_event.clear()
+
+    def close(self) -> None:
+        self.reset()
+
+    def on_connect(self, connection) -> None:
+        "Re-subscribe to any channels and patterns previously subscribed to"
+        # NOTE: for python3, we can't pass bytestrings as keyword arguments
+        # so we need to decode channel/pattern names back to unicode strings
+        # before passing them to [p]subscribe.
+ self.pending_unsubscribe_channels.clear() + self.pending_unsubscribe_patterns.clear() + self.pending_unsubscribe_shard_channels.clear() + if self.channels: + channels = { + self.encoder.decode(k, force=True): v for k, v in self.channels.items() + } + self.subscribe(**channels) + if self.patterns: + patterns = { + self.encoder.decode(k, force=True): v for k, v in self.patterns.items() + } + self.psubscribe(**patterns) + if self.shard_channels: + shard_channels = { + self.encoder.decode(k, force=True): v + for k, v in self.shard_channels.items() + } + self.ssubscribe(**shard_channels) + + @property + def subscribed(self) -> bool: + """Indicates if there are subscriptions to any channels or patterns""" + return self.subscribed_event.is_set() + + def execute_command(self, *args): + """Execute a publish/subscribe command""" + + # NOTE: don't parse the response in this function -- it could pull a + # legitimate message off the stack if the connection is already + # subscribed to one or more channels + + if self.connection is None: + self.connection = self.connection_pool.get_connection( + "pubsub", self.shard_hint + ) + # register a callback that re-subscribes to any channels we + # were listening to when we were disconnected + self.connection.register_connect_callback(self.on_connect) + if self.push_handler_func is not None and not HIREDIS_AVAILABLE: + self.connection._parser.set_push_handler(self.push_handler_func) + connection = self.connection + kwargs = {"check_health": not self.subscribed} + if not self.subscribed: + self.clean_health_check_responses() + self._execute(connection, connection.send_command, *args, **kwargs) + + def clean_health_check_responses(self) -> None: + """ + If any health check responses are present, clean them + """ + ttl = 10 + conn = self.connection + while self.health_check_response_counter > 0 and ttl > 0: + if self._execute(conn, conn.can_read, timeout=conn.socket_timeout): + response = self._execute(conn, conn.read_response) + if self.is_health_check_response(response): + self.health_check_response_counter -= 1 + else: + raise PubSubError( + "A non health check response was cleaned by " + "execute_command: {0}".format(response) + ) + ttl -= 1 + + def _disconnect_raise_connect(self, conn, error) -> None: + """ + Close the connection and raise an exception + if retry_on_error is not set or the error is not one + of the specified error types. Otherwise, try to + reconnect + """ + conn.disconnect() + if ( + conn.retry_on_error is None + or isinstance(error, tuple(conn.retry_on_error)) is False + ): + raise error + conn.connect() + + def _execute(self, conn, command, *args, **kwargs): + """ + Connect manually upon disconnection. If the Redis server is down, + this will fail and raise a ConnectionError as desired. + After reconnection, the ``on_connect`` callback should have been + called by the # connection to resubscribe us to any channels and + patterns we were previously listening to + """ + return conn.retry.call_with_retry( + lambda: command(*args, **kwargs), + lambda error: self._disconnect_raise_connect(conn, error), + ) + + def parse_response(self, block=True, timeout=0): + """Parse the response from a publish/subscribe command""" + conn = self.connection + if conn is None: + raise RuntimeError( + "pubsub connection not set: " + "did you forget to call subscribe() or psubscribe()?" 
+ ) + + self.check_health() + + def try_read(): + if not block: + if not conn.can_read(timeout=timeout): + return None + else: + conn.connect() + return conn.read_response(disconnect_on_error=False, push_request=True) + + response = self._execute(conn, try_read) + + if self.is_health_check_response(response): + # ignore the health check message as user might not expect it + self.health_check_response_counter -= 1 + return None + return response + + def is_health_check_response(self, response) -> bool: + """ + Check if the response is a health check response. + If there are no subscriptions redis responds to PING command with a + bulk response, instead of a multi-bulk with "pong" and the response. + """ + return response in [ + self.health_check_response, # If there was a subscription + self.health_check_response_b, # If there wasn't + ] + + def check_health(self) -> None: + conn = self.connection + if conn is None: + raise RuntimeError( + "pubsub connection not set: " + "did you forget to call subscribe() or psubscribe()?" + ) + + if conn.health_check_interval and time.time() > conn.next_health_check: + conn.send_command("PING", self.HEALTH_CHECK_MESSAGE, check_health=False) + self.health_check_response_counter += 1 + + def _normalize_keys(self, data) -> Dict: + """ + normalize channel/pattern names to be either bytes or strings + based on whether responses are automatically decoded. this saves us + from coercing the value for each message coming in. + """ + encode = self.encoder.encode + decode = self.encoder.decode + return {decode(encode(k)): v for k, v in data.items()} + + def psubscribe(self, *args, **kwargs): + """ + Subscribe to channel patterns. Patterns supplied as keyword arguments + expect a pattern name as the key and a callable as the value. A + pattern's callable will be invoked automatically when a message is + received on that pattern rather than producing a message via + ``listen()``. + """ + if args: + args = list_or_args(args[0], args[1:]) + new_patterns = dict.fromkeys(args) + new_patterns.update(kwargs) + ret_val = self.execute_command("PSUBSCRIBE", *new_patterns.keys()) + # update the patterns dict AFTER we send the command. we don't want to + # subscribe twice to these patterns, once for the command and again + # for the reconnection. + new_patterns = self._normalize_keys(new_patterns) + self.patterns.update(new_patterns) + if not self.subscribed: + # Set the subscribed_event flag to True + self.subscribed_event.set() + # Clear the health check counter + self.health_check_response_counter = 0 + self.pending_unsubscribe_patterns.difference_update(new_patterns) + return ret_val + + def punsubscribe(self, *args): + """ + Unsubscribe from the supplied patterns. If empty, unsubscribe from + all patterns. + """ + if args: + args = list_or_args(args[0], args[1:]) + patterns = self._normalize_keys(dict.fromkeys(args)) + else: + patterns = self.patterns + self.pending_unsubscribe_patterns.update(patterns) + return self.execute_command("PUNSUBSCRIBE", *args) + + def subscribe(self, *args, **kwargs): + """ + Subscribe to channels. Channels supplied as keyword arguments expect + a channel name as the key and a callable as the value. A channel's + callable will be invoked automatically when a message is received on + that channel rather than producing a message via ``listen()`` or + ``get_message()``. 
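A compact polling sketch of the subscribe/get_message flow above, assuming a local redis-server on the default port:

```python
import time

from redis import Redis

r = Redis()  # assumes a local redis-server on localhost:6379
p = r.pubsub(ignore_subscribe_messages=True)
p.subscribe("news")

r.publish("news", "hello")

# Poll briefly; get_message returns None when nothing is pending.
deadline = time.time() + 2
while time.time() < deadline:
    message = p.get_message(timeout=0.1)
    if message:
        print(message["channel"], message["data"])  # b'news' b'hello'
        break

p.close()
```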
+ """ + if args: + args = list_or_args(args[0], args[1:]) + new_channels = dict.fromkeys(args) + new_channels.update(kwargs) + ret_val = self.execute_command("SUBSCRIBE", *new_channels.keys()) + # update the channels dict AFTER we send the command. we don't want to + # subscribe twice to these channels, once for the command and again + # for the reconnection. + new_channels = self._normalize_keys(new_channels) + self.channels.update(new_channels) + if not self.subscribed: + # Set the subscribed_event flag to True + self.subscribed_event.set() + # Clear the health check counter + self.health_check_response_counter = 0 + self.pending_unsubscribe_channels.difference_update(new_channels) + return ret_val + + def unsubscribe(self, *args): + """ + Unsubscribe from the supplied channels. If empty, unsubscribe from + all channels + """ + if args: + args = list_or_args(args[0], args[1:]) + channels = self._normalize_keys(dict.fromkeys(args)) + else: + channels = self.channels + self.pending_unsubscribe_channels.update(channels) + return self.execute_command("UNSUBSCRIBE", *args) + + def ssubscribe(self, *args, target_node=None, **kwargs): + """ + Subscribes the client to the specified shard channels. + Channels supplied as keyword arguments expect a channel name as the key + and a callable as the value. A channel's callable will be invoked automatically + when a message is received on that channel rather than producing a message via + ``listen()`` or ``get_sharded_message()``. + """ + if args: + args = list_or_args(args[0], args[1:]) + new_s_channels = dict.fromkeys(args) + new_s_channels.update(kwargs) + ret_val = self.execute_command("SSUBSCRIBE", *new_s_channels.keys()) + # update the s_channels dict AFTER we send the command. we don't want to + # subscribe twice to these channels, once for the command and again + # for the reconnection. + new_s_channels = self._normalize_keys(new_s_channels) + self.shard_channels.update(new_s_channels) + if not self.subscribed: + # Set the subscribed_event flag to True + self.subscribed_event.set() + # Clear the health check counter + self.health_check_response_counter = 0 + self.pending_unsubscribe_shard_channels.difference_update(new_s_channels) + return ret_val + + def sunsubscribe(self, *args, target_node=None): + """ + Unsubscribe from the supplied shard_channels. If empty, unsubscribe from + all shard_channels + """ + if args: + args = list_or_args(args[0], args[1:]) + s_channels = self._normalize_keys(dict.fromkeys(args)) + else: + s_channels = self.shard_channels + self.pending_unsubscribe_shard_channels.update(s_channels) + return self.execute_command("SUNSUBSCRIBE", *args) + + def listen(self): + "Listen for messages on channels this client has been subscribed to" + while self.subscribed: + response = self.handle_message(self.parse_response(block=True)) + if response is not None: + yield response + + def get_message( + self, ignore_subscribe_messages: bool = False, timeout: float = 0.0 + ): + """ + Get the next message if one is available, otherwise None. + + If timeout is specified, the system will wait for `timeout` seconds + before returning. Timeout should be specified as a floating point + number, or None, to wait indefinitely. + """ + if not self.subscribed: + # Wait for subscription + start_time = time.time() + if self.subscribed_event.wait(timeout) is True: + # The connection was subscribed during the timeout time frame. 
+ # The timeout should be adjusted based on the time spent + # waiting for the subscription + time_spent = time.time() - start_time + timeout = max(0.0, timeout - time_spent) + else: + # The connection isn't subscribed to any channels or patterns, + # so no messages are available + return None + + response = self.parse_response(block=(timeout is None), timeout=timeout) + if response: + return self.handle_message(response, ignore_subscribe_messages) + return None + + get_sharded_message = get_message + + def ping(self, message: Union[str, None] = None) -> bool: + """ + Ping the Redis server + """ + args = ["PING", message] if message is not None else ["PING"] + return self.execute_command(*args) + + def handle_message(self, response, ignore_subscribe_messages=False): + """ + Parses a pub/sub message. If the channel or pattern was subscribed to + with a message handler, the handler is invoked instead of a parsed + message being returned. + """ + if response is None: + return None + if isinstance(response, bytes): + response = [b"pong", response] if response != b"PONG" else [b"pong", b""] + message_type = str_if_bytes(response[0]) + if message_type == "pmessage": + message = { + "type": message_type, + "pattern": response[1], + "channel": response[2], + "data": response[3], + } + elif message_type == "pong": + message = { + "type": message_type, + "pattern": None, + "channel": None, + "data": response[1], + } + else: + message = { + "type": message_type, + "pattern": None, + "channel": response[1], + "data": response[2], + } + + # if this is an unsubscribe message, remove it from memory + if message_type in self.UNSUBSCRIBE_MESSAGE_TYPES: + if message_type == "punsubscribe": + pattern = response[1] + if pattern in self.pending_unsubscribe_patterns: + self.pending_unsubscribe_patterns.remove(pattern) + self.patterns.pop(pattern, None) + elif message_type == "sunsubscribe": + s_channel = response[1] + if s_channel in self.pending_unsubscribe_shard_channels: + self.pending_unsubscribe_shard_channels.remove(s_channel) + self.shard_channels.pop(s_channel, None) + else: + channel = response[1] + if channel in self.pending_unsubscribe_channels: + self.pending_unsubscribe_channels.remove(channel) + self.channels.pop(channel, None) + if not self.channels and not self.patterns and not self.shard_channels: + # There are no subscriptions anymore, set subscribed_event flag + # to false + self.subscribed_event.clear() + + if message_type in self.PUBLISH_MESSAGE_TYPES: + # if there's a message handler, invoke it + if message_type == "pmessage": + handler = self.patterns.get(message["pattern"], None) + elif message_type == "smessage": + handler = self.shard_channels.get(message["channel"], None) + else: + handler = self.channels.get(message["channel"], None) + if handler: + handler(message) + return None + elif message_type != "pong": + # this is a subscribe/unsubscribe message. 
ignore if we don't + # want them + if ignore_subscribe_messages or self.ignore_subscribe_messages: + return None + + return message + + def run_in_thread( + self, + sleep_time: float = 0.0, + daemon: bool = False, + exception_handler: Optional[Callable] = None, + ) -> "PubSubWorkerThread": + for channel, handler in self.channels.items(): + if handler is None: + raise PubSubError(f"Channel: '{channel}' has no handler registered") + for pattern, handler in self.patterns.items(): + if handler is None: + raise PubSubError(f"Pattern: '{pattern}' has no handler registered") + for s_channel, handler in self.shard_channels.items(): + if handler is None: + raise PubSubError( + f"Shard Channel: '{s_channel}' has no handler registered" + ) + + thread = PubSubWorkerThread( + self, sleep_time, daemon=daemon, exception_handler=exception_handler + ) + thread.start() + return thread + + +class PubSubWorkerThread(threading.Thread): + def __init__( + self, + pubsub, + sleep_time: float, + daemon: bool = False, + exception_handler: Union[ + Callable[[Exception, "PubSub", "PubSubWorkerThread"], None], None + ] = None, + ): + super().__init__() + self.daemon = daemon + self.pubsub = pubsub + self.sleep_time = sleep_time + self.exception_handler = exception_handler + self._running = threading.Event() + + def run(self) -> None: + if self._running.is_set(): + return + self._running.set() + pubsub = self.pubsub + sleep_time = self.sleep_time + while self._running.is_set(): + try: + pubsub.get_message(ignore_subscribe_messages=True, timeout=sleep_time) + except BaseException as e: + if self.exception_handler is None: + raise + self.exception_handler(e, pubsub, self) + pubsub.close() + + def stop(self) -> None: + # trip the flag so the run loop exits. the run loop will + # close the pubsub connection, which disconnects the socket + # and returns the connection to the pool. + self._running.clear() + + +class Pipeline(Redis): + """ + Pipelines provide a way to transmit multiple commands to the Redis server + in one transmission. This is convenient for batch processing, such as + saving all the values in a list to Redis. + + All commands executed within a pipeline are wrapped with MULTI and EXEC + calls. This guarantees all commands executed in the pipeline will be + executed atomically. + + Any command raising an exception does *not* halt the execution of + subsequent commands in the pipeline. Instead, the exception is caught + and its instance is placed into the response list returned by execute(). + Code iterating over the response list should be able to deal with an + instance of an exception as a potential value. In general, these will be + ResponseError exceptions, such as those raised when issuing a command + on a key of a different datatype. 
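`run_in_thread` pairs with handler-based subscriptions: every subscribed channel or pattern must have a callable attached, and the worker thread dispatches to it. A sketch, assuming a local server:

```python
import time

from redis import Redis

r = Redis()  # assumes a local redis-server
p = r.pubsub(ignore_subscribe_messages=True)


def handler(message):
    # Invoked by the worker thread for each published message.
    print(message["channel"], message["data"])


p.subscribe(**{"news": handler})  # channel name -> callable
thread = p.run_in_thread(sleep_time=0.1, daemon=True)

r.publish("news", "hello")
time.sleep(0.5)  # give the worker thread a moment to deliver

thread.stop()
```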
+ """ + + UNWATCH_COMMANDS = {"DISCARD", "EXEC", "UNWATCH"} + + def __init__(self, connection_pool, response_callbacks, transaction, shard_hint): + self.connection_pool = connection_pool + self.connection = None + self.response_callbacks = response_callbacks + self.transaction = transaction + self.shard_hint = shard_hint + + self.watching = False + self.reset() + + def __enter__(self) -> "Pipeline": + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.reset() + + def __del__(self): + try: + self.reset() + except Exception: + pass + + def __len__(self) -> int: + return len(self.command_stack) + + def __bool__(self) -> bool: + """Pipeline instances should always evaluate to True""" + return True + + def reset(self) -> None: + self.command_stack = [] + self.scripts = set() + # make sure to reset the connection state in the event that we were + # watching something + if self.watching and self.connection: + try: + # call this manually since our unwatch or + # immediate_execute_command methods can call reset() + self.connection.send_command("UNWATCH") + self.connection.read_response() + except ConnectionError: + # disconnect will also remove any previous WATCHes + self.connection.disconnect() + # clean up the other instance attributes + self.watching = False + self.explicit_transaction = False + # we can safely return the connection to the pool here since we're + # sure we're no longer WATCHing anything + if self.connection: + self.connection_pool.release(self.connection) + self.connection = None + + def close(self) -> None: + """Close the pipeline""" + self.reset() + + def multi(self) -> None: + """ + Start a transactional block of the pipeline after WATCH commands + are issued. End the transactional block with `execute`. + """ + if self.explicit_transaction: + raise RedisError("Cannot issue nested calls to MULTI") + if self.command_stack: + raise RedisError( + "Commands without an initial WATCH have already been issued" + ) + self.explicit_transaction = True + + def execute_command(self, *args, **kwargs): + if (self.watching or args[0] == "WATCH") and not self.explicit_transaction: + return self.immediate_execute_command(*args, **kwargs) + return self.pipeline_execute_command(*args, **kwargs) + + def _disconnect_reset_raise(self, conn, error) -> None: + """ + Close the connection, reset watching state and + raise an exception if we were watching, + if retry_on_error is not set or the error is not one + of the specified error types. + """ + conn.disconnect() + # if we were already watching a variable, the watch is no longer + # valid since this connection has died. raise a WatchError, which + # indicates the user should retry this transaction. + if self.watching: + self.reset() + raise WatchError( + "A ConnectionError occurred on while watching one or more keys" + ) + # if retry_on_error is not set or the error is not one + # of the specified error types, raise it + if ( + conn.retry_on_error is None + or isinstance(error, tuple(conn.retry_on_error)) is False + ): + self.reset() + raise + + def immediate_execute_command(self, *args, **options): + """ + Execute a command immediately, but don't auto-retry on a + ConnectionError if we're already WATCHing a variable. Used when + issuing WATCH or subsequent commands retrieving their values but before + MULTI is called. 
+ """ + command_name = args[0] + conn = self.connection + # if this is the first call, we need a connection + if not conn: + conn = self.connection_pool.get_connection(command_name, self.shard_hint) + self.connection = conn + + return conn.retry.call_with_retry( + lambda: self._send_command_parse_response( + conn, command_name, *args, **options + ), + lambda error: self._disconnect_reset_raise(conn, error), + ) + + def pipeline_execute_command(self, *args, **options) -> "Pipeline": + """ + Stage a command to be executed when execute() is next called + + Returns the current Pipeline object back so commands can be + chained together, such as: + + pipe = pipe.set('foo', 'bar').incr('baz').decr('bang') + + At some other point, you can then run: pipe.execute(), + which will execute all commands queued in the pipe. + """ + self.command_stack.append((args, options)) + return self + + def _execute_transaction(self, connection, commands, raise_on_error) -> List: + cmds = chain([(("MULTI",), {})], commands, [(("EXEC",), {})]) + all_cmds = connection.pack_commands( + [args for args, options in cmds if EMPTY_RESPONSE not in options] + ) + connection.send_packed_command(all_cmds) + errors = [] + + # parse off the response for MULTI + # NOTE: we need to handle ResponseErrors here and continue + # so that we read all the additional command messages from + # the socket + try: + self.parse_response(connection, "_") + except ResponseError as e: + errors.append((0, e)) + + # and all the other commands + for i, command in enumerate(commands): + if EMPTY_RESPONSE in command[1]: + errors.append((i, command[1][EMPTY_RESPONSE])) + else: + try: + self.parse_response(connection, "_") + except ResponseError as e: + self.annotate_exception(e, i + 1, command[0]) + errors.append((i, e)) + + # parse the EXEC. 
+ try: + response = self.parse_response(connection, "_") + except ExecAbortError: + if errors: + raise errors[0][1] + raise + + # EXEC clears any watched keys + self.watching = False + + if response is None: + raise WatchError("Watched variable changed.") + + # put any parse errors into the response + for i, e in errors: + response.insert(i, e) + + if len(response) != len(commands): + self.connection.disconnect() + raise ResponseError( + "Wrong number of response items from pipeline execution" + ) + + # find any errors in the response and raise if necessary + if raise_on_error: + self.raise_first_error(commands, response) + + # We have to run response callbacks manually + data = [] + for r, cmd in zip(response, commands): + if not isinstance(r, Exception): + args, options = cmd + command_name = args[0] + if command_name in self.response_callbacks: + r = self.response_callbacks[command_name](r, **options) + data.append(r) + return data + + def _execute_pipeline(self, connection, commands, raise_on_error): + # build up all commands into a single request to increase network perf + all_cmds = connection.pack_commands([args for args, _ in commands]) + connection.send_packed_command(all_cmds) + + response = [] + for args, options in commands: + try: + response.append(self.parse_response(connection, args[0], **options)) + except ResponseError as e: + response.append(e) + + if raise_on_error: + self.raise_first_error(commands, response) + return response + + def raise_first_error(self, commands, response): + for i, r in enumerate(response): + if isinstance(r, ResponseError): + self.annotate_exception(r, i + 1, commands[i][0]) + raise r + + def annotate_exception(self, exception, number, command): + cmd = " ".join(map(safe_str, command)) + msg = ( + f"Command # {number} ({cmd}) of pipeline " + f"caused error: {exception.args[0]}" + ) + exception.args = (msg,) + exception.args[1:] + + def parse_response(self, connection, command_name, **options): + result = Redis.parse_response(self, connection, command_name, **options) + if command_name in self.UNWATCH_COMMANDS: + self.watching = False + elif command_name == "WATCH": + self.watching = True + return result + + def load_scripts(self): + # make sure all scripts that are about to be run on this pipeline exist + scripts = list(self.scripts) + immediate = self.immediate_execute_command + shas = [s.sha for s in scripts] + # we can't use the normal script_* methods because they would just + # get buffered in the pipeline. + exists = immediate("SCRIPT EXISTS", *shas) + if not all(exists): + for s, exist in zip(scripts, exists): + if not exist: + s.sha = immediate("SCRIPT LOAD", s.script) + + def _disconnect_raise_reset( + self, + conn: AbstractConnection, + error: Exception, + ) -> None: + """ + Close the connection, raise an exception if we were watching, + and raise an exception if retry_on_error is not set or the + error is not one of the specified error types. + """ + conn.disconnect() + # if we were watching a variable, the watch is no longer valid + # since this connection has died. raise a WatchError, which + # indicates the user should retry this transaction. 
+ if self.watching: + raise WatchError( + "A ConnectionError occurred on while watching one or more keys" + ) + # if retry_on_error is not set or the error is not one + # of the specified error types, raise it + if ( + conn.retry_on_error is None + or isinstance(error, tuple(conn.retry_on_error)) is False + ): + + self.reset() + raise error + + def execute(self, raise_on_error=True): + """Execute all the commands in the current pipeline""" + stack = self.command_stack + if not stack and not self.watching: + return [] + if self.scripts: + self.load_scripts() + if self.transaction or self.explicit_transaction: + execute = self._execute_transaction + else: + execute = self._execute_pipeline + + conn = self.connection + if not conn: + conn = self.connection_pool.get_connection("MULTI", self.shard_hint) + # assign to self.connection so reset() releases the connection + # back to the pool after we're done + self.connection = conn + + try: + return conn.retry.call_with_retry( + lambda: execute(conn, stack, raise_on_error), + lambda error: self._disconnect_raise_reset(conn, error), + ) + finally: + self.reset() + + def discard(self): + """ + Flushes all previously queued commands + See: https://redis.io/commands/DISCARD + """ + self.execute_command("DISCARD") + + def watch(self, *names): + """Watches the values at keys ``names``""" + if self.explicit_transaction: + raise RedisError("Cannot issue a WATCH after a MULTI") + return self.execute_command("WATCH", *names) + + def unwatch(self) -> bool: + """Unwatches all previously specified keys""" + return self.watching and self.execute_command("UNWATCH") or True diff --git a/templates/skills/spotify/dependencies/redis/cluster.py b/templates/skills/spotify/dependencies/redis/cluster.py new file mode 100644 index 00000000..4dc0e389 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/cluster.py @@ -0,0 +1,2490 @@ +import random +import socket +import sys +import threading +import time +from collections import OrderedDict +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from redis._parsers import CommandsParser, Encoder +from redis._parsers.helpers import parse_scan +from redis.backoff import default_backoff +from redis.client import CaseInsensitiveDict, PubSub, Redis +from redis.commands import READ_COMMANDS, RedisClusterCommands +from redis.commands.helpers import list_or_args +from redis.connection import ConnectionPool, DefaultParser, parse_url +from redis.crc import REDIS_CLUSTER_HASH_SLOTS, key_slot +from redis.exceptions import ( + AskError, + AuthenticationError, + ClusterCrossSlotError, + ClusterDownError, + ClusterError, + ConnectionError, + DataError, + MasterDownError, + MovedError, + RedisClusterException, + RedisError, + ResponseError, + SlotNotCoveredError, + TimeoutError, + TryAgainError, +) +from redis.lock import Lock +from redis.retry import Retry +from redis.utils import ( + HIREDIS_AVAILABLE, + dict_merge, + list_keys_to_dict, + merge_result, + safe_str, + str_if_bytes, +) + + +def get_node_name(host: str, port: Union[str, int]) -> str: + return f"{host}:{port}" + + +def get_connection(redis_node, *args, **options): + return redis_node.connection or redis_node.connection_pool.get_connection( + args[0], **options + ) + + +def parse_scan_result(command, res, **options): + cursors = {} + ret = [] + for node_name, response in res.items(): + cursor, r = parse_scan(response, **options) + cursors[node_name] = cursor + ret += r + + return cursors, ret + + +def parse_pubsub_numsub(command, res, **options): + 
numsub_d = OrderedDict() + for numsub_tups in res.values(): + for channel, numsubbed in numsub_tups: + try: + numsub_d[channel] += numsubbed + except KeyError: + numsub_d[channel] = numsubbed + + ret_numsub = [(channel, numsub) for channel, numsub in numsub_d.items()] + return ret_numsub + + +def parse_cluster_slots( + resp: Any, **options: Any +) -> Dict[Tuple[int, int], Dict[str, Any]]: + current_host = options.get("current_host", "") + + def fix_server(*args: Any) -> Tuple[str, Any]: + return str_if_bytes(args[0]) or current_host, args[1] + + slots = {} + for slot in resp: + start, end, primary = slot[:3] + replicas = slot[3:] + slots[start, end] = { + "primary": fix_server(*primary), + "replicas": [fix_server(*replica) for replica in replicas], + } + + return slots + + +def parse_cluster_shards(resp, **options): + """ + Parse CLUSTER SHARDS response. + """ + if isinstance(resp[0], dict): + return resp + shards = [] + for x in resp: + shard = {"slots": [], "nodes": []} + for i in range(0, len(x[1]), 2): + shard["slots"].append((x[1][i], (x[1][i + 1]))) + nodes = x[3] + for node in nodes: + dict_node = {} + for i in range(0, len(node), 2): + dict_node[node[i]] = node[i + 1] + shard["nodes"].append(dict_node) + shards.append(shard) + + return shards + + +def parse_cluster_myshardid(resp, **options): + """ + Parse CLUSTER MYSHARDID response. + """ + return resp.decode("utf-8") + + +PRIMARY = "primary" +REPLICA = "replica" +SLOT_ID = "slot-id" + +REDIS_ALLOWED_KEYS = ( + "charset", + "connection_class", + "connection_pool", + "connection_pool_class", + "client_name", + "credential_provider", + "db", + "decode_responses", + "encoding", + "encoding_errors", + "errors", + "host", + "lib_name", + "lib_version", + "max_connections", + "nodes_flag", + "redis_connect_func", + "password", + "port", + "queue_class", + "retry", + "retry_on_timeout", + "protocol", + "socket_connect_timeout", + "socket_keepalive", + "socket_keepalive_options", + "socket_timeout", + "ssl", + "ssl_ca_certs", + "ssl_ca_data", + "ssl_certfile", + "ssl_cert_reqs", + "ssl_keyfile", + "ssl_password", + "unix_socket_path", + "username", +) +KWARGS_DISABLED_KEYS = ("host", "port") + + +def cleanup_kwargs(**kwargs): + """ + Remove unsupported or disabled keys from kwargs + """ + connection_kwargs = { + k: v + for k, v in kwargs.items() + if k in REDIS_ALLOWED_KEYS and k not in KWARGS_DISABLED_KEYS + } + + return connection_kwargs + + +class ClusterParser(DefaultParser): + EXCEPTION_CLASSES = dict_merge( + DefaultParser.EXCEPTION_CLASSES, + { + "ASK": AskError, + "TRYAGAIN": TryAgainError, + "MOVED": MovedError, + "CLUSTERDOWN": ClusterDownError, + "CROSSSLOT": ClusterCrossSlotError, + "MASTERDOWN": MasterDownError, + }, + ) + + +class AbstractRedisCluster: + RedisClusterRequestTTL = 16 + + PRIMARIES = "primaries" + REPLICAS = "replicas" + ALL_NODES = "all" + RANDOM = "random" + DEFAULT_NODE = "default-node" + + NODE_FLAGS = {PRIMARIES, REPLICAS, ALL_NODES, RANDOM, DEFAULT_NODE} + + COMMAND_FLAGS = dict_merge( + list_keys_to_dict( + [ + "ACL CAT", + "ACL DELUSER", + "ACL DRYRUN", + "ACL GENPASS", + "ACL GETUSER", + "ACL HELP", + "ACL LIST", + "ACL LOG", + "ACL LOAD", + "ACL SAVE", + "ACL SETUSER", + "ACL USERS", + "ACL WHOAMI", + "AUTH", + "CLIENT LIST", + "CLIENT SETINFO", + "CLIENT SETNAME", + "CLIENT GETNAME", + "CONFIG SET", + "CONFIG REWRITE", + "CONFIG RESETSTAT", + "TIME", + "PUBSUB CHANNELS", + "PUBSUB NUMPAT", + "PUBSUB NUMSUB", + "PUBSUB SHARDCHANNELS", + "PUBSUB SHARDNUMSUB", + "PING", + "INFO", + "SHUTDOWN", + "KEYS", 
+ "DBSIZE", + "BGSAVE", + "SLOWLOG GET", + "SLOWLOG LEN", + "SLOWLOG RESET", + "WAIT", + "WAITAOF", + "SAVE", + "MEMORY PURGE", + "MEMORY MALLOC-STATS", + "MEMORY STATS", + "LASTSAVE", + "CLIENT TRACKINGINFO", + "CLIENT PAUSE", + "CLIENT UNPAUSE", + "CLIENT UNBLOCK", + "CLIENT ID", + "CLIENT REPLY", + "CLIENT GETREDIR", + "CLIENT INFO", + "CLIENT KILL", + "READONLY", + "CLUSTER INFO", + "CLUSTER MEET", + "CLUSTER MYSHARDID", + "CLUSTER NODES", + "CLUSTER REPLICAS", + "CLUSTER RESET", + "CLUSTER SET-CONFIG-EPOCH", + "CLUSTER SLOTS", + "CLUSTER SHARDS", + "CLUSTER COUNT-FAILURE-REPORTS", + "CLUSTER KEYSLOT", + "COMMAND", + "COMMAND COUNT", + "COMMAND LIST", + "COMMAND GETKEYS", + "CONFIG GET", + "DEBUG", + "RANDOMKEY", + "READONLY", + "READWRITE", + "TIME", + "TFUNCTION LOAD", + "TFUNCTION DELETE", + "TFUNCTION LIST", + "TFCALL", + "TFCALLASYNC", + "GRAPH.CONFIG", + "LATENCY HISTORY", + "LATENCY LATEST", + "LATENCY RESET", + "MODULE LIST", + "MODULE LOAD", + "MODULE UNLOAD", + "MODULE LOADEX", + ], + DEFAULT_NODE, + ), + list_keys_to_dict( + [ + "FLUSHALL", + "FLUSHDB", + "FUNCTION DELETE", + "FUNCTION FLUSH", + "FUNCTION LIST", + "FUNCTION LOAD", + "FUNCTION RESTORE", + "REDISGEARS_2.REFRESHCLUSTER", + "SCAN", + "SCRIPT EXISTS", + "SCRIPT FLUSH", + "SCRIPT LOAD", + ], + PRIMARIES, + ), + list_keys_to_dict(["FUNCTION DUMP"], RANDOM), + list_keys_to_dict( + [ + "CLUSTER COUNTKEYSINSLOT", + "CLUSTER DELSLOTS", + "CLUSTER DELSLOTSRANGE", + "CLUSTER GETKEYSINSLOT", + "CLUSTER SETSLOT", + ], + SLOT_ID, + ), + ) + + SEARCH_COMMANDS = ( + [ + "FT.CREATE", + "FT.SEARCH", + "FT.AGGREGATE", + "FT.EXPLAIN", + "FT.EXPLAINCLI", + "FT,PROFILE", + "FT.ALTER", + "FT.DROPINDEX", + "FT.ALIASADD", + "FT.ALIASUPDATE", + "FT.ALIASDEL", + "FT.TAGVALS", + "FT.SUGADD", + "FT.SUGGET", + "FT.SUGDEL", + "FT.SUGLEN", + "FT.SYNUPDATE", + "FT.SYNDUMP", + "FT.SPELLCHECK", + "FT.DICTADD", + "FT.DICTDEL", + "FT.DICTDUMP", + "FT.INFO", + "FT._LIST", + "FT.CONFIG", + "FT.ADD", + "FT.DEL", + "FT.DROP", + "FT.GET", + "FT.MGET", + "FT.SYNADD", + ], + ) + + CLUSTER_COMMANDS_RESPONSE_CALLBACKS = { + "CLUSTER SLOTS": parse_cluster_slots, + "CLUSTER SHARDS": parse_cluster_shards, + "CLUSTER MYSHARDID": parse_cluster_myshardid, + } + + RESULT_CALLBACKS = dict_merge( + list_keys_to_dict(["PUBSUB NUMSUB", "PUBSUB SHARDNUMSUB"], parse_pubsub_numsub), + list_keys_to_dict( + ["PUBSUB NUMPAT"], lambda command, res: sum(list(res.values())) + ), + list_keys_to_dict( + ["KEYS", "PUBSUB CHANNELS", "PUBSUB SHARDCHANNELS"], merge_result + ), + list_keys_to_dict( + [ + "PING", + "CONFIG SET", + "CONFIG REWRITE", + "CONFIG RESETSTAT", + "CLIENT SETNAME", + "BGSAVE", + "SLOWLOG RESET", + "SAVE", + "MEMORY PURGE", + "CLIENT PAUSE", + "CLIENT UNPAUSE", + ], + lambda command, res: all(res.values()) if isinstance(res, dict) else res, + ), + list_keys_to_dict( + ["DBSIZE", "WAIT"], + lambda command, res: sum(res.values()) if isinstance(res, dict) else res, + ), + list_keys_to_dict( + ["CLIENT UNBLOCK"], lambda command, res: 1 if sum(res.values()) > 0 else 0 + ), + list_keys_to_dict(["SCAN"], parse_scan_result), + list_keys_to_dict( + ["SCRIPT LOAD"], lambda command, res: list(res.values()).pop() + ), + list_keys_to_dict( + ["SCRIPT EXISTS"], lambda command, res: [all(k) for k in zip(*res.values())] + ), + list_keys_to_dict(["SCRIPT FLUSH"], lambda command, res: all(res.values())), + ) + + ERRORS_ALLOW_RETRY = (ConnectionError, TimeoutError, ClusterDownError) + + def replace_default_node(self, target_node: "ClusterNode" = None) -> None: + """Replace the 
default cluster node.
+        A random cluster node will be chosen if target_node isn't passed, and primaries
+        will be prioritized. The default node will not be changed if there are no other
+        nodes in the cluster.
+
+        Args:
+            target_node (ClusterNode, optional): Target node to replace the default
+            node. Defaults to None.
+        """
+        if target_node:
+            self.nodes_manager.default_node = target_node
+        else:
+            curr_node = self.get_default_node()
+            primaries = [node for node in self.get_primaries() if node != curr_node]
+            if primaries:
+                # Choose a primary if the cluster contains different primaries
+                self.nodes_manager.default_node = random.choice(primaries)
+            else:
+                # Otherwise, choose a replica if the cluster contains replicas
+                replicas = [node for node in self.get_replicas() if node != curr_node]
+                if replicas:
+                    self.nodes_manager.default_node = random.choice(replicas)
+
+
+class RedisCluster(AbstractRedisCluster, RedisClusterCommands):
+    @classmethod
+    def from_url(cls, url, **kwargs):
+        """
+        Return a Redis client object configured from the given URL
+
+        For example::
+
+            redis://[[username]:[password]]@localhost:6379/0
+            rediss://[[username]:[password]]@localhost:6379/0
+            unix://[username@]/path/to/socket.sock?db=0[&password=password]
+
+        Three URL schemes are supported:
+
+        - `redis://` creates a TCP socket connection. See more at:
+          <https://www.iana.org/assignments/uri-schemes/prov/redis>
+        - `rediss://` creates a SSL wrapped TCP socket connection. See more at:
+          <https://www.iana.org/assignments/uri-schemes/prov/rediss>
+        - ``unix://``: creates a Unix Domain Socket connection.
+
+        The username, password, hostname, path and all querystring values
+        are passed through urllib.parse.unquote in order to replace any
+        percent-encoded values with their corresponding characters.
+
+        There are several ways to specify a database number. The first value
+        found will be used:
+
+        1. A ``db`` querystring option, e.g. redis://localhost?db=0
+        2. If using the redis:// or rediss:// schemes, the path argument
+           of the url, e.g. redis://localhost/0
+        3. A ``db`` keyword argument to this function.
+
+        If none of these options are specified, the default db=0 is used.
+
+        All querystring options are cast to their appropriate Python types.
+        Boolean arguments can be specified with string values "True"/"False"
+        or "Yes"/"No". Values that cannot be properly cast cause a
+        ``ValueError`` to be raised. Once parsed, the querystring arguments
+        and keyword arguments are passed to the ``ConnectionPool``'s
+        class initializer. In the case of conflicting arguments, querystring
+        arguments always win.
+
+        """
+        return cls(url=url, **kwargs)
+
+    def __init__(
+        self,
+        host: Optional[str] = None,
+        port: int = 6379,
+        startup_nodes: Optional[List["ClusterNode"]] = None,
+        cluster_error_retry_attempts: int = 3,
+        retry: Optional["Retry"] = None,
+        require_full_coverage: bool = False,
+        reinitialize_steps: int = 5,
+        read_from_replicas: bool = False,
+        dynamic_startup_nodes: bool = True,
+        url: Optional[str] = None,
+        address_remap: Optional[Callable[[Tuple[str, int]], Tuple[str, int]]] = None,
+        **kwargs,
+    ):
+        """
+        Initialize a new RedisCluster client.
+
+        :param startup_nodes:
+            List of nodes from which initial bootstrapping can be done
+        :param host:
+            Can be used to point to a startup node
+        :param port:
+            Can be used to point to a startup node
+        :param require_full_coverage:
+            When set to False (default value): the client will not require a
+            full coverage of the slots.
However, if not all slots are covered, + and at least one node has 'cluster-require-full-coverage' set to + 'yes,' the server will throw a ClusterDownError for some key-based + commands. See - + https://redis.io/topics/cluster-tutorial#redis-cluster-configuration-parameters + When set to True: all slots must be covered to construct the + cluster client. If not all slots are covered, RedisClusterException + will be thrown. + :param read_from_replicas: + Enable read from replicas in READONLY mode. You can read possibly + stale data. + When set to true, read commands will be assigned between the + primary and its replications in a Round-Robin manner. + :param dynamic_startup_nodes: + Set the RedisCluster's startup nodes to all of the discovered nodes. + If true (default value), the cluster's discovered nodes will be used to + determine the cluster nodes-slots mapping in the next topology refresh. + It will remove the initial passed startup nodes if their endpoints aren't + listed in the CLUSTER SLOTS output. + If you use dynamic DNS endpoints for startup nodes but CLUSTER SLOTS lists + specific IP addresses, it is best to set it to false. + :param cluster_error_retry_attempts: + Number of times to retry before raising an error when + :class:`~.TimeoutError` or :class:`~.ConnectionError` or + :class:`~.ClusterDownError` are encountered + :param reinitialize_steps: + Specifies the number of MOVED errors that need to occur before + reinitializing the whole cluster topology. If a MOVED error occurs + and the cluster does not need to be reinitialized on this current + error handling, only the MOVED slot will be patched with the + redirected node. + To reinitialize the cluster on every MOVED error, set + reinitialize_steps to 1. + To avoid reinitializing the cluster on moved errors, set + reinitialize_steps to 0. + :param address_remap: + An optional callable which, when provided with an internal network + address of a node, e.g. a `(host, port)` tuple, will return the address + where the node is reachable. This can be used to map the addresses at + which the nodes _think_ they are, to addresses at which a client may + reach them, such as when they sit behind a proxy. 
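+
+            For example, to reach every node through a local port forward
+            (illustrative)::
+
+                address_remap=lambda addr: ("127.0.0.1", addr[1] + 10000)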
+ + :**kwargs: + Extra arguments that will be sent into Redis instance when created + (See Official redis-py doc for supported kwargs + [https://github.com/andymccurdy/redis-py/blob/master/redis/client.py]) + Some kwargs are not supported and will raise a + RedisClusterException: + - db (Redis do not support database SELECT in cluster mode) + """ + if startup_nodes is None: + startup_nodes = [] + + if "db" in kwargs: + # Argument 'db' is not possible to use in cluster mode + raise RedisClusterException( + "Argument 'db' is not possible to use in cluster mode" + ) + + # Get the startup node/s + from_url = False + if url is not None: + from_url = True + url_options = parse_url(url) + if "path" in url_options: + raise RedisClusterException( + "RedisCluster does not currently support Unix Domain " + "Socket connections" + ) + if "db" in url_options and url_options["db"] != 0: + # Argument 'db' is not possible to use in cluster mode + raise RedisClusterException( + "A ``db`` querystring option can only be 0 in cluster mode" + ) + kwargs.update(url_options) + host = kwargs.get("host") + port = kwargs.get("port", port) + startup_nodes.append(ClusterNode(host, port)) + elif host is not None and port is not None: + startup_nodes.append(ClusterNode(host, port)) + elif len(startup_nodes) == 0: + # No startup node was provided + raise RedisClusterException( + "RedisCluster requires at least one node to discover the " + "cluster. Please provide one of the followings:\n" + "1. host and port, for example:\n" + " RedisCluster(host='localhost', port=6379)\n" + "2. list of startup nodes, for example:\n" + " RedisCluster(startup_nodes=[ClusterNode('localhost', 6379)," + " ClusterNode('localhost', 6378)])" + ) + # Update the connection arguments + # Whenever a new connection is established, RedisCluster's on_connect + # method should be run + # If the user passed on_connect function we'll save it and run it + # inside the RedisCluster.on_connect() function + self.user_on_connect_func = kwargs.pop("redis_connect_func", None) + kwargs.update({"redis_connect_func": self.on_connect}) + kwargs = cleanup_kwargs(**kwargs) + if retry: + self.retry = retry + kwargs.update({"retry": self.retry}) + else: + kwargs.update({"retry": Retry(default_backoff(), 0)}) + + self.encoder = Encoder( + kwargs.get("encoding", "utf-8"), + kwargs.get("encoding_errors", "strict"), + kwargs.get("decode_responses", False), + ) + self.cluster_error_retry_attempts = cluster_error_retry_attempts + self.command_flags = self.__class__.COMMAND_FLAGS.copy() + self.node_flags = self.__class__.NODE_FLAGS.copy() + self.read_from_replicas = read_from_replicas + self.reinitialize_counter = 0 + self.reinitialize_steps = reinitialize_steps + self.nodes_manager = NodesManager( + startup_nodes=startup_nodes, + from_url=from_url, + require_full_coverage=require_full_coverage, + dynamic_startup_nodes=dynamic_startup_nodes, + address_remap=address_remap, + **kwargs, + ) + + self.cluster_response_callbacks = CaseInsensitiveDict( + self.__class__.CLUSTER_COMMANDS_RESPONSE_CALLBACKS + ) + self.result_callbacks = CaseInsensitiveDict(self.__class__.RESULT_CALLBACKS) + self.commands_parser = CommandsParser(self) + self._lock = threading.Lock() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def __del__(self): + self.close() + + def disconnect_connection_pools(self): + for node in self.get_nodes(): + if node.redis_connection: + try: + node.redis_connection.connection_pool.disconnect() + except 
OSError: + # Client was already disconnected. do nothing + pass + + def on_connect(self, connection): + """ + Initialize the connection, authenticate and select a database and send + READONLY if it is set during object initialization. + """ + connection.set_parser(ClusterParser) + connection.on_connect() + + if self.read_from_replicas: + # Sending READONLY command to server to configure connection as + # readonly. Since each cluster node may change its server type due + # to a failover, we should establish a READONLY connection + # regardless of the server type. If this is a primary connection, + # READONLY would not affect executing write commands. + connection.send_command("READONLY") + if str_if_bytes(connection.read_response()) != "OK": + raise ConnectionError("READONLY command failed") + + if self.user_on_connect_func is not None: + self.user_on_connect_func(connection) + + def get_redis_connection(self, node): + if not node.redis_connection: + with self._lock: + if not node.redis_connection: + self.nodes_manager.create_redis_connections([node]) + return node.redis_connection + + def get_node(self, host=None, port=None, node_name=None): + return self.nodes_manager.get_node(host, port, node_name) + + def get_primaries(self): + return self.nodes_manager.get_nodes_by_server_type(PRIMARY) + + def get_replicas(self): + return self.nodes_manager.get_nodes_by_server_type(REPLICA) + + def get_random_node(self): + return random.choice(list(self.nodes_manager.nodes_cache.values())) + + def get_nodes(self): + return list(self.nodes_manager.nodes_cache.values()) + + def get_node_from_key(self, key, replica=False): + """ + Get the node that holds the key's slot. + If replica set to True but the slot doesn't have any replicas, None is + returned. + """ + slot = self.keyslot(key) + slot_cache = self.nodes_manager.slots_cache.get(slot) + if slot_cache is None or len(slot_cache) == 0: + raise SlotNotCoveredError(f'Slot "{slot}" is not covered by the cluster.') + if replica and len(self.nodes_manager.slots_cache[slot]) < 2: + return None + elif replica: + node_idx = 1 + else: + # primary + node_idx = 0 + + return slot_cache[node_idx] + + def get_default_node(self): + """ + Get the cluster's default node + """ + return self.nodes_manager.default_node + + def set_default_node(self, node): + """ + Set the default node of the cluster. + :param node: 'ClusterNode' + :return True if the default node was set, else False + """ + if node is None or self.get_node(node_name=node.name) is None: + return False + self.nodes_manager.default_node = node + return True + + def get_retry(self) -> Optional["Retry"]: + return self.retry + + def set_retry(self, retry: "Retry") -> None: + self.retry = retry + for node in self.get_nodes(): + node.redis_connection.set_retry(retry) + + def monitor(self, target_node=None): + """ + Returns a Monitor object for the specified target node. + The default cluster node will be selected if no target node was + specified. + Monitor is useful for handling the MONITOR command to the redis server. + next_command() method returns one command from monitor + listen() method yields commands from monitor. 
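+
+        For example (illustrative), given a ``RedisCluster`` instance ``rc``::
+
+            with rc.monitor() as m:
+                for command in m.listen():
+                    print(command)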
+ """ + if target_node is None: + target_node = self.get_default_node() + if target_node.redis_connection is None: + raise RedisClusterException( + f"Cluster Node {target_node.name} has no redis_connection" + ) + return target_node.redis_connection.monitor() + + def pubsub(self, node=None, host=None, port=None, **kwargs): + """ + Allows passing a ClusterNode, or host&port, to get a pubsub instance + connected to the specified node + """ + return ClusterPubSub(self, node=node, host=host, port=port, **kwargs) + + def pipeline(self, transaction=None, shard_hint=None): + """ + Cluster impl: + Pipelines do not work in cluster mode the same way they + do in normal mode. Create a clone of this object so + that simulating pipelines will work correctly. Each + command will be called directly when used and + when calling execute() will only return the result stack. + """ + if shard_hint: + raise RedisClusterException("shard_hint is deprecated in cluster mode") + + if transaction: + raise RedisClusterException("transaction is deprecated in cluster mode") + + return ClusterPipeline( + nodes_manager=self.nodes_manager, + commands_parser=self.commands_parser, + startup_nodes=self.nodes_manager.startup_nodes, + result_callbacks=self.result_callbacks, + cluster_response_callbacks=self.cluster_response_callbacks, + cluster_error_retry_attempts=self.cluster_error_retry_attempts, + read_from_replicas=self.read_from_replicas, + reinitialize_steps=self.reinitialize_steps, + lock=self._lock, + ) + + def lock( + self, + name, + timeout=None, + sleep=0.1, + blocking=True, + blocking_timeout=None, + lock_class=None, + thread_local=True, + ): + """ + Return a new Lock object using key ``name`` that mimics + the behavior of threading.Lock. + + If specified, ``timeout`` indicates a maximum life for the lock. + By default, it will remain locked until release() is called. + + ``sleep`` indicates the amount of time to sleep per loop iteration + when the lock is in blocking mode and another client is currently + holding the lock. + + ``blocking`` indicates whether calling ``acquire`` should block until + the lock has been acquired or to fail immediately, causing ``acquire`` + to return False and the lock not being acquired. Defaults to True. + Note this value can be overridden by passing a ``blocking`` + argument to ``acquire``. + + ``blocking_timeout`` indicates the maximum amount of time in seconds to + spend trying to acquire the lock. A value of ``None`` indicates + continue trying forever. ``blocking_timeout`` can be specified as a + float or integer, both representing the number of seconds to wait. + + ``lock_class`` forces the specified lock implementation. Note that as + of redis-py 3.0, the only lock class we implement is ``Lock`` (which is + a Lua-based lock). So, it's unlikely you'll need this parameter, unless + you have created your own custom lock class. + + ``thread_local`` indicates whether the lock token is placed in + thread-local storage. By default, the token is placed in thread local + storage so that a thread only sees its token, not a token set by + another thread. Consider the following timeline: + + time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds. + thread-1 sets the token to "abc" + time: 1, thread-2 blocks trying to acquire `my-lock` using the + Lock instance. + time: 5, thread-1 has not yet completed. redis expires the lock + key. + time: 5, thread-2 acquired `my-lock` now that it's available. 
+ thread-2 sets the token to "xyz" + time: 6, thread-1 finishes its work and calls release(). if the + token is *not* stored in thread local storage, then + thread-1 would see the token value as "xyz" and would be + able to successfully release the thread-2's lock. + + In some use cases it's necessary to disable thread local storage. For + example, if you have code where one thread acquires a lock and passes + that lock instance to a worker thread to release later. If thread + local storage isn't disabled in this case, the worker thread won't see + the token set by the thread that acquired the lock. Our assumption + is that these cases aren't common and as such default to using + thread local storage.""" + if lock_class is None: + lock_class = Lock + return lock_class( + self, + name, + timeout=timeout, + sleep=sleep, + blocking=blocking, + blocking_timeout=blocking_timeout, + thread_local=thread_local, + ) + + def set_response_callback(self, command, callback): + """Set a custom Response Callback""" + self.cluster_response_callbacks[command] = callback + + def _determine_nodes(self, *args, **kwargs) -> List["ClusterNode"]: + # Determine which nodes should be executed the command on. + # Returns a list of target nodes. + command = args[0].upper() + if len(args) >= 2 and f"{args[0]} {args[1]}".upper() in self.command_flags: + command = f"{args[0]} {args[1]}".upper() + + nodes_flag = kwargs.pop("nodes_flag", None) + if nodes_flag is not None: + # nodes flag passed by the user + command_flag = nodes_flag + else: + # get the nodes group for this command if it was predefined + command_flag = self.command_flags.get(command) + if command_flag == self.__class__.RANDOM: + # return a random node + return [self.get_random_node()] + elif command_flag == self.__class__.PRIMARIES: + # return all primaries + return self.get_primaries() + elif command_flag == self.__class__.REPLICAS: + # return all replicas + return self.get_replicas() + elif command_flag == self.__class__.ALL_NODES: + # return all nodes + return self.get_nodes() + elif command_flag == self.__class__.DEFAULT_NODE: + # return the cluster's default node + return [self.nodes_manager.default_node] + elif command in self.__class__.SEARCH_COMMANDS[0]: + return [self.nodes_manager.default_node] + else: + # get the node that holds the key's slot + slot = self.determine_slot(*args) + node = self.nodes_manager.get_node_from_slot( + slot, self.read_from_replicas and command in READ_COMMANDS + ) + return [node] + + def _should_reinitialized(self): + # To reinitialize the cluster on every MOVED error, + # set reinitialize_steps to 1. + # To avoid reinitializing the cluster on moved errors, set + # reinitialize_steps to 0. + if self.reinitialize_steps == 0: + return False + else: + return self.reinitialize_counter % self.reinitialize_steps == 0 + + def keyslot(self, key): + """ + Calculate keyslot for a given key. + See Keys distribution model in https://redis.io/topics/cluster-spec + """ + k = self.encoder.encode(key) + return key_slot(k) + + def _get_command_keys(self, *args): + """ + Get the keys in the command. If the command has no keys in in, None is + returned. + + NOTE: Due to a bug in redis<7.0, this function does not work properly + for EVAL or EVALSHA when the `numkeys` arg is 0. + - issue: https://github.com/redis/redis/issues/9493 + - fix: https://github.com/redis/redis/pull/9733 + + So, don't use this function with EVAL or EVALSHA. 
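+
+        For example (illustrative)::
+
+            self._get_command_keys("SET", "foo", "bar")  # -> ["foo"]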
+ """ + redis_conn = self.get_default_node().redis_connection + return self.commands_parser.get_keys(redis_conn, *args) + + def determine_slot(self, *args): + """ + Figure out what slot to use based on args. + + Raises a RedisClusterException if there's a missing key and we can't + determine what slots to map the command to; or, if the keys don't + all map to the same key slot. + """ + command = args[0] + if self.command_flags.get(command) == SLOT_ID: + # The command contains the slot ID + return args[1] + + # Get the keys in the command + + # EVAL and EVALSHA are common enough that it's wasteful to go to the + # redis server to parse the keys. Besides, there is a bug in redis<7.0 + # where `self._get_command_keys()` fails anyway. So, we special case + # EVAL/EVALSHA. + if command.upper() in ("EVAL", "EVALSHA"): + # command syntax: EVAL "script body" num_keys ... + if len(args) <= 2: + raise RedisClusterException(f"Invalid args in command: {args}") + num_actual_keys = int(args[2]) + eval_keys = args[3 : 3 + num_actual_keys] + # if there are 0 keys, that means the script can be run on any node + # so we can just return a random slot + if len(eval_keys) == 0: + return random.randrange(0, REDIS_CLUSTER_HASH_SLOTS) + keys = eval_keys + else: + keys = self._get_command_keys(*args) + if keys is None or len(keys) == 0: + # FCALL can call a function with 0 keys, that means the function + # can be run on any node so we can just return a random slot + if command.upper() in ("FCALL", "FCALL_RO"): + return random.randrange(0, REDIS_CLUSTER_HASH_SLOTS) + raise RedisClusterException( + "No way to dispatch this command to Redis Cluster. " + "Missing key.\nYou can execute the command by specifying " + f"target nodes.\nCommand: {args}" + ) + + # single key command + if len(keys) == 1: + return self.keyslot(keys[0]) + + # multi-key command; we need to make sure all keys are mapped to + # the same slot + slots = {self.keyslot(key) for key in keys} + if len(slots) != 1: + raise RedisClusterException( + f"{command} - all keys must map to the same key slot" + ) + + return slots.pop() + + def get_encoder(self): + """ + Get the connections' encoder + """ + return self.encoder + + def get_connection_kwargs(self): + """ + Get the connections' key-word arguments + """ + return self.nodes_manager.connection_kwargs + + def _is_nodes_flag(self, target_nodes): + return isinstance(target_nodes, str) and target_nodes in self.node_flags + + def _parse_target_nodes(self, target_nodes): + if isinstance(target_nodes, list): + nodes = target_nodes + elif isinstance(target_nodes, ClusterNode): + # Supports passing a single ClusterNode as a variable + nodes = [target_nodes] + elif isinstance(target_nodes, dict): + # Supports dictionaries of the format {node_name: node}. + # It enables to execute commands with multi nodes as follows: + # rc.cluster_save_config(rc.get_primaries()) + nodes = target_nodes.values() + else: + raise TypeError( + "target_nodes type can be one of the following: " + "node_flag (PRIMARIES, REPLICAS, RANDOM, ALL_NODES)," + "ClusterNode, list, or dict. " + f"The passed type is {type(target_nodes)}" + ) + return nodes + + def execute_command(self, *args, **kwargs): + """ + Wrapper for ERRORS_ALLOW_RETRY error handling. + + It will try the number of times specified by the config option + "self.cluster_error_retry_attempts" which defaults to 3 unless manually + configured. 
+ + If it reaches the number of times, the command will raise the exception + + Key argument :target_nodes: can be passed with the following types: + nodes_flag: PRIMARIES, REPLICAS, ALL_NODES, RANDOM + ClusterNode + list + dict + """ + target_nodes_specified = False + is_default_node = False + target_nodes = None + passed_targets = kwargs.pop("target_nodes", None) + if passed_targets is not None and not self._is_nodes_flag(passed_targets): + target_nodes = self._parse_target_nodes(passed_targets) + target_nodes_specified = True + # If an error that allows retrying was thrown, the nodes and slots + # cache were reinitialized. We will retry executing the command with + # the updated cluster setup only when the target nodes can be + # determined again with the new cache tables. Therefore, when target + # nodes were passed to this function, we cannot retry the command + # execution since the nodes may not be valid anymore after the tables + # were reinitialized. So in case of passed target nodes, + # retry_attempts will be set to 0. + retry_attempts = ( + 0 if target_nodes_specified else self.cluster_error_retry_attempts + ) + # Add one for the first execution + execute_attempts = 1 + retry_attempts + for _ in range(execute_attempts): + try: + res = {} + if not target_nodes_specified: + # Determine the nodes to execute the command on + target_nodes = self._determine_nodes( + *args, **kwargs, nodes_flag=passed_targets + ) + if not target_nodes: + raise RedisClusterException( + f"No targets were found to execute {args} command on" + ) + if ( + len(target_nodes) == 1 + and target_nodes[0] == self.get_default_node() + ): + is_default_node = True + for node in target_nodes: + res[node.name] = self._execute_command(node, *args, **kwargs) + # Return the processed result + return self._process_result(args[0], res, **kwargs) + except Exception as e: + if retry_attempts > 0 and type(e) in self.__class__.ERRORS_ALLOW_RETRY: + if is_default_node: + # Replace the default cluster node + self.replace_default_node() + # The nodes and slots cache were reinitialized. + # Try again with the new cluster setup. + retry_attempts -= 1 + continue + else: + # raise the exception + raise e + + def _execute_command(self, target_node, *args, **kwargs): + """ + Send a command to a node in the cluster + """ + command = args[0] + redis_node = None + connection = None + redirect_addr = None + asking = False + moved = False + ttl = int(self.RedisClusterRequestTTL) + + while ttl > 0: + ttl -= 1 + try: + if asking: + target_node = self.get_node(node_name=redirect_addr) + elif moved: + # MOVED occurred and the slots cache was updated, + # refresh the target node + slot = self.determine_slot(*args) + target_node = self.nodes_manager.get_node_from_slot( + slot, self.read_from_replicas and command in READ_COMMANDS + ) + moved = False + + redis_node = self.get_redis_connection(target_node) + connection = get_connection(redis_node, *args, **kwargs) + if asking: + connection.send_command("ASKING") + redis_node.parse_response(connection, "ASKING", **kwargs) + asking = False + + connection.send_command(*args) + response = redis_node.parse_response(connection, command, **kwargs) + if command in self.cluster_response_callbacks: + response = self.cluster_response_callbacks[command]( + response, **kwargs + ) + return response + except AuthenticationError: + raise + except (ConnectionError, TimeoutError) as e: + # Connection retries are being handled in the node's + # Retry object. 
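+                # (Note: unless the caller passed a Retry object, __init__
+                # installs Retry(default_backoff(), 0), i.e. no transport-level
+                # retries happen here by default.)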
+ # ConnectionError can also be raised if we couldn't get a + # connection from the pool before timing out, so check that + # this is an actual connection before attempting to disconnect. + if connection is not None: + connection.disconnect() + + # Remove the failed node from the startup nodes before we try + # to reinitialize the cluster + self.nodes_manager.startup_nodes.pop(target_node.name, None) + # Reset the cluster node's connection + target_node.redis_connection = None + self.nodes_manager.initialize() + raise e + except MovedError as e: + # First, we will try to patch the slots/nodes cache with the + # redirected node output and try again. If MovedError exceeds + # 'reinitialize_steps' number of times, we will force + # reinitializing the tables, and then try again. + # 'reinitialize_steps' counter will increase faster when + # the same client object is shared between multiple threads. To + # reduce the frequency you can set this variable in the + # RedisCluster constructor. + self.reinitialize_counter += 1 + if self._should_reinitialized(): + self.nodes_manager.initialize() + # Reset the counter + self.reinitialize_counter = 0 + else: + self.nodes_manager.update_moved_exception(e) + moved = True + except TryAgainError: + if ttl < self.RedisClusterRequestTTL / 2: + time.sleep(0.05) + except AskError as e: + redirect_addr = get_node_name(host=e.host, port=e.port) + asking = True + except ClusterDownError as e: + # ClusterDownError can occur during a failover and to get + # self-healed, we will try to reinitialize the cluster layout + # and retry executing the command + time.sleep(0.25) + self.nodes_manager.initialize() + raise e + except ResponseError: + raise + except Exception as e: + if connection: + connection.disconnect() + raise e + finally: + if connection is not None: + redis_node.connection_pool.release(connection) + + raise ClusterError("TTL exhausted.") + + def close(self): + try: + with self._lock: + if self.nodes_manager: + self.nodes_manager.close() + except AttributeError: + # RedisCluster's __init__ can fail before nodes_manager is set + pass + + def _process_result(self, command, res, **kwargs): + """ + Process the result of the executed command. + The function would return a dict or a single value. + + :type command: str + :type res: dict + + `res` should be in the following format: + Dict + """ + if command in self.result_callbacks: + return self.result_callbacks[command](command, res, **kwargs) + elif len(res) == 1: + # When we execute the command on a single node, we can + # remove the dictionary and return a single response + return list(res.values())[0] + else: + return res + + def load_external_module(self, funcname, func): + """ + This function can be used to add externally defined redis modules, + and their namespaces to the redis client. + + ``funcname`` - A string containing the name of the function to create + ``func`` - The function, being added to this class. 
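+
+        For example (illustrative), given some callable ``my_func``::
+
+            rc.load_external_module("my_func", my_func)
+            rc.my_func(...)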
+ """ + setattr(self, funcname, func) + + +class ClusterNode: + def __init__(self, host, port, server_type=None, redis_connection=None): + if host == "localhost": + host = socket.gethostbyname(host) + + self.host = host + self.port = port + self.name = get_node_name(host, port) + self.server_type = server_type + self.redis_connection = redis_connection + + def __repr__(self): + return ( + f"[host={self.host}," + f"port={self.port}," + f"name={self.name}," + f"server_type={self.server_type}," + f"redis_connection={self.redis_connection}]" + ) + + def __eq__(self, obj): + return isinstance(obj, ClusterNode) and obj.name == self.name + + def __del__(self): + if self.redis_connection is not None: + self.redis_connection.close() + + +class LoadBalancer: + """ + Round-Robin Load Balancing + """ + + def __init__(self, start_index: int = 0) -> None: + self.primary_to_idx = {} + self.start_index = start_index + + def get_server_index(self, primary: str, list_size: int) -> int: + server_index = self.primary_to_idx.setdefault(primary, self.start_index) + # Update the index + self.primary_to_idx[primary] = (server_index + 1) % list_size + return server_index + + def reset(self) -> None: + self.primary_to_idx.clear() + + +class NodesManager: + def __init__( + self, + startup_nodes, + from_url=False, + require_full_coverage=False, + lock=None, + dynamic_startup_nodes=True, + connection_pool_class=ConnectionPool, + address_remap: Optional[Callable[[str, int], Tuple[str, int]]] = None, + **kwargs, + ): + self.nodes_cache = {} + self.slots_cache = {} + self.startup_nodes = {} + self.default_node = None + self.populate_startup_nodes(startup_nodes) + self.from_url = from_url + self._require_full_coverage = require_full_coverage + self._dynamic_startup_nodes = dynamic_startup_nodes + self.connection_pool_class = connection_pool_class + self.address_remap = address_remap + self._moved_exception = None + self.connection_kwargs = kwargs + self.read_load_balancer = LoadBalancer() + if lock is None: + lock = threading.Lock() + self._lock = lock + self.initialize() + + def get_node(self, host=None, port=None, node_name=None): + """ + Get the requested node from the cluster's nodes. + nodes. + :return: ClusterNode if the node exists, else None + """ + if host and port: + # the user passed host and port + if host == "localhost": + host = socket.gethostbyname(host) + return self.nodes_cache.get(get_node_name(host=host, port=port)) + elif node_name: + return self.nodes_cache.get(node_name) + else: + return None + + def update_moved_exception(self, exception): + self._moved_exception = exception + + def _update_moved_slots(self): + """ + Update the slot's node with the redirected one + """ + e = self._moved_exception + redirected_node = self.get_node(host=e.host, port=e.port) + if redirected_node is not None: + # The node already exists + if redirected_node.server_type is not PRIMARY: + # Update the node's server type + redirected_node.server_type = PRIMARY + else: + # This is a new node, we will add it to the nodes cache + redirected_node = ClusterNode(e.host, e.port, PRIMARY) + self.nodes_cache[redirected_node.name] = redirected_node + if redirected_node in self.slots_cache[e.slot_id]: + # The MOVED error resulted from a failover, and the new slot owner + # had previously been a replica. 
+            old_primary = self.slots_cache[e.slot_id][0]
+            # Update the old primary to be a replica and add it to the end of
+            # the slot's node list
+            old_primary.server_type = REPLICA
+            self.slots_cache[e.slot_id].append(old_primary)
+            # Remove the old replica, which is now a primary, from the slot's
+            # node list
+            self.slots_cache[e.slot_id].remove(redirected_node)
+            # Override the old primary with the new one
+            self.slots_cache[e.slot_id][0] = redirected_node
+            if self.default_node == old_primary:
+                # Update the default node with the new primary
+                self.default_node = redirected_node
+        else:
+            # The new slot owner is a new server, or a server from a different
+            # shard. We need to remove all current nodes from the slot's list
+            # (including replications) and add just the new node.
+            self.slots_cache[e.slot_id] = [redirected_node]
+        # Reset moved_exception
+        self._moved_exception = None
+
+    def get_node_from_slot(self, slot, read_from_replicas=False, server_type=None):
+        """
+        Gets a node that serves this hash slot
+        """
+        if self._moved_exception:
+            with self._lock:
+                if self._moved_exception:
+                    self._update_moved_slots()
+
+        if self.slots_cache.get(slot) is None or len(self.slots_cache[slot]) == 0:
+            raise SlotNotCoveredError(
+                f'Slot "{slot}" not covered by the cluster. '
+                f'"require_full_coverage={self._require_full_coverage}"'
+            )
+
+        if read_from_replicas is True:
+            # get the server index in a Round-Robin manner
+            primary_name = self.slots_cache[slot][0].name
+            node_idx = self.read_load_balancer.get_server_index(
+                primary_name, len(self.slots_cache[slot])
+            )
+        elif (
+            server_type is None
+            or server_type == PRIMARY
+            or len(self.slots_cache[slot]) == 1
+        ):
+            # return a primary
+            node_idx = 0
+        else:
+            # return a replica
+            # randomly choose one of the replicas
+            node_idx = random.randint(1, len(self.slots_cache[slot]) - 1)
+
+        return self.slots_cache[slot][node_idx]
+
+    def get_nodes_by_server_type(self, server_type):
+        """
+        Get all nodes with the specified server type
+        :param server_type: 'primary' or 'replica'
+        :return: list of ClusterNode
+        """
+        return [
+            node
+            for node in self.nodes_cache.values()
+            if node.server_type == server_type
+        ]
+
+    def populate_startup_nodes(self, nodes):
+        """
+        Populate all startup nodes and filter out any duplicates
+        """
+        for n in nodes:
+            self.startup_nodes[n.name] = n
+
+    def check_slots_coverage(self, slots_cache):
+        # Validate if all slots are covered or if we should try next
+        # startup node
+        for i in range(0, REDIS_CLUSTER_HASH_SLOTS):
+            if i not in slots_cache:
+                return False
+        return True
+
+    def create_redis_connections(self, nodes):
+        """
+        This function will create a redis connection to all nodes in :nodes:
+        """
+        for node in nodes:
+            if node.redis_connection is None:
+                node.redis_connection = self.create_redis_node(
+                    host=node.host, port=node.port, **self.connection_kwargs
+                )
+
+    def create_redis_node(self, host, port, **kwargs):
+        if self.from_url:
+            # Create a redis node with a customized connection pool
+            kwargs.update({"host": host})
+            kwargs.update({"port": port})
+            r = Redis(connection_pool=self.connection_pool_class(**kwargs))
+        else:
+            r = Redis(host=host, port=port, **kwargs)
+        return r
+
+    def _get_or_create_cluster_node(self, host, port, role, tmp_nodes_cache):
+        node_name = get_node_name(host, port)
+        # check if we already have this node in the tmp_nodes_cache
+        target_node = tmp_nodes_cache.get(node_name)
+        if target_node is None:
+            # before creating a new cluster node, check if the cluster node already
+            # exists
in the current nodes cache and has a valid connection so we can + # reuse it + target_node = self.nodes_cache.get(node_name) + if target_node is None or target_node.redis_connection is None: + # create new cluster node for this cluster + target_node = ClusterNode(host, port, role) + if target_node.server_type != role: + target_node.server_type = role + + return target_node + + def initialize(self): + """ + Initializes the nodes cache, slots cache and redis connections. + :startup_nodes: + Responsible for discovering other nodes in the cluster + """ + self.reset() + tmp_nodes_cache = {} + tmp_slots = {} + disagreements = [] + startup_nodes_reachable = False + fully_covered = False + kwargs = self.connection_kwargs + exception = None + for startup_node in self.startup_nodes.values(): + try: + if startup_node.redis_connection: + r = startup_node.redis_connection + else: + # Create a new Redis connection + r = self.create_redis_node( + startup_node.host, startup_node.port, **kwargs + ) + self.startup_nodes[startup_node.name].redis_connection = r + # Make sure cluster mode is enabled on this node + if bool(r.info().get("cluster_enabled")) is False: + raise RedisClusterException( + "Cluster mode is not enabled on this node" + ) + cluster_slots = str_if_bytes(r.execute_command("CLUSTER SLOTS")) + startup_nodes_reachable = True + except Exception as e: + # Try the next startup node. + # The exception is saved and raised only if we have no more nodes. + exception = e + continue + + # CLUSTER SLOTS command results in the following output: + # [[slot_section[from_slot,to_slot,master,replica1,...,replicaN]]] + # where each node contains the following list: [IP, port, node_id] + # Therefore, cluster_slots[0][2][0] will be the IP address of the + # primary node of the first slot section. 
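+            # For example (illustrative values):
+            #   [[0, 5460, ["10.0.0.1", 6379, "node-id-1"],
+            #              ["10.0.0.2", 6379, "node-id-2"]], ...]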
+ # If there's only one server in the cluster, its ``host`` is '' + # Fix it to the host in startup_nodes + if ( + len(cluster_slots) == 1 + and len(cluster_slots[0][2][0]) == 0 + and len(self.startup_nodes) == 1 + ): + cluster_slots[0][2][0] = startup_node.host + + for slot in cluster_slots: + primary_node = slot[2] + host = str_if_bytes(primary_node[0]) + if host == "": + host = startup_node.host + port = int(primary_node[1]) + host, port = self.remap_host_port(host, port) + + target_node = self._get_or_create_cluster_node( + host, port, PRIMARY, tmp_nodes_cache + ) + # add this node to the nodes cache + tmp_nodes_cache[target_node.name] = target_node + + for i in range(int(slot[0]), int(slot[1]) + 1): + if i not in tmp_slots: + tmp_slots[i] = [] + tmp_slots[i].append(target_node) + replica_nodes = [slot[j] for j in range(3, len(slot))] + + for replica_node in replica_nodes: + host = str_if_bytes(replica_node[0]) + port = replica_node[1] + host, port = self.remap_host_port(host, port) + + target_replica_node = self._get_or_create_cluster_node( + host, port, REPLICA, tmp_nodes_cache + ) + tmp_slots[i].append(target_replica_node) + # add this node to the nodes cache + tmp_nodes_cache[ + target_replica_node.name + ] = target_replica_node + else: + # Validate that 2 nodes want to use the same slot cache + # setup + tmp_slot = tmp_slots[i][0] + if tmp_slot.name != target_node.name: + disagreements.append( + f"{tmp_slot.name} vs {target_node.name} on slot: {i}" + ) + + if len(disagreements) > 5: + raise RedisClusterException( + f"startup_nodes could not agree on a valid " + f'slots cache: {", ".join(disagreements)}' + ) + + fully_covered = self.check_slots_coverage(tmp_slots) + if fully_covered: + # Don't need to continue to the next startup node if all + # slots are covered + break + + if not startup_nodes_reachable: + raise RedisClusterException( + f"Redis Cluster cannot be connected. Please provide at least " + f"one reachable node: {str(exception)}" + ) from exception + + # Create Redis connections to all nodes + self.create_redis_connections(list(tmp_nodes_cache.values())) + + # Check if the slots are not fully covered + if not fully_covered and self._require_full_coverage: + # Despite the requirement that the slots be covered, there + # isn't a full coverage + raise RedisClusterException( + f"All slots are not covered after query all startup_nodes. " + f"{len(tmp_slots)} of {REDIS_CLUSTER_HASH_SLOTS} " + f"covered..." + ) + + # Set the tmp variables to the real variables + self.nodes_cache = tmp_nodes_cache + self.slots_cache = tmp_slots + # Set the default node + self.default_node = self.get_nodes_by_server_type(PRIMARY)[0] + if self._dynamic_startup_nodes: + # Populate the startup nodes with all discovered nodes + self.startup_nodes = tmp_nodes_cache + # If initialize was called after a MovedError, clear it + self._moved_exception = None + + def close(self): + self.default_node = None + for node in self.nodes_cache.values(): + if node.redis_connection: + node.redis_connection.close() + + def reset(self): + try: + self.read_load_balancer.reset() + except TypeError: + # The read_load_balancer is None, do nothing + pass + + def remap_host_port(self, host: str, port: int) -> Tuple[str, int]: + """ + Remap the host and port returned from the cluster to a different + internal value. Useful if the client is not connecting directly + to the cluster. 
+ """ + if self.address_remap: + return self.address_remap((host, port)) + return host, port + + +class ClusterPubSub(PubSub): + """ + Wrapper for PubSub class. + + IMPORTANT: before using ClusterPubSub, read about the known limitations + with pubsub in Cluster mode and learn how to workaround them: + https://redis-py-cluster.readthedocs.io/en/stable/pubsub.html + """ + + def __init__( + self, + redis_cluster, + node=None, + host=None, + port=None, + push_handler_func=None, + **kwargs, + ): + """ + When a pubsub instance is created without specifying a node, a single + node will be transparently chosen for the pubsub connection on the + first command execution. The node will be determined by: + 1. Hashing the channel name in the request to find its keyslot + 2. Selecting a node that handles the keyslot: If read_from_replicas is + set to true, a replica can be selected. + + :type redis_cluster: RedisCluster + :type node: ClusterNode + :type host: str + :type port: int + """ + self.node = None + self.set_pubsub_node(redis_cluster, node, host, port) + connection_pool = ( + None + if self.node is None + else redis_cluster.get_redis_connection(self.node).connection_pool + ) + self.cluster = redis_cluster + self.node_pubsub_mapping = {} + self._pubsubs_generator = self._pubsubs_generator() + super().__init__( + connection_pool=connection_pool, + encoder=redis_cluster.encoder, + push_handler_func=push_handler_func, + **kwargs, + ) + + def set_pubsub_node(self, cluster, node=None, host=None, port=None): + """ + The pubsub node will be set according to the passed node, host and port + When none of the node, host, or port are specified - the node is set + to None and will be determined by the keyslot of the channel in the + first command to be executed. + RedisClusterException will be thrown if the passed node does not exist + in the cluster. + If host is passed without port, or vice versa, a DataError will be + thrown. + :type cluster: RedisCluster + :type node: ClusterNode + :type host: str + :type port: int + """ + if node is not None: + # node is passed by the user + self._raise_on_invalid_node(cluster, node, node.host, node.port) + pubsub_node = node + elif host is not None and port is not None: + # host and port passed by the user + node = cluster.get_node(host=host, port=port) + self._raise_on_invalid_node(cluster, node, host, port) + pubsub_node = node + elif any([host, port]) is True: + # only 'host' or 'port' passed + raise DataError("Passing a host requires passing a port, and vice versa") + else: + # nothing passed by the user. set node to None + pubsub_node = None + + self.node = pubsub_node + + def get_pubsub_node(self): + """ + Get the node that is being used as the pubsub connection + """ + return self.node + + def _raise_on_invalid_node(self, redis_cluster, node, host, port): + """ + Raise a RedisClusterException if the node is None or doesn't exist in + the cluster. + """ + if node is None or redis_cluster.get_node(node_name=node.name) is None: + raise RedisClusterException( + f"Node {host}:{port} doesn't exist in the cluster" + ) + + def execute_command(self, *args): + """ + Execute a subscribe/unsubscribe command. + + Taken code from redis-py and tweak to make it work within a cluster. 
+ """ + # NOTE: don't parse the response in this function -- it could pull a + # legitimate message off the stack if the connection is already + # subscribed to one or more channels + + if self.connection is None: + if self.connection_pool is None: + if len(args) > 1: + # Hash the first channel and get one of the nodes holding + # this slot + channel = args[1] + slot = self.cluster.keyslot(channel) + node = self.cluster.nodes_manager.get_node_from_slot( + slot, self.cluster.read_from_replicas + ) + else: + # Get a random node + node = self.cluster.get_random_node() + self.node = node + redis_connection = self.cluster.get_redis_connection(node) + self.connection_pool = redis_connection.connection_pool + self.connection = self.connection_pool.get_connection( + "pubsub", self.shard_hint + ) + # register a callback that re-subscribes to any channels we + # were listening to when we were disconnected + self.connection.register_connect_callback(self.on_connect) + if self.push_handler_func is not None and not HIREDIS_AVAILABLE: + self.connection._parser.set_push_handler(self.push_handler_func) + connection = self.connection + self._execute(connection, connection.send_command, *args) + + def _get_node_pubsub(self, node): + try: + return self.node_pubsub_mapping[node.name] + except KeyError: + pubsub = node.redis_connection.pubsub( + push_handler_func=self.push_handler_func + ) + self.node_pubsub_mapping[node.name] = pubsub + return pubsub + + def _sharded_message_generator(self): + for _ in range(len(self.node_pubsub_mapping)): + pubsub = next(self._pubsubs_generator) + message = pubsub.get_message() + if message is not None: + return message + return None + + def _pubsubs_generator(self): + while True: + for pubsub in self.node_pubsub_mapping.values(): + yield pubsub + + def get_sharded_message( + self, ignore_subscribe_messages=False, timeout=0.0, target_node=None + ): + if target_node: + message = self.node_pubsub_mapping[target_node.name].get_message( + ignore_subscribe_messages=ignore_subscribe_messages, timeout=timeout + ) + else: + message = self._sharded_message_generator() + if message is None: + return None + elif str_if_bytes(message["type"]) == "sunsubscribe": + if message["channel"] in self.pending_unsubscribe_shard_channels: + self.pending_unsubscribe_shard_channels.remove(message["channel"]) + self.shard_channels.pop(message["channel"], None) + node = self.cluster.get_node_from_key(message["channel"]) + if self.node_pubsub_mapping[node.name].subscribed is False: + self.node_pubsub_mapping.pop(node.name) + if not self.channels and not self.patterns and not self.shard_channels: + # There are no subscriptions anymore, set subscribed_event flag + # to false + self.subscribed_event.clear() + if self.ignore_subscribe_messages or ignore_subscribe_messages: + return None + return message + + def ssubscribe(self, *args, **kwargs): + if args: + args = list_or_args(args[0], args[1:]) + s_channels = dict.fromkeys(args) + s_channels.update(kwargs) + for s_channel, handler in s_channels.items(): + node = self.cluster.get_node_from_key(s_channel) + pubsub = self._get_node_pubsub(node) + if handler: + pubsub.ssubscribe(**{s_channel: handler}) + else: + pubsub.ssubscribe(s_channel) + self.shard_channels.update(pubsub.shard_channels) + self.pending_unsubscribe_shard_channels.difference_update( + self._normalize_keys({s_channel: None}) + ) + if pubsub.subscribed and not self.subscribed: + self.subscribed_event.set() + self.health_check_response_counter = 0 + + def sunsubscribe(self, *args): + if args: 
+ args = list_or_args(args[0], args[1:]) + else: + args = self.shard_channels + + for s_channel in args: + node = self.cluster.get_node_from_key(s_channel) + p = self._get_node_pubsub(node) + p.sunsubscribe(s_channel) + self.pending_unsubscribe_shard_channels.update( + p.pending_unsubscribe_shard_channels + ) + + def get_redis_connection(self): + """ + Get the Redis connection of the pubsub connected node. + """ + if self.node is not None: + return self.node.redis_connection + + def disconnect(self): + """ + Disconnect the pubsub connection. + """ + if self.connection: + self.connection.disconnect() + for pubsub in self.node_pubsub_mapping.values(): + pubsub.connection.disconnect() + + +class ClusterPipeline(RedisCluster): + """ + Support for Redis pipeline + in cluster mode + """ + + ERRORS_ALLOW_RETRY = ( + ConnectionError, + TimeoutError, + MovedError, + AskError, + TryAgainError, + ) + + def __init__( + self, + nodes_manager: "NodesManager", + commands_parser: "CommandsParser", + result_callbacks: Optional[Dict[str, Callable]] = None, + cluster_response_callbacks: Optional[Dict[str, Callable]] = None, + startup_nodes: Optional[List["ClusterNode"]] = None, + read_from_replicas: bool = False, + cluster_error_retry_attempts: int = 3, + reinitialize_steps: int = 5, + lock=None, + **kwargs, + ): + """ """ + self.command_stack = [] + self.nodes_manager = nodes_manager + self.commands_parser = commands_parser + self.refresh_table_asap = False + self.result_callbacks = ( + result_callbacks or self.__class__.RESULT_CALLBACKS.copy() + ) + self.startup_nodes = startup_nodes if startup_nodes else [] + self.read_from_replicas = read_from_replicas + self.command_flags = self.__class__.COMMAND_FLAGS.copy() + self.cluster_response_callbacks = cluster_response_callbacks + self.cluster_error_retry_attempts = cluster_error_retry_attempts + self.reinitialize_counter = 0 + self.reinitialize_steps = reinitialize_steps + self.encoder = Encoder( + kwargs.get("encoding", "utf-8"), + kwargs.get("encoding_errors", "strict"), + kwargs.get("decode_responses", False), + ) + if lock is None: + lock = threading.Lock() + self._lock = lock + + def __repr__(self): + """ """ + return f"{type(self).__name__}" + + def __enter__(self): + """ """ + return self + + def __exit__(self, exc_type, exc_value, traceback): + """ """ + self.reset() + + def __del__(self): + try: + self.reset() + except Exception: + pass + + def __len__(self): + """ """ + return len(self.command_stack) + + def __bool__(self): + "Pipeline instances should always evaluate to True on Python 3+" + return True + + def execute_command(self, *args, **kwargs): + """ + Wrapper function for pipeline_execute_command + """ + return self.pipeline_execute_command(*args, **kwargs) + + def pipeline_execute_command(self, *args, **options): + """ + Appends the executed command to the pipeline's command stack + """ + self.command_stack.append( + PipelineCommand(args, options, len(self.command_stack)) + ) + return self + + def raise_first_error(self, stack): + """ + Raise the first exception on the stack + """ + for c in stack: + r = c.result + if isinstance(r, Exception): + self.annotate_exception(r, c.position + 1, c.args) + raise r + + def annotate_exception(self, exception, number, command): + """ + Provides extra context to the exception prior to it being handled + """ + cmd = " ".join(map(safe_str, command)) + msg = ( + f"Command # {number} ({cmd}) of pipeline " + f"caused error: {exception.args[0]}" + ) + exception.args = (msg,) + exception.args[1:] + + def 
execute(self, raise_on_error=True):
+        """
+        Execute all the commands in the current pipeline
+        """
+        stack = self.command_stack
+        try:
+            return self.send_cluster_commands(stack, raise_on_error)
+        finally:
+            self.reset()
+
+    def reset(self):
+        """
+        Reset back to an empty pipeline.
+        """
+        self.command_stack = []
+
+        self.scripts = set()
+
+        # TODO: Implement
+        # make sure to reset the connection state in the event that we were
+        # watching something
+        # if self.watching and self.connection:
+        #     try:
+        #         # call this manually since our unwatch or
+        #         # immediate_execute_command methods can call reset()
+        #         self.connection.send_command('UNWATCH')
+        #         self.connection.read_response()
+        #     except ConnectionError:
+        #         # disconnect will also remove any previous WATCHes
+        #         self.connection.disconnect()
+
+        # clean up the other instance attributes
+        self.watching = False
+        self.explicit_transaction = False
+
+        # TODO: Implement
+        # we can safely return the connection to the pool here since we're
+        # sure we're no longer WATCHing anything
+        # if self.connection:
+        #     self.connection_pool.release(self.connection)
+        #     self.connection = None
+
+    def send_cluster_commands(
+        self, stack, raise_on_error=True, allow_redirections=True
+    ):
+        """
+        Wrapper for CLUSTERDOWN error handling.
+
+        If the cluster reports it is down it is assumed that:
+        - connection_pool was disconnected
+        - connection_pool was reset
+        - refresh_table_asap was set to True
+
+        It will retry the number of times specified by
+        the config option "self.cluster_error_retry_attempts",
+        which defaults to 3 unless manually configured.
+
+        If all of those attempts fail, a ClusterDownError is raised.
+        """
+        if not stack:
+            return []
+        retry_attempts = self.cluster_error_retry_attempts
+        while True:
+            try:
+                return self._send_cluster_commands(
+                    stack,
+                    raise_on_error=raise_on_error,
+                    allow_redirections=allow_redirections,
+                )
+            except (ClusterDownError, ConnectionError) as e:
+                if retry_attempts > 0:
+                    # Try again with the new cluster setup. All other errors
+                    # should be raised.
+                    retry_attempts -= 1
+                else:
+                    raise e
+
+    def _send_cluster_commands(
+        self, stack, raise_on_error=True, allow_redirections=True
+    ):
+        """
+        Send a bunch of cluster commands to the redis cluster.
+
+        `allow_redirections` If the pipeline should follow
+        `ASK` & `MOVED` responses automatically. If set
+        to false it will raise RedisClusterException.
+        """
+        # the first time sending the commands we send all of
+        # the commands that were queued up.
+        # if we have to run through it again, we only retry
+        # the commands that failed.
+        attempt = sorted(stack, key=lambda x: x.position)
+        is_default_node = False
+        # build a mapping of the node objects, keyed by the node names we
+        # need to send commands to
+        nodes = {}
+
+        # as we move through each command that still needs to be processed,
+        # we figure out the slot number that command maps to, then from
+        # the slot determine the node.
+        for c in attempt:
+            while True:
+                # refer to our internal node -> slot table that
+                # tells us where a given command should route to.
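+                # e.g. the key of a GET maps to one of the 16384 hash slots,
+                # and the slot table tells us which node serves that slot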
+                # (it might be possible we have a cached node that no longer
+                # exists in the cluster, which is why we do this in a loop)
+                passed_targets = c.options.pop("target_nodes", None)
+                if passed_targets and not self._is_nodes_flag(passed_targets):
+                    target_nodes = self._parse_target_nodes(passed_targets)
+                else:
+                    target_nodes = self._determine_nodes(
+                        *c.args, node_flag=passed_targets
+                    )
+                    if not target_nodes:
+                        raise RedisClusterException(
+                            f"No targets were found to execute {c.args} command on"
+                        )
+                if len(target_nodes) > 1:
+                    raise RedisClusterException(
+                        f"Too many targets for command {c.args}"
+                    )
+
+                node = target_nodes[0]
+                if node == self.get_default_node():
+                    is_default_node = True
+
+                # now that we know the name of the node
+                # ( it's just a string in the form of host:port )
+                # we can build a list of commands for each node.
+                node_name = node.name
+                if node_name not in nodes:
+                    redis_node = self.get_redis_connection(node)
+                    try:
+                        connection = get_connection(redis_node, c.args)
+                    except ConnectionError:
+                        for n in nodes.values():
+                            n.connection_pool.release(n.connection)
+                        # Connection retries are being handled in the node's
+                        # Retry object. Reinitialize the node -> slot table.
+                        self.nodes_manager.initialize()
+                        if is_default_node:
+                            self.replace_default_node()
+                        raise
+                    nodes[node_name] = NodeCommands(
+                        redis_node.parse_response,
+                        redis_node.connection_pool,
+                        connection,
+                    )
+                nodes[node_name].append(c)
+                break
+
+        # send the commands in sequence.
+        # we write to all the open sockets for each node first,
+        # before reading anything
+        # this allows us to flush all the requests out across the
+        # network essentially in parallel
+        # so that we can read them all in parallel as they come back.
+        # we don't multiplex on the sockets as they come available,
+        # but that shouldn't make too much difference.
+        node_commands = nodes.values()
+        try:
+            for n in node_commands:
+                n.write()
+
+            for n in node_commands:
+                n.read()
+        finally:
+            # release all of the redis connections we allocated earlier
+            # back into the connection pool.
+            # we used to do this step as part of a try/finally block,
+            # but it is really dangerous to
+            # release connections back into the pool if for some
+            # reason the socket has data still left in it
+            # from a previous operation. The write and
+            # read operations already have try/catch around them for
+            # all known types of errors including connection
+            # and socket level errors.
+            # So if we hit an exception, something really bad
+            # happened and putting any of
+            # these connections back into the pool is a very bad idea.
+            # the socket might have unread buffer still sitting in it,
+            # and then the next time we read from it we would pass the
+            # buffered result back from a previous command and
+            # every request sent to that connection afterwards would
+            # always get a mismatched result.
+            for n in nodes.values():
+                n.connection_pool.release(n.connection)
+
+        # if the response isn't an exception it is a
+        # valid response from the node
+        # we're all done with that command, YAY!
+        # if we have more commands to attempt, we've run into problems.
+        # collect all the commands we are allowed to retry.
+        # (MOVED, ASK, or connection errors or timeout errors)
+        attempt = sorted(
+            (
+                c
+                for c in attempt
+                if isinstance(c.result, ClusterPipeline.ERRORS_ALLOW_RETRY)
+            ),
+            key=lambda x: x.position,
+        )
+        if attempt and allow_redirections:
+            # RETRY MAGIC HAPPENS HERE!
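+            # (at this point `attempt` holds only the commands that failed
+            # with a MOVED/ASK redirect or a connection/timeout error)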
+            # send these remaining commands one at a time using `execute_command`
+            # in the main client. This keeps our retry logic
+            # in one place mostly,
+            # and allows us to be more confident in correctness of behavior.
+            # at this point any speed gains from pipelining have been lost
+            # anyway, so we might as well make the best
+            # attempt to get the correct behavior.
+            #
+            # The client command will handle retries for each
+            # individual command sequentially as we pass each
+            # one into `execute_command`. Any exceptions
+            # that bubble out should only appear once all
+            # retries have been exhausted.
+            #
+            # If a lot of commands have failed, we'll be setting the
+            # flag to rebuild the slots table from scratch.
+            # So MOVED errors should correct themselves fairly quickly.
+            self.reinitialize_counter += 1
+            if self._should_reinitialized():
+                self.nodes_manager.initialize()
+                if is_default_node:
+                    self.replace_default_node()
+            for c in attempt:
+                try:
+                    # send each command individually like we
+                    # do in the main client.
+                    c.result = super().execute_command(*c.args, **c.options)
+                except RedisError as e:
+                    c.result = e
+
+        # turn the response back into a simple flat array that corresponds
+        # to the sequence of commands issued in the stack in pipeline.execute()
+        response = []
+        for c in sorted(stack, key=lambda x: x.position):
+            if c.args[0] in self.cluster_response_callbacks:
+                c.result = self.cluster_response_callbacks[c.args[0]](
+                    c.result, **c.options
+                )
+            response.append(c.result)
+
+        if raise_on_error:
+            self.raise_first_error(stack)
+
+        return response
+
+    def _fail_on_redirect(self, allow_redirections):
+        """ """
+        if not allow_redirections:
+            raise RedisClusterException(
+                "ASK & MOVED redirection not allowed in this pipeline"
+            )
+
+    def exists(self, *keys):
+        return self.execute_command("EXISTS", *keys)
+
+    def eval(self):
+        """ """
+        raise RedisClusterException("method eval() is not implemented")
+
+    def multi(self):
+        """ """
+        raise RedisClusterException("method multi() is not implemented")
+
+    def immediate_execute_command(self, *args, **options):
+        """ """
+        raise RedisClusterException(
+            "method immediate_execute_command() is not implemented"
+        )
+
+    def _execute_transaction(self, *args, **kwargs):
+        """ """
+        raise RedisClusterException("method _execute_transaction() is not implemented")
+
+    def load_scripts(self):
+        """ """
+        raise RedisClusterException("method load_scripts() is not implemented")
+
+    def watch(self, *names):
+        """ """
+        raise RedisClusterException("method watch() is not implemented")
+
+    def unwatch(self):
+        """ """
+        raise RedisClusterException("method unwatch() is not implemented")
+
+    def script_load_for_pipeline(self, *args, **kwargs):
+        """ """
+        raise RedisClusterException(
+            "method script_load_for_pipeline() is not implemented"
+        )
+
+    def delete(self, *names):
+        """
+        Delete a key specified by ``names``.
+        """
+        if len(names) != 1:
+            raise RedisClusterException(
+                "deleting multiple keys is not implemented in pipeline command"
+            )
+
+        return self.execute_command("DEL", names[0])
+
+    def unlink(self, *names):
+        """
+        Unlink a key specified by ``names``.
+        """
+        if len(names) != 1:
+            raise RedisClusterException(
+                "unlinking multiple keys is not implemented in pipeline command"
+            )
+
+        return self.execute_command("UNLINK", names[0])
+
+
+def block_pipeline_command(name: str) -> Callable[..., Any]:
+    """
+    Raises an error because some pipelined commands must
+    be blocked when running redis in cluster mode.
+    """
+
+    def inner(*args, **kwargs):
+        raise 
RedisClusterException( + f"ERROR: Calling pipelined function {name} is blocked " + f"when running redis in cluster mode..." + ) + + return inner + + +# Blocked pipeline commands +PIPELINE_BLOCKED_COMMANDS = ( + "BGREWRITEAOF", + "BGSAVE", + "BITOP", + "BRPOPLPUSH", + "CLIENT GETNAME", + "CLIENT KILL", + "CLIENT LIST", + "CLIENT SETNAME", + "CLIENT", + "CONFIG GET", + "CONFIG RESETSTAT", + "CONFIG REWRITE", + "CONFIG SET", + "CONFIG", + "DBSIZE", + "ECHO", + "EVALSHA", + "FLUSHALL", + "FLUSHDB", + "INFO", + "KEYS", + "LASTSAVE", + "MGET", + "MGET NONATOMIC", + "MOVE", + "MSET", + "MSET NONATOMIC", + "MSETNX", + "PFCOUNT", + "PFMERGE", + "PING", + "PUBLISH", + "RANDOMKEY", + "READONLY", + "READWRITE", + "RENAME", + "RENAMENX", + "RPOPLPUSH", + "SAVE", + "SCAN", + "SCRIPT EXISTS", + "SCRIPT FLUSH", + "SCRIPT KILL", + "SCRIPT LOAD", + "SCRIPT", + "SDIFF", + "SDIFFSTORE", + "SENTINEL GET MASTER ADDR BY NAME", + "SENTINEL MASTER", + "SENTINEL MASTERS", + "SENTINEL MONITOR", + "SENTINEL REMOVE", + "SENTINEL SENTINELS", + "SENTINEL SET", + "SENTINEL SLAVES", + "SENTINEL", + "SHUTDOWN", + "SINTER", + "SINTERSTORE", + "SLAVEOF", + "SLOWLOG GET", + "SLOWLOG LEN", + "SLOWLOG RESET", + "SLOWLOG", + "SMOVE", + "SORT", + "SUNION", + "SUNIONSTORE", + "TIME", +) +for command in PIPELINE_BLOCKED_COMMANDS: + command = command.replace(" ", "_").lower() + + setattr(ClusterPipeline, command, block_pipeline_command(command)) + + +class PipelineCommand: + """ """ + + def __init__(self, args, options=None, position=None): + self.args = args + if options is None: + options = {} + self.options = options + self.position = position + self.result = None + self.node = None + self.asking = False + + +class NodeCommands: + """ """ + + def __init__(self, parse_response, connection_pool, connection): + """ """ + self.parse_response = parse_response + self.connection_pool = connection_pool + self.connection = connection + self.commands = [] + + def append(self, c): + """ """ + self.commands.append(c) + + def write(self): + """ + Code borrowed from Redis so it can be fixed + """ + connection = self.connection + commands = self.commands + + # We are going to clobber the commands with the write, so go ahead + # and ensure that nothing is sitting there from a previous run. + for c in commands: + c.result = None + + # build up all commands into a single request to increase network perf + # send all the commands and catch connection and timeout errors. + try: + connection.send_packed_command( + connection.pack_commands([c.args for c in commands]) + ) + except (ConnectionError, TimeoutError) as e: + for c in commands: + c.result = e + + def read(self): + """ """ + connection = self.connection + for c in self.commands: + # if there is a result on this command, + # it means we ran into an exception + # like a connection error. Trying to parse + # a response on a connection that + # is no longer open will result in a + # connection error raised by redis-py. + # but redis-py doesn't check in parse_response + # that the sock object is + # still set and if you try to + # read from a closed connection, it will + # result in an AttributeError because + # it will do a readline() call on None. + # This can have all kinds of nasty side-effects. + # Treating this case as a connection error + # is fine because it will dump + # the connection object back into the + # pool and on the next write, it will + # explicitly open the connection and all will be well. 
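+            # a result of None means the write succeeded and a reply is
+            # still pending on this connection, so parse it now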
+            if c.result is None:
+                try:
+                    c.result = self.parse_response(connection, c.args[0], **c.options)
+                except (ConnectionError, TimeoutError) as e:
+                    for c in self.commands:
+                        c.result = e
+                    return
+                except RedisError:
+                    c.result = sys.exc_info()[1]
diff --git a/templates/skills/spotify/dependencies/redis/commands/__init__.py b/templates/skills/spotify/dependencies/redis/commands/__init__.py
new file mode 100644
index 00000000..a94d9764
--- /dev/null
+++ b/templates/skills/spotify/dependencies/redis/commands/__init__.py
@@ -0,0 +1,18 @@
+from .cluster import READ_COMMANDS, AsyncRedisClusterCommands, RedisClusterCommands
+from .core import AsyncCoreCommands, CoreCommands
+from .helpers import list_or_args
+from .redismodules import AsyncRedisModuleCommands, RedisModuleCommands
+from .sentinel import AsyncSentinelCommands, SentinelCommands
+
+__all__ = [
+    "AsyncCoreCommands",
+    "AsyncRedisClusterCommands",
+    "AsyncRedisModuleCommands",
+    "AsyncSentinelCommands",
+    "CoreCommands",
+    "READ_COMMANDS",
+    "RedisClusterCommands",
+    "RedisModuleCommands",
+    "SentinelCommands",
+    "list_or_args",
+]
diff --git a/templates/skills/spotify/dependencies/redis/commands/bf/__init__.py b/templates/skills/spotify/dependencies/redis/commands/bf/__init__.py
new file mode 100644
index 00000000..959358f8
--- /dev/null
+++ b/templates/skills/spotify/dependencies/redis/commands/bf/__init__.py
@@ -0,0 +1,253 @@
+from redis._parsers.helpers import bool_ok
+
+from ..helpers import get_protocol_version, parse_to_list
+from .commands import *  # noqa
+from .info import BFInfo, CFInfo, CMSInfo, TDigestInfo, TopKInfo
+
+
+class AbstractBloom(object):
+    """
+    The client lets you interact with RedisBloom and use all of
+    its functionality.
+
+    - BF for Bloom Filter
+    - CF for Cuckoo Filter
+    - CMS for Count-Min Sketch
+    - TOPK for TopK Data Structure
+    - TDIGEST for estimating rank statistics
+    """
+
+    @staticmethod
+    def append_items(params, items):
+        """Append ITEMS to params."""
+        params.extend(["ITEMS"])
+        params += items
+
+    @staticmethod
+    def append_error(params, error):
+        """Append ERROR to params."""
+        if error is not None:
+            params.extend(["ERROR", error])
+
+    @staticmethod
+    def append_capacity(params, capacity):
+        """Append CAPACITY to params."""
+        if capacity is not None:
+            params.extend(["CAPACITY", capacity])
+
+    @staticmethod
+    def append_expansion(params, expansion):
+        """Append EXPANSION to params."""
+        if expansion is not None:
+            params.extend(["EXPANSION", expansion])
+
+    @staticmethod
+    def append_no_scale(params, noScale):
+        """Append NONSCALING tag to params."""
+        if noScale is not None:
+            params.extend(["NONSCALING"])
+
+    @staticmethod
+    def append_weights(params, weights):
+        """Append WEIGHTS to params."""
+        if len(weights) > 0:
+            params.append("WEIGHTS")
+            params += weights
+
+    @staticmethod
+    def append_no_create(params, noCreate):
+        """Append NOCREATE tag to params."""
+        if noCreate is not None:
+            params.extend(["NOCREATE"])
+
+    @staticmethod
+    def append_items_and_increments(params, items, increments):
+        """Append pairs of items and increments to params."""
+        for i in range(len(items)):
+            params.append(items[i])
+            params.append(increments[i])
+
+    @staticmethod
+    def append_values_and_weights(params, items, weights):
+        """Append pairs of items and weights to params."""
+        for i in range(len(items)):
+            params.append(items[i])
+            params.append(weights[i])
+
+    @staticmethod
+    def append_max_iterations(params, max_iterations):
+        """Append MAXITERATIONS to params."""
+        if max_iterations is not None:
+
params.extend(["MAXITERATIONS", max_iterations]) + + @staticmethod + def append_bucket_size(params, bucket_size): + """Append BUCKETSIZE to params.""" + if bucket_size is not None: + params.extend(["BUCKETSIZE", bucket_size]) + + +class CMSBloom(CMSCommands, AbstractBloom): + def __init__(self, client, **kwargs): + """Create a new RedisBloom client.""" + # Set the module commands' callbacks + _MODULE_CALLBACKS = { + CMS_INITBYDIM: bool_ok, + CMS_INITBYPROB: bool_ok, + # CMS_INCRBY: spaceHolder, + # CMS_QUERY: spaceHolder, + CMS_MERGE: bool_ok, + } + + _RESP2_MODULE_CALLBACKS = { + CMS_INFO: CMSInfo, + } + _RESP3_MODULE_CALLBACKS = {} + + self.client = client + self.commandmixin = CMSCommands + self.execute_command = client.execute_command + + if get_protocol_version(self.client) in ["3", 3]: + _MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS) + else: + _MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS) + + for k, v in _MODULE_CALLBACKS.items(): + self.client.set_response_callback(k, v) + + +class TOPKBloom(TOPKCommands, AbstractBloom): + def __init__(self, client, **kwargs): + """Create a new RedisBloom client.""" + # Set the module commands' callbacks + _MODULE_CALLBACKS = { + TOPK_RESERVE: bool_ok, + # TOPK_QUERY: spaceHolder, + # TOPK_COUNT: spaceHolder, + } + + _RESP2_MODULE_CALLBACKS = { + TOPK_ADD: parse_to_list, + TOPK_INCRBY: parse_to_list, + TOPK_INFO: TopKInfo, + TOPK_LIST: parse_to_list, + } + _RESP3_MODULE_CALLBACKS = {} + + self.client = client + self.commandmixin = TOPKCommands + self.execute_command = client.execute_command + + if get_protocol_version(self.client) in ["3", 3]: + _MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS) + else: + _MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS) + + for k, v in _MODULE_CALLBACKS.items(): + self.client.set_response_callback(k, v) + + +class CFBloom(CFCommands, AbstractBloom): + def __init__(self, client, **kwargs): + """Create a new RedisBloom client.""" + # Set the module commands' callbacks + _MODULE_CALLBACKS = { + CF_RESERVE: bool_ok, + # CF_ADD: spaceHolder, + # CF_ADDNX: spaceHolder, + # CF_INSERT: spaceHolder, + # CF_INSERTNX: spaceHolder, + # CF_EXISTS: spaceHolder, + # CF_DEL: spaceHolder, + # CF_COUNT: spaceHolder, + # CF_SCANDUMP: spaceHolder, + # CF_LOADCHUNK: spaceHolder, + } + + _RESP2_MODULE_CALLBACKS = { + CF_INFO: CFInfo, + } + _RESP3_MODULE_CALLBACKS = {} + + self.client = client + self.commandmixin = CFCommands + self.execute_command = client.execute_command + + if get_protocol_version(self.client) in ["3", 3]: + _MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS) + else: + _MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS) + + for k, v in _MODULE_CALLBACKS.items(): + self.client.set_response_callback(k, v) + + +class TDigestBloom(TDigestCommands, AbstractBloom): + def __init__(self, client, **kwargs): + """Create a new RedisBloom client.""" + # Set the module commands' callbacks + _MODULE_CALLBACKS = { + TDIGEST_CREATE: bool_ok, + # TDIGEST_RESET: bool_ok, + # TDIGEST_ADD: spaceHolder, + # TDIGEST_MERGE: spaceHolder, + } + + _RESP2_MODULE_CALLBACKS = { + TDIGEST_BYRANK: parse_to_list, + TDIGEST_BYREVRANK: parse_to_list, + TDIGEST_CDF: parse_to_list, + TDIGEST_INFO: TDigestInfo, + TDIGEST_MIN: float, + TDIGEST_MAX: float, + TDIGEST_TRIMMED_MEAN: float, + TDIGEST_QUANTILE: parse_to_list, + } + _RESP3_MODULE_CALLBACKS = {} + + self.client = client + self.commandmixin = TDigestCommands + self.execute_command = client.execute_command + + if get_protocol_version(self.client) in ["3", 3]: + 
_MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS) + else: + _MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS) + + for k, v in _MODULE_CALLBACKS.items(): + self.client.set_response_callback(k, v) + + +class BFBloom(BFCommands, AbstractBloom): + def __init__(self, client, **kwargs): + """Create a new RedisBloom client.""" + # Set the module commands' callbacks + _MODULE_CALLBACKS = { + BF_RESERVE: bool_ok, + # BF_ADD: spaceHolder, + # BF_MADD: spaceHolder, + # BF_INSERT: spaceHolder, + # BF_EXISTS: spaceHolder, + # BF_MEXISTS: spaceHolder, + # BF_SCANDUMP: spaceHolder, + # BF_LOADCHUNK: spaceHolder, + # BF_CARD: spaceHolder, + } + + _RESP2_MODULE_CALLBACKS = { + BF_INFO: BFInfo, + } + _RESP3_MODULE_CALLBACKS = {} + + self.client = client + self.commandmixin = BFCommands + self.execute_command = client.execute_command + + if get_protocol_version(self.client) in ["3", 3]: + _MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS) + else: + _MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS) + + for k, v in _MODULE_CALLBACKS.items(): + self.client.set_response_callback(k, v) diff --git a/templates/skills/spotify/dependencies/redis/commands/bf/commands.py b/templates/skills/spotify/dependencies/redis/commands/bf/commands.py new file mode 100644 index 00000000..447f8445 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/bf/commands.py @@ -0,0 +1,542 @@ +from redis.client import NEVER_DECODE +from redis.exceptions import ModuleError +from redis.utils import HIREDIS_AVAILABLE, deprecated_function + +BF_RESERVE = "BF.RESERVE" +BF_ADD = "BF.ADD" +BF_MADD = "BF.MADD" +BF_INSERT = "BF.INSERT" +BF_EXISTS = "BF.EXISTS" +BF_MEXISTS = "BF.MEXISTS" +BF_SCANDUMP = "BF.SCANDUMP" +BF_LOADCHUNK = "BF.LOADCHUNK" +BF_INFO = "BF.INFO" +BF_CARD = "BF.CARD" + +CF_RESERVE = "CF.RESERVE" +CF_ADD = "CF.ADD" +CF_ADDNX = "CF.ADDNX" +CF_INSERT = "CF.INSERT" +CF_INSERTNX = "CF.INSERTNX" +CF_EXISTS = "CF.EXISTS" +CF_MEXISTS = "CF.MEXISTS" +CF_DEL = "CF.DEL" +CF_COUNT = "CF.COUNT" +CF_SCANDUMP = "CF.SCANDUMP" +CF_LOADCHUNK = "CF.LOADCHUNK" +CF_INFO = "CF.INFO" + +CMS_INITBYDIM = "CMS.INITBYDIM" +CMS_INITBYPROB = "CMS.INITBYPROB" +CMS_INCRBY = "CMS.INCRBY" +CMS_QUERY = "CMS.QUERY" +CMS_MERGE = "CMS.MERGE" +CMS_INFO = "CMS.INFO" + +TOPK_RESERVE = "TOPK.RESERVE" +TOPK_ADD = "TOPK.ADD" +TOPK_INCRBY = "TOPK.INCRBY" +TOPK_QUERY = "TOPK.QUERY" +TOPK_COUNT = "TOPK.COUNT" +TOPK_LIST = "TOPK.LIST" +TOPK_INFO = "TOPK.INFO" + +TDIGEST_CREATE = "TDIGEST.CREATE" +TDIGEST_RESET = "TDIGEST.RESET" +TDIGEST_ADD = "TDIGEST.ADD" +TDIGEST_MERGE = "TDIGEST.MERGE" +TDIGEST_CDF = "TDIGEST.CDF" +TDIGEST_QUANTILE = "TDIGEST.QUANTILE" +TDIGEST_MIN = "TDIGEST.MIN" +TDIGEST_MAX = "TDIGEST.MAX" +TDIGEST_INFO = "TDIGEST.INFO" +TDIGEST_TRIMMED_MEAN = "TDIGEST.TRIMMED_MEAN" +TDIGEST_RANK = "TDIGEST.RANK" +TDIGEST_REVRANK = "TDIGEST.REVRANK" +TDIGEST_BYRANK = "TDIGEST.BYRANK" +TDIGEST_BYREVRANK = "TDIGEST.BYREVRANK" + + +class BFCommands: + """Bloom Filter commands.""" + + def create(self, key, errorRate, capacity, expansion=None, noScale=None): + """ + Create a new Bloom Filter `key` with desired probability of false positives + `errorRate` expected entries to be inserted as `capacity`. + Default expansion value is 2. By default, filter is auto-scaling. + For more information see `BF.RESERVE `_. 
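+
+        Example (assuming ``r`` is a connected ``redis.Redis`` client):
+
+        >>> r.bf().create("bloom", 0.01, 1000)  # 1% error rate, 1000 items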
+ """ # noqa + params = [key, errorRate, capacity] + self.append_expansion(params, expansion) + self.append_no_scale(params, noScale) + return self.execute_command(BF_RESERVE, *params) + + reserve = create + + def add(self, key, item): + """ + Add to a Bloom Filter `key` an `item`. + For more information see `BF.ADD `_. + """ # noqa + return self.execute_command(BF_ADD, key, item) + + def madd(self, key, *items): + """ + Add to a Bloom Filter `key` multiple `items`. + For more information see `BF.MADD `_. + """ # noqa + return self.execute_command(BF_MADD, key, *items) + + def insert( + self, + key, + items, + capacity=None, + error=None, + noCreate=None, + expansion=None, + noScale=None, + ): + """ + Add to a Bloom Filter `key` multiple `items`. + + If `nocreate` remain `None` and `key` does not exist, a new Bloom Filter + `key` will be created with desired probability of false positives `errorRate` + and expected entries to be inserted as `size`. + For more information see `BF.INSERT `_. + """ # noqa + params = [key] + self.append_capacity(params, capacity) + self.append_error(params, error) + self.append_expansion(params, expansion) + self.append_no_create(params, noCreate) + self.append_no_scale(params, noScale) + self.append_items(params, items) + + return self.execute_command(BF_INSERT, *params) + + def exists(self, key, item): + """ + Check whether an `item` exists in Bloom Filter `key`. + For more information see `BF.EXISTS `_. + """ # noqa + return self.execute_command(BF_EXISTS, key, item) + + def mexists(self, key, *items): + """ + Check whether `items` exist in Bloom Filter `key`. + For more information see `BF.MEXISTS `_. + """ # noqa + return self.execute_command(BF_MEXISTS, key, *items) + + def scandump(self, key, iter): + """ + Begin an incremental save of the bloom filter `key`. + + This is useful for large bloom filters which cannot fit into the normal SAVE and RESTORE model. + The first time this command is called, the value of `iter` should be 0. + This command will return successive (iter, data) pairs until (0, NULL) to indicate completion. + For more information see `BF.SCANDUMP `_. + """ # noqa + if HIREDIS_AVAILABLE: + raise ModuleError("This command cannot be used when hiredis is available.") + + params = [key, iter] + options = {} + options[NEVER_DECODE] = [] + return self.execute_command(BF_SCANDUMP, *params, **options) + + def loadchunk(self, key, iter, data): + """ + Restore a filter previously saved using SCANDUMP. + + See the SCANDUMP command for example usage. + This command will overwrite any bloom filter stored under key. + Ensure that the bloom filter will not be modified between invocations. + For more information see `BF.LOADCHUNK `_. + """ # noqa + return self.execute_command(BF_LOADCHUNK, key, iter, data) + + def info(self, key): + """ + Return capacity, size, number of filters, number of items inserted, and expansion rate. + For more information see `BF.INFO `_. + """ # noqa + return self.execute_command(BF_INFO, key) + + def card(self, key): + """ + Returns the cardinality of a Bloom filter - number of items that were added to a Bloom filter and detected as unique + (items that caused at least one bit to be set in at least one sub-filter). + For more information see `BF.CARD `_. 
+ """ # noqa + return self.execute_command(BF_CARD, key) + + +class CFCommands: + """Cuckoo Filter commands.""" + + def create( + self, key, capacity, expansion=None, bucket_size=None, max_iterations=None + ): + """ + Create a new Cuckoo Filter `key` an initial `capacity` items. + For more information see `CF.RESERVE `_. + """ # noqa + params = [key, capacity] + self.append_expansion(params, expansion) + self.append_bucket_size(params, bucket_size) + self.append_max_iterations(params, max_iterations) + return self.execute_command(CF_RESERVE, *params) + + reserve = create + + def add(self, key, item): + """ + Add an `item` to a Cuckoo Filter `key`. + For more information see `CF.ADD `_. + """ # noqa + return self.execute_command(CF_ADD, key, item) + + def addnx(self, key, item): + """ + Add an `item` to a Cuckoo Filter `key` only if item does not yet exist. + Command might be slower that `add`. + For more information see `CF.ADDNX `_. + """ # noqa + return self.execute_command(CF_ADDNX, key, item) + + def insert(self, key, items, capacity=None, nocreate=None): + """ + Add multiple `items` to a Cuckoo Filter `key`, allowing the filter + to be created with a custom `capacity` if it does not yet exist. + `items` must be provided as a list. + For more information see `CF.INSERT `_. + """ # noqa + params = [key] + self.append_capacity(params, capacity) + self.append_no_create(params, nocreate) + self.append_items(params, items) + return self.execute_command(CF_INSERT, *params) + + def insertnx(self, key, items, capacity=None, nocreate=None): + """ + Add multiple `items` to a Cuckoo Filter `key` only if they do not exist yet, + allowing the filter to be created with a custom `capacity` if it does not yet exist. + `items` must be provided as a list. + For more information see `CF.INSERTNX `_. + """ # noqa + params = [key] + self.append_capacity(params, capacity) + self.append_no_create(params, nocreate) + self.append_items(params, items) + return self.execute_command(CF_INSERTNX, *params) + + def exists(self, key, item): + """ + Check whether an `item` exists in Cuckoo Filter `key`. + For more information see `CF.EXISTS `_. + """ # noqa + return self.execute_command(CF_EXISTS, key, item) + + def mexists(self, key, *items): + """ + Check whether an `items` exist in Cuckoo Filter `key`. + For more information see `CF.MEXISTS `_. + """ # noqa + return self.execute_command(CF_MEXISTS, key, *items) + + def delete(self, key, item): + """ + Delete `item` from `key`. + For more information see `CF.DEL `_. + """ # noqa + return self.execute_command(CF_DEL, key, item) + + def count(self, key, item): + """ + Return the number of times an `item` may be in the `key`. + For more information see `CF.COUNT `_. + """ # noqa + return self.execute_command(CF_COUNT, key, item) + + def scandump(self, key, iter): + """ + Begin an incremental save of the Cuckoo filter `key`. + + This is useful for large Cuckoo filters which cannot fit into the normal + SAVE and RESTORE model. + The first time this command is called, the value of `iter` should be 0. + This command will return successive (iter, data) pairs until + (0, NULL) to indicate completion. + For more information see `CF.SCANDUMP `_. + """ # noqa + return self.execute_command(CF_SCANDUMP, key, iter) + + def loadchunk(self, key, iter, data): + """ + Restore a filter previously saved using SCANDUMP. See the SCANDUMP command for example usage. + + This command will overwrite any Cuckoo filter stored under key. 
+        Ensure that the Cuckoo filter will not be modified between invocations.
+        For more information see `CF.LOADCHUNK `_.
+        """  # noqa
+        return self.execute_command(CF_LOADCHUNK, key, iter, data)
+
+    def info(self, key):
+        """
+        Return size, number of buckets, number of filters, number of items inserted,
+        number of items deleted, bucket size, expansion rate, and max iterations.
+        For more information see `CF.INFO `_.
+        """  # noqa
+        return self.execute_command(CF_INFO, key)
+
+
+class TOPKCommands:
+    """Top-K Filter commands."""
+
+    def reserve(self, key, k, width, depth, decay):
+        """
+        Create a new Top-K Filter `key` that tracks the `k` most frequent
+        items, using a sketch of the given `width` and `depth` and the
+        given counter `decay` rate.
+        For more information see `TOPK.RESERVE `_.
+        """  # noqa
+        return self.execute_command(TOPK_RESERVE, key, k, width, depth, decay)
+
+    def add(self, key, *items):
+        """
+        Add one `item` or more to a Top-K Filter `key`.
+        For more information see `TOPK.ADD `_.
+        """  # noqa
+        return self.execute_command(TOPK_ADD, key, *items)
+
+    def incrby(self, key, items, increments):
+        """
+        Add/increase `items` in a Top-K Sketch `key` by `increments`.
+        Both `items` and `increments` are lists.
+        For more information see `TOPK.INCRBY `_.
+
+        Example:
+
+        >>> topkincrby('A', ['foo'], [1])
+        """  # noqa
+        params = [key]
+        self.append_items_and_increments(params, items, increments)
+        return self.execute_command(TOPK_INCRBY, *params)
+
+    def query(self, key, *items):
+        """
+        Check whether one `item` or more is a Top-K item at `key`.
+        For more information see `TOPK.QUERY `_.
+        """  # noqa
+        return self.execute_command(TOPK_QUERY, key, *items)
+
+    @deprecated_function(version="4.4.0", reason="deprecated since redisbloom 2.4.0")
+    def count(self, key, *items):
+        """
+        Return the count for one `item` or more from `key`.
+        For more information see `TOPK.COUNT `_.
+        """  # noqa
+        return self.execute_command(TOPK_COUNT, key, *items)
+
+    def list(self, key, withcount=False):
+        """
+        Return the full list of items in the Top-K list of `key`.
+        If `withcount` is set to True, return the full list of items
+        with their probabilistic counts in the Top-K list of `key`.
+        For more information see `TOPK.LIST `_.
+        """  # noqa
+        params = [key]
+        if withcount:
+            params.append("WITHCOUNT")
+        return self.execute_command(TOPK_LIST, *params)
+
+    def info(self, key):
+        """
+        Return the k, width, depth and decay values of `key`.
+        For more information see `TOPK.INFO `_.
+        """  # noqa
+        return self.execute_command(TOPK_INFO, key)
+
+
+class TDigestCommands:
+    def create(self, key, compression=100):
+        """
+        Allocate the memory and initialize the t-digest.
+        For more information see `TDIGEST.CREATE `_.
+        """  # noqa
+        return self.execute_command(TDIGEST_CREATE, key, "COMPRESSION", compression)
+
+    def reset(self, key):
+        """
+        Reset the sketch `key` to zero - empty out the sketch and re-initialize it.
+        For more information see `TDIGEST.RESET `_.
+        """  # noqa
+        return self.execute_command(TDIGEST_RESET, key)
+
+    def add(self, key, values):
+        """
+        Adds one or more observations to a t-digest sketch `key`.
+
+        For more information see `TDIGEST.ADD `_.
+        """  # noqa
+        return self.execute_command(TDIGEST_ADD, key, *values)
+
+    def merge(self, destination_key, num_keys, *keys, compression=None, override=False):
+        """
+        Merges all of the values from `keys` into the `destination_key` sketch.
+        It is mandatory to provide `num_keys` before passing the input keys and
+        the other (optional) arguments.
+ If `destination_key` already exists its values are merged with the input keys. + If you wish to override the destination key contents use the `OVERRIDE` parameter. + + For more information see `TDIGEST.MERGE `_. + """ # noqa + params = [destination_key, num_keys, *keys] + if compression is not None: + params.extend(["COMPRESSION", compression]) + if override: + params.append("OVERRIDE") + return self.execute_command(TDIGEST_MERGE, *params) + + def min(self, key): + """ + Return minimum value from the sketch `key`. Will return DBL_MAX if the sketch is empty. + For more information see `TDIGEST.MIN `_. + """ # noqa + return self.execute_command(TDIGEST_MIN, key) + + def max(self, key): + """ + Return maximum value from the sketch `key`. Will return DBL_MIN if the sketch is empty. + For more information see `TDIGEST.MAX `_. + """ # noqa + return self.execute_command(TDIGEST_MAX, key) + + def quantile(self, key, quantile, *quantiles): + """ + Returns estimates of one or more cutoffs such that a specified fraction of the + observations added to this t-digest would be less than or equal to each of the + specified cutoffs. (Multiple quantiles can be returned with one call) + For more information see `TDIGEST.QUANTILE `_. + """ # noqa + return self.execute_command(TDIGEST_QUANTILE, key, quantile, *quantiles) + + def cdf(self, key, value, *values): + """ + Return double fraction of all points added which are <= value. + For more information see `TDIGEST.CDF `_. + """ # noqa + return self.execute_command(TDIGEST_CDF, key, value, *values) + + def info(self, key): + """ + Return Compression, Capacity, Merged Nodes, Unmerged Nodes, Merged Weight, Unmerged Weight + and Total Compressions. + For more information see `TDIGEST.INFO `_. + """ # noqa + return self.execute_command(TDIGEST_INFO, key) + + def trimmed_mean(self, key, low_cut_quantile, high_cut_quantile): + """ + Return mean value from the sketch, excluding observation values outside + the low and high cutoff quantiles. + For more information see `TDIGEST.TRIMMED_MEAN `_. + """ # noqa + return self.execute_command( + TDIGEST_TRIMMED_MEAN, key, low_cut_quantile, high_cut_quantile + ) + + def rank(self, key, value, *values): + """ + Retrieve the estimated rank of value (the number of observations in the sketch + that are smaller than value + half the number of observations that are equal to value). + + For more information see `TDIGEST.RANK `_. + """ # noqa + return self.execute_command(TDIGEST_RANK, key, value, *values) + + def revrank(self, key, value, *values): + """ + Retrieve the estimated rank of value (the number of observations in the sketch + that are larger than value + half the number of observations that are equal to value). + + For more information see `TDIGEST.REVRANK `_. + """ # noqa + return self.execute_command(TDIGEST_REVRANK, key, value, *values) + + def byrank(self, key, rank, *ranks): + """ + Retrieve an estimation of the value with the given rank. + + For more information see `TDIGEST.BY_RANK `_. + """ # noqa + return self.execute_command(TDIGEST_BYRANK, key, rank, *ranks) + + def byrevrank(self, key, rank, *ranks): + """ + Retrieve an estimation of the value with the given reverse rank. + + For more information see `TDIGEST.BY_REVRANK `_. + """ # noqa + return self.execute_command(TDIGEST_BYREVRANK, key, rank, *ranks) + + +class CMSCommands: + """Count-Min Sketch Commands""" + + def initbydim(self, key, width, depth): + """ + Initialize a Count-Min Sketch `key` to dimensions (`width`, `depth`) specified by user. 
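+        A larger `width` lowers the estimation error, while a larger `depth`
+        lowers the probability of returning a bad estimate.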
+ For more information see `CMS.INITBYDIM `_. + """ # noqa + return self.execute_command(CMS_INITBYDIM, key, width, depth) + + def initbyprob(self, key, error, probability): + """ + Initialize a Count-Min Sketch `key` to characteristics (`error`, `probability`) specified by user. + For more information see `CMS.INITBYPROB `_. + """ # noqa + return self.execute_command(CMS_INITBYPROB, key, error, probability) + + def incrby(self, key, items, increments): + """ + Add/increase `items` to a Count-Min Sketch `key` by ''increments''. + Both `items` and `increments` are lists. + For more information see `CMS.INCRBY `_. + + Example: + + >>> cmsincrby('A', ['foo'], [1]) + """ # noqa + params = [key] + self.append_items_and_increments(params, items, increments) + return self.execute_command(CMS_INCRBY, *params) + + def query(self, key, *items): + """ + Return count for an `item` from `key`. Multiple items can be queried with one call. + For more information see `CMS.QUERY `_. + """ # noqa + return self.execute_command(CMS_QUERY, key, *items) + + def merge(self, destKey, numKeys, srcKeys, weights=[]): + """ + Merge `numKeys` of sketches into `destKey`. Sketches specified in `srcKeys`. + All sketches must have identical width and depth. + `Weights` can be used to multiply certain sketches. Default weight is 1. + Both `srcKeys` and `weights` are lists. + For more information see `CMS.MERGE `_. + """ # noqa + params = [destKey, numKeys] + params += srcKeys + self.append_weights(params, weights) + return self.execute_command(CMS_MERGE, *params) + + def info(self, key): + """ + Return width, depth and total count of the sketch. + For more information see `CMS.INFO `_. + """ # noqa + return self.execute_command(CMS_INFO, key) diff --git a/templates/skills/spotify/dependencies/redis/commands/bf/info.py b/templates/skills/spotify/dependencies/redis/commands/bf/info.py new file mode 100644 index 00000000..e1f02086 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/bf/info.py @@ -0,0 +1,120 @@ +from ..helpers import nativestr + + +class BFInfo(object): + capacity = None + size = None + filterNum = None + insertedNum = None + expansionRate = None + + def __init__(self, args): + response = dict(zip(map(nativestr, args[::2]), args[1::2])) + self.capacity = response["Capacity"] + self.size = response["Size"] + self.filterNum = response["Number of filters"] + self.insertedNum = response["Number of items inserted"] + self.expansionRate = response["Expansion rate"] + + def get(self, item): + try: + return self.__getitem__(item) + except AttributeError: + return None + + def __getitem__(self, item): + return getattr(self, item) + + +class CFInfo(object): + size = None + bucketNum = None + filterNum = None + insertedNum = None + deletedNum = None + bucketSize = None + expansionRate = None + maxIteration = None + + def __init__(self, args): + response = dict(zip(map(nativestr, args[::2]), args[1::2])) + self.size = response["Size"] + self.bucketNum = response["Number of buckets"] + self.filterNum = response["Number of filters"] + self.insertedNum = response["Number of items inserted"] + self.deletedNum = response["Number of items deleted"] + self.bucketSize = response["Bucket size"] + self.expansionRate = response["Expansion rate"] + self.maxIteration = response["Max iterations"] + + def get(self, item): + try: + return self.__getitem__(item) + except AttributeError: + return None + + def __getitem__(self, item): + return getattr(self, item) + + +class CMSInfo(object): + width = None + depth = None 
+ count = None + + def __init__(self, args): + response = dict(zip(map(nativestr, args[::2]), args[1::2])) + self.width = response["width"] + self.depth = response["depth"] + self.count = response["count"] + + def __getitem__(self, item): + return getattr(self, item) + + +class TopKInfo(object): + k = None + width = None + depth = None + decay = None + + def __init__(self, args): + response = dict(zip(map(nativestr, args[::2]), args[1::2])) + self.k = response["k"] + self.width = response["width"] + self.depth = response["depth"] + self.decay = response["decay"] + + def __getitem__(self, item): + return getattr(self, item) + + +class TDigestInfo(object): + compression = None + capacity = None + merged_nodes = None + unmerged_nodes = None + merged_weight = None + unmerged_weight = None + total_compressions = None + memory_usage = None + + def __init__(self, args): + response = dict(zip(map(nativestr, args[::2]), args[1::2])) + self.compression = response["Compression"] + self.capacity = response["Capacity"] + self.merged_nodes = response["Merged nodes"] + self.unmerged_nodes = response["Unmerged nodes"] + self.merged_weight = response["Merged weight"] + self.unmerged_weight = response["Unmerged weight"] + self.total_compressions = response["Total compressions"] + self.memory_usage = response["Memory usage"] + + def get(self, item): + try: + return self.__getitem__(item) + except AttributeError: + return None + + def __getitem__(self, item): + return getattr(self, item) diff --git a/templates/skills/spotify/dependencies/redis/commands/cluster.py b/templates/skills/spotify/dependencies/redis/commands/cluster.py new file mode 100644 index 00000000..67be5e3a --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/cluster.py @@ -0,0 +1,929 @@ +import asyncio +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Dict, + Iterable, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Union, +) + +from redis.compat import Literal +from redis.crc import key_slot +from redis.exceptions import RedisClusterException, RedisError +from redis.typing import ( + AnyKeyT, + ClusterCommandsProtocol, + EncodableT, + KeysT, + KeyT, + PatternT, +) + +from .core import ( + ACLCommands, + AsyncACLCommands, + AsyncDataAccessCommands, + AsyncFunctionCommands, + AsyncGearsCommands, + AsyncManagementCommands, + AsyncModuleCommands, + AsyncScriptCommands, + DataAccessCommands, + FunctionCommands, + GearsCommands, + ManagementCommands, + ModuleCommands, + PubSubCommands, + ResponseT, + ScriptCommands, +) +from .helpers import list_or_args +from .redismodules import AsyncRedisModuleCommands, RedisModuleCommands + +if TYPE_CHECKING: + from redis.asyncio.cluster import TargetNodesT + +# Not complete, but covers the major ones +# https://redis.io/commands +READ_COMMANDS = frozenset( + [ + "BITCOUNT", + "BITPOS", + "EVAL_RO", + "EVALSHA_RO", + "EXISTS", + "GEODIST", + "GEOHASH", + "GEOPOS", + "GEORADIUS", + "GEORADIUSBYMEMBER", + "GET", + "GETBIT", + "GETRANGE", + "HEXISTS", + "HGET", + "HGETALL", + "HKEYS", + "HLEN", + "HMGET", + "HSTRLEN", + "HVALS", + "KEYS", + "LINDEX", + "LLEN", + "LRANGE", + "MGET", + "PTTL", + "RANDOMKEY", + "SCARD", + "SDIFF", + "SINTER", + "SISMEMBER", + "SMEMBERS", + "SRANDMEMBER", + "STRLEN", + "SUNION", + "TTL", + "ZCARD", + "ZCOUNT", + "ZRANGE", + "ZSCORE", + ] +) + + +class ClusterMultiKeyCommands(ClusterCommandsProtocol): + """ + A class containing commands that handle more than one key + """ + + def _partition_keys_by_slot(self, keys: Iterable[KeyT]) -> 
Dict[int, List[KeyT]]: + """Split keys into a dictionary that maps a slot to a list of keys.""" + + slots_to_keys = {} + for key in keys: + slot = key_slot(self.encoder.encode(key)) + slots_to_keys.setdefault(slot, []).append(key) + + return slots_to_keys + + def _partition_pairs_by_slot( + self, mapping: Mapping[AnyKeyT, EncodableT] + ) -> Dict[int, List[EncodableT]]: + """Split pairs into a dictionary that maps a slot to a list of pairs.""" + + slots_to_pairs = {} + for pair in mapping.items(): + slot = key_slot(self.encoder.encode(pair[0])) + slots_to_pairs.setdefault(slot, []).extend(pair) + + return slots_to_pairs + + def _execute_pipeline_by_slot( + self, command: str, slots_to_args: Mapping[int, Iterable[EncodableT]] + ) -> List[Any]: + read_from_replicas = self.read_from_replicas and command in READ_COMMANDS + pipe = self.pipeline() + [ + pipe.execute_command( + command, + *slot_args, + target_nodes=[ + self.nodes_manager.get_node_from_slot(slot, read_from_replicas) + ], + ) + for slot, slot_args in slots_to_args.items() + ] + return pipe.execute() + + def _reorder_keys_by_command( + self, + keys: Iterable[KeyT], + slots_to_args: Mapping[int, Iterable[EncodableT]], + responses: Iterable[Any], + ) -> List[Any]: + results = { + k: v + for slot_values, response in zip(slots_to_args.values(), responses) + for k, v in zip(slot_values, response) + } + return [results[key] for key in keys] + + def mget_nonatomic(self, keys: KeysT, *args: KeyT) -> List[Optional[Any]]: + """ + Splits the keys into different slots and then calls MGET + for the keys of every slot. This operation will not be atomic + if keys belong to more than one slot. + + Returns a list of values ordered identically to ``keys`` + + For more information see https://redis.io/commands/mget + """ + + # Concatenate all keys into a list + keys = list_or_args(keys, args) + + # Split keys into slots + slots_to_keys = self._partition_keys_by_slot(keys) + + # Execute commands using a pipeline + res = self._execute_pipeline_by_slot("MGET", slots_to_keys) + + # Reorder keys in the order the user provided & return + return self._reorder_keys_by_command(keys, slots_to_keys, res) + + def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> List[bool]: + """ + Sets key/values based on a mapping. Mapping is a dictionary of + key/value pairs. Both keys and values should be strings or types that + can be cast to a string via str(). + + Splits the keys into different slots and then calls MSET + for the keys of every slot. This operation will not be atomic + if keys belong to more than one slot. + + For more information see https://redis.io/commands/mset + """ + + # Partition the keys by slot + slots_to_pairs = self._partition_pairs_by_slot(mapping) + + # Execute commands using a pipeline & return list of replies + return self._execute_pipeline_by_slot("MSET", slots_to_pairs) + + def _split_command_across_slots(self, command: str, *keys: KeyT) -> int: + """ + Runs the given command once for the keys + of each slot. Returns the sum of the return values. + """ + + # Partition the keys by slot + slots_to_keys = self._partition_keys_by_slot(keys) + + # Sum up the reply from each command + return sum(self._execute_pipeline_by_slot(command, slots_to_keys)) + + def exists(self, *keys: KeyT) -> ResponseT: + """ + Returns the number of ``names`` that exist in the + whole cluster. 
The keys are first split up into slots
+        and then an EXISTS command is sent for every slot.
+
+        For more information see https://redis.io/commands/exists
+        """
+        return self._split_command_across_slots("EXISTS", *keys)
+
+    def delete(self, *keys: KeyT) -> ResponseT:
+        """
+        Deletes the given keys in the cluster.
+        The keys are first split up into slots
+        and then a DEL command is sent for every slot.
+
+        Non-existent keys are ignored.
+        Returns the number of keys that were deleted.
+
+        For more information see https://redis.io/commands/del
+        """
+        return self._split_command_across_slots("DEL", *keys)
+
+    def touch(self, *keys: KeyT) -> ResponseT:
+        """
+        Updates the last access time of the given keys across the
+        cluster.
+
+        The keys are first split up into slots
+        and then a TOUCH command is sent for every slot.
+
+        Non-existent keys are ignored.
+        Returns the number of keys that were touched.
+
+        For more information see https://redis.io/commands/touch
+        """
+        return self._split_command_across_slots("TOUCH", *keys)
+
+    def unlink(self, *keys: KeyT) -> ResponseT:
+        """
+        Remove the specified keys in a different thread.
+
+        The keys are first split up into slots
+        and then an UNLINK command is sent for every slot.
+
+        Non-existent keys are ignored.
+        Returns the number of keys that were unlinked.
+
+        For more information see https://redis.io/commands/unlink
+        """
+        return self._split_command_across_slots("UNLINK", *keys)
+
+
+class AsyncClusterMultiKeyCommands(ClusterMultiKeyCommands):
+    """
+    A class containing commands that handle more than one key
+    """
+
+    async def mget_nonatomic(self, keys: KeysT, *args: KeyT) -> List[Optional[Any]]:
+        """
+        Splits the keys into different slots and then calls MGET
+        for the keys of every slot. This operation will not be atomic
+        if keys belong to more than one slot.
+
+        Returns a list of values ordered identically to ``keys``
+
+        For more information see https://redis.io/commands/mget
+        """
+
+        # Concatenate all keys into a list
+        keys = list_or_args(keys, args)
+
+        # Split keys into slots
+        slots_to_keys = self._partition_keys_by_slot(keys)
+
+        # Execute commands using a pipeline
+        res = await self._execute_pipeline_by_slot("MGET", slots_to_keys)
+
+        # Reorder keys in the order the user provided & return
+        return self._reorder_keys_by_command(keys, slots_to_keys, res)
+
+    async def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> List[bool]:
+        """
+        Sets key/values based on a mapping. Mapping is a dictionary of
+        key/value pairs. Both keys and values should be strings or types that
+        can be cast to a string via str().
+
+        Splits the keys into different slots and then calls MSET
+        for the keys of every slot. This operation will not be atomic
+        if keys belong to more than one slot.
+
+        For more information see https://redis.io/commands/mset
+        """
+
+        # Partition the keys by slot
+        slots_to_pairs = self._partition_pairs_by_slot(mapping)
+
+        # Execute commands using a pipeline & return list of replies
+        return await self._execute_pipeline_by_slot("MSET", slots_to_pairs)
+
+    async def _split_command_across_slots(self, command: str, *keys: KeyT) -> int:
+        """
+        Runs the given command once for the keys
+        of each slot. Returns the sum of the return values.
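+
+        e.g. ``EXISTS k1 k2 k3`` becomes one EXISTS call per slot that holds
+        any of the keys, and the per-slot replies are summed into one count.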
+ """ + + # Partition the keys by slot + slots_to_keys = self._partition_keys_by_slot(keys) + + # Sum up the reply from each command + return sum(await self._execute_pipeline_by_slot(command, slots_to_keys)) + + async def _execute_pipeline_by_slot( + self, command: str, slots_to_args: Mapping[int, Iterable[EncodableT]] + ) -> List[Any]: + if self._initialize: + await self.initialize() + read_from_replicas = self.read_from_replicas and command in READ_COMMANDS + pipe = self.pipeline() + [ + pipe.execute_command( + command, + *slot_args, + target_nodes=[ + self.nodes_manager.get_node_from_slot(slot, read_from_replicas) + ], + ) + for slot, slot_args in slots_to_args.items() + ] + return await pipe.execute() + + +class ClusterManagementCommands(ManagementCommands): + """ + A class for Redis Cluster management commands + + The class inherits from Redis's core ManagementCommands class and do the + required adjustments to work with cluster mode + """ + + def slaveof(self, *args, **kwargs) -> NoReturn: + """ + Make the server a replica of another instance, or promote it as master. + + For more information see https://redis.io/commands/slaveof + """ + raise RedisClusterException("SLAVEOF is not supported in cluster mode") + + def replicaof(self, *args, **kwargs) -> NoReturn: + """ + Make the server a replica of another instance, or promote it as master. + + For more information see https://redis.io/commands/replicaof + """ + raise RedisClusterException("REPLICAOF is not supported in cluster mode") + + def swapdb(self, *args, **kwargs) -> NoReturn: + """ + Swaps two Redis databases. + + For more information see https://redis.io/commands/swapdb + """ + raise RedisClusterException("SWAPDB is not supported in cluster mode") + + def cluster_myid(self, target_node: "TargetNodesT") -> ResponseT: + """ + Returns the node's id. + + :target_node: 'ClusterNode' + The node to execute the command on + + For more information check https://redis.io/commands/cluster-myid/ + """ + return self.execute_command("CLUSTER MYID", target_nodes=target_node) + + def cluster_addslots( + self, target_node: "TargetNodesT", *slots: EncodableT + ) -> ResponseT: + """ + Assign new hash slots to receiving node. Sends to specified node. + + :target_node: 'ClusterNode' + The node to execute the command on + + For more information see https://redis.io/commands/cluster-addslots + """ + return self.execute_command( + "CLUSTER ADDSLOTS", *slots, target_nodes=target_node + ) + + def cluster_addslotsrange( + self, target_node: "TargetNodesT", *slots: EncodableT + ) -> ResponseT: + """ + Similar to the CLUSTER ADDSLOTS command. + The difference between the two commands is that ADDSLOTS takes a list of slots + to assign to the node, while ADDSLOTSRANGE takes a list of slot ranges + (specified by start and end slots) to assign to the node. 
+
+        :target_node: 'ClusterNode'
+            The node to execute the command on
+
+        For more information see https://redis.io/commands/cluster-addslotsrange
+        """
+        return self.execute_command(
+            "CLUSTER ADDSLOTSRANGE", *slots, target_nodes=target_node
+        )
+
+    def cluster_countkeysinslot(self, slot_id: int) -> ResponseT:
+        """
+        Return the number of local keys in the specified hash slot.
+        Sends to the node that owns the specified slot_id.
+
+        For more information see https://redis.io/commands/cluster-countkeysinslot
+        """
+        return self.execute_command("CLUSTER COUNTKEYSINSLOT", slot_id)
+
+    def cluster_count_failure_report(self, node_id: str) -> ResponseT:
+        """
+        Return the number of failure reports active for a given node.
+        Sends to a random node.
+
+        For more information see https://redis.io/commands/cluster-count-failure-reports
+        """
+        return self.execute_command("CLUSTER COUNT-FAILURE-REPORTS", node_id)
+
+    def cluster_delslots(self, *slots: EncodableT) -> List[bool]:
+        """
+        Set hash slots as unbound in the cluster.
+        It determines by itself which node each slot is in and sends the
+        command there.
+
+        Returns a list of the results for each processed slot.
+
+        For more information see https://redis.io/commands/cluster-delslots
+        """
+        return [self.execute_command("CLUSTER DELSLOTS", slot) for slot in slots]
+
+    def cluster_delslotsrange(self, *slots: EncodableT) -> ResponseT:
+        """
+        Similar to the CLUSTER DELSLOTS command.
+        The difference is that CLUSTER DELSLOTS takes a list of hash slots to remove
+        from the node, while CLUSTER DELSLOTSRANGE takes a list of slot ranges to remove
+        from the node.
+
+        For more information see https://redis.io/commands/cluster-delslotsrange
+        """
+        return self.execute_command("CLUSTER DELSLOTSRANGE", *slots)
+
+    def cluster_failover(
+        self, target_node: "TargetNodesT", option: Optional[str] = None
+    ) -> ResponseT:
+        """
+        Forces a slave to perform a manual failover of its master.
+        Sends to the specified node.
+
+        :target_node: 'ClusterNode'
+            The node to execute the command on
+
+        For more information see https://redis.io/commands/cluster-failover
+        """
+        if option:
+            if option.upper() not in ["FORCE", "TAKEOVER"]:
+                raise RedisError(
+                    f"Invalid option for CLUSTER FAILOVER command: {option}"
+                )
+            else:
+                return self.execute_command(
+                    "CLUSTER FAILOVER", option, target_nodes=target_node
+                )
+        else:
+            return self.execute_command("CLUSTER FAILOVER", target_nodes=target_node)
+
+    def cluster_info(self, target_nodes: Optional["TargetNodesT"] = None) -> ResponseT:
+        """
+        Provides info about Redis Cluster node state.
+        The command will be sent to a random node in the cluster if no target
+        node is specified.
+
+        For more information see https://redis.io/commands/cluster-info
+        """
+        return self.execute_command("CLUSTER INFO", target_nodes=target_nodes)
+
+    def cluster_keyslot(self, key: str) -> ResponseT:
+        """
+        Returns the hash slot of the specified key.
+        Sends to a random node in the cluster.
+
+        For more information see https://redis.io/commands/cluster-keyslot
+        """
+        return self.execute_command("CLUSTER KEYSLOT", key)
+
+    def cluster_meet(
+        self, host: str, port: int, target_nodes: Optional["TargetNodesT"] = None
+    ) -> ResponseT:
+        """
+        Force a node cluster to handshake with another node.
+        Sends to the specified node.
+
+        For more information see https://redis.io/commands/cluster-meet
+        """
+        return self.execute_command(
+            "CLUSTER MEET", host, port, target_nodes=target_nodes
+        )
+
+    def cluster_nodes(self) -> ResponseT:
+        """
+        Get the Cluster config for the node.
+        Sends to random node in the cluster
+
+        For more information see https://redis.io/commands/cluster-nodes
+        """
+        return self.execute_command("CLUSTER NODES")
+
+    def cluster_replicate(
+        self, target_nodes: "TargetNodesT", node_id: str
+    ) -> ResponseT:
+        """
+        Reconfigure a node as a slave of the specified master node
+
+        For more information see https://redis.io/commands/cluster-replicate
+        """
+        return self.execute_command(
+            "CLUSTER REPLICATE", node_id, target_nodes=target_nodes
+        )
+
+    def cluster_reset(
+        self, soft: bool = True, target_nodes: Optional["TargetNodesT"] = None
+    ) -> ResponseT:
+        """
+        Reset a Redis Cluster node
+
+        If 'soft' is True then it will send 'SOFT' argument
+        If 'soft' is False then it will send 'HARD' argument
+
+        For more information see https://redis.io/commands/cluster-reset
+        """
+        return self.execute_command(
+            "CLUSTER RESET", b"SOFT" if soft else b"HARD", target_nodes=target_nodes
+        )
+
+    def cluster_save_config(
+        self, target_nodes: Optional["TargetNodesT"] = None
+    ) -> ResponseT:
+        """
+        Forces the node to save cluster state on disk
+
+        For more information see https://redis.io/commands/cluster-saveconfig
+        """
+        return self.execute_command("CLUSTER SAVECONFIG", target_nodes=target_nodes)
+
+    def cluster_get_keys_in_slot(self, slot: int, num_keys: int) -> ResponseT:
+        """
+        Returns up to ``num_keys`` key names from the specified cluster slot
+
+        For more information see https://redis.io/commands/cluster-getkeysinslot
+        """
+        return self.execute_command("CLUSTER GETKEYSINSLOT", slot, num_keys)
+
+    def cluster_set_config_epoch(
+        self, epoch: int, target_nodes: Optional["TargetNodesT"] = None
+    ) -> ResponseT:
+        """
+        Set the configuration epoch in a new node
+
+        For more information see https://redis.io/commands/cluster-set-config-epoch
+        """
+        return self.execute_command(
+            "CLUSTER SET-CONFIG-EPOCH", epoch, target_nodes=target_nodes
+        )
+
+    def cluster_setslot(
+        self, target_node: "TargetNodesT", node_id: str, slot_id: int, state: str
+    ) -> ResponseT:
+        """
+        Bind a hash slot to a specific node
+
+        :target_node: 'ClusterNode'
+            The node to execute the command on
+
+        For more information see https://redis.io/commands/cluster-setslot
+        """
+        if state.upper() in ("IMPORTING", "NODE", "MIGRATING"):
+            return self.execute_command(
+                "CLUSTER SETSLOT", slot_id, state, node_id, target_nodes=target_node
+            )
+        elif state.upper() == "STABLE":
+            raise RedisError('For "stable" state please use cluster_setslot_stable')
+        else:
+            raise RedisError(f"Invalid slot state: {state}")
+
+    def cluster_setslot_stable(self, slot_id: int) -> ResponseT:
+        """
+        Clears migrating / importing state from the slot.
+        It determines by itself which node the slot is in and sends the
+        command there.
+
+        For more information see https://redis.io/commands/cluster-setslot
+        """
+        return self.execute_command("CLUSTER SETSLOT", slot_id, "STABLE")
+
+    def cluster_replicas(
+        self, node_id: str, target_nodes: Optional["TargetNodesT"] = None
+    ) -> ResponseT:
+        """
+        Provides a list of replica nodes replicating from the specified primary
+        target node.
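+
+        Illustrative sketch (``rc`` is an assumed name for a connected
+        RedisCluster client; the node id is the placeholder id used in the
+        Redis documentation):
+
+            replicas = rc.cluster_replicas("07c37dfeb235213a872192d90877d0cd55635b91")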
+
+        For more information see https://redis.io/commands/cluster-replicas
+        """
+        return self.execute_command(
+            "CLUSTER REPLICAS", node_id, target_nodes=target_nodes
+        )
+
+    def cluster_slots(self, target_nodes: Optional["TargetNodesT"] = None) -> ResponseT:
+        """
+        Get array of Cluster slot to node mappings
+
+        For more information see https://redis.io/commands/cluster-slots
+        """
+        return self.execute_command("CLUSTER SLOTS", target_nodes=target_nodes)
+
+    def cluster_shards(self, target_nodes=None):
+        """
+        Returns details about the shards of the cluster.
+
+        For more information see https://redis.io/commands/cluster-shards
+        """
+        return self.execute_command("CLUSTER SHARDS", target_nodes=target_nodes)
+
+    def cluster_myshardid(self, target_nodes=None):
+        """
+        Returns the shard ID of the node.
+
+        For more information see https://redis.io/commands/cluster-myshardid/
+        """
+        return self.execute_command("CLUSTER MYSHARDID", target_nodes=target_nodes)
+
+    def cluster_links(self, target_node: "TargetNodesT") -> ResponseT:
+        """
+        Each node in a Redis Cluster maintains a pair of long-lived TCP links with each
+        peer in the cluster: one for sending outbound messages towards the peer and one
+        for receiving inbound messages from the peer.
+
+        This command outputs information about all such peer links as an array.
+
+        For more information see https://redis.io/commands/cluster-links
+        """
+        return self.execute_command("CLUSTER LINKS", target_nodes=target_node)
+
+    def cluster_flushslots(self, target_nodes: Optional["TargetNodesT"] = None) -> None:
+        raise NotImplementedError(
+            "CLUSTER FLUSHSLOTS is intentionally not implemented in the client."
+        )
+
+    def cluster_bumpepoch(self, target_nodes: Optional["TargetNodesT"] = None) -> None:
+        raise NotImplementedError(
+            "CLUSTER BUMPEPOCH is intentionally not implemented in the client."
+        )
+
+    def readonly(self, target_nodes: Optional["TargetNodesT"] = None) -> ResponseT:
+        """
+        Enables read queries.
+        The command will be sent to the default cluster node if target_nodes is
+        not specified.
+
+        For more information see https://redis.io/commands/readonly
+        """
+        if target_nodes == "replicas" or target_nodes == "all":
+            # read_from_replicas will only be enabled if the READONLY command
+            # is sent to all replicas
+            self.read_from_replicas = True
+        return self.execute_command("READONLY", target_nodes=target_nodes)
+
+    def readwrite(self, target_nodes: Optional["TargetNodesT"] = None) -> ResponseT:
+        """
+        Disables read queries.
+        The command will be sent to the default cluster node if target_nodes is
+        not specified.
+
+        For more information see https://redis.io/commands/readwrite
+        """
+        # Reset read from replicas flag
+        self.read_from_replicas = False
+        return self.execute_command("READWRITE", target_nodes=target_nodes)
+
+    def gears_refresh_cluster(self, **kwargs) -> ResponseT:
+        """
+        On an OSS cluster, before executing any gears function, you must call this command. # noqa
+        """
+        return self.execute_command("REDISGEARS_2.REFRESHCLUSTER", **kwargs)
+
+
+class AsyncClusterManagementCommands(
+    ClusterManagementCommands, AsyncManagementCommands
+):
+    """
+    A class for Redis Cluster management commands
+
+    The class inherits from Redis's core ManagementCommands class and makes
+    the required adjustments to work with cluster mode
+    """
+
+    async def cluster_delslots(self, *slots: EncodableT) -> List[bool]:
+        """
+        Set hash slots as unbound in the cluster.
+        It determines by itself which node each slot is in and sends the
+        command there
+
+        Returns a list of the results for each processed slot.
+
+        For more information see https://redis.io/commands/cluster-delslots
+        """
+        return await asyncio.gather(
+            *(
+                asyncio.create_task(self.execute_command("CLUSTER DELSLOTS", slot))
+                for slot in slots
+            )
+        )
+
+
+class ClusterDataAccessCommands(DataAccessCommands):
+    """
+    A class for Redis Cluster Data Access Commands
+
+    The class inherits from Redis's core DataAccessCommands class and makes
+    the required adjustments to work with cluster mode
+    """
+
+    def stralgo(
+        self,
+        algo: Literal["LCS"],
+        value1: KeyT,
+        value2: KeyT,
+        specific_argument: Union[Literal["strings"], Literal["keys"]] = "strings",
+        len: bool = False,
+        idx: bool = False,
+        minmatchlen: Optional[int] = None,
+        withmatchlen: bool = False,
+        **kwargs,
+    ) -> ResponseT:
+        """
+        Implements complex algorithms that operate on strings.
+        Right now the only algorithm implemented is the LCS algorithm
+        (longest common subsequence). However new algorithms could be
+        implemented in the future.
+
+        ``algo`` Right now must be LCS
+        ``value1`` and ``value2`` Can be two strings or two keys
+        ``specific_argument`` Specifying if the arguments to the algorithm
+        will be keys or strings. strings is the default.
+        ``len`` Returns just the len of the match.
+        ``idx`` Returns the match positions in each string.
+        ``minmatchlen`` Restrict the list of matches to the ones of a given
+        minimal length. Can be provided only when ``idx`` set to True.
+        ``withmatchlen`` Returns the matches with the len of the match.
+        Can be provided only when ``idx`` set to True.
+
+        For more information see https://redis.io/commands/stralgo
+        """
+        target_nodes = kwargs.pop("target_nodes", None)
+        if specific_argument == "strings" and target_nodes is None:
+            target_nodes = "default-node"
+        kwargs.update({"target_nodes": target_nodes})
+        return super().stralgo(
+            algo,
+            value1,
+            value2,
+            specific_argument,
+            len,
+            idx,
+            minmatchlen,
+            withmatchlen,
+            **kwargs,
+        )
+
+    def scan_iter(
+        self,
+        match: Optional[PatternT] = None,
+        count: Optional[int] = None,
+        _type: Optional[str] = None,
+        **kwargs,
+    ) -> Iterator:
+        # Do the first query with cursor=0 for all nodes
+        cursors, data = self.scan(match=match, count=count, _type=_type, **kwargs)
+        yield from data
+
+        cursors = {name: cursor for name, cursor in cursors.items() if cursor != 0}
+        if cursors:
+            # Get nodes by name
+            nodes = {name: self.get_node(node_name=name) for name in cursors.keys()}
+
+            # Iterate over each node till its cursor is 0
+            kwargs.pop("target_nodes", None)
+            while cursors:
+                for name, cursor in cursors.items():
+                    cur, data = self.scan(
+                        cursor=cursor,
+                        match=match,
+                        count=count,
+                        _type=_type,
+                        target_nodes=nodes[name],
+                        **kwargs,
+                    )
+                    yield from data
+                    cursors[name] = cur[name]
+
+                cursors = {
+                    name: cursor for name, cursor in cursors.items() if cursor != 0
+                }
+
+
+class AsyncClusterDataAccessCommands(
+    ClusterDataAccessCommands, AsyncDataAccessCommands
+):
+    """
+    A class for Redis Cluster Data Access Commands
+
+    The class inherits from Redis's core DataAccessCommands class and makes
+    the required adjustments to work with cluster mode
+    """
+
+    async def scan_iter(
+        self,
+        match: Optional[PatternT] = None,
+        count: Optional[int] = None,
+        _type: Optional[str] = None,
+        **kwargs,
+    ) -> AsyncIterator:
+        # Do the first query with cursor=0 for all nodes
+        cursors, data = await self.scan(match=match, count=count, _type=_type, **kwargs)
+        for value in data:
+            yield value
+
+        cursors = {name: cursor for name, cursor in cursors.items() if cursor != 0}
+        if cursors:
+            # Get nodes by name
+            nodes = {name: self.get_node(node_name=name) for name in cursors.keys()}
+
+            # Iterate over each node till its cursor is 0
+            kwargs.pop("target_nodes", None)
+            while cursors:
+                for name, cursor in cursors.items():
+                    cur, data = await self.scan(
+                        cursor=cursor,
+                        match=match,
+                        count=count,
+                        _type=_type,
+                        target_nodes=nodes[name],
+                        **kwargs,
+                    )
+                    for value in data:
+                        yield value
+                    cursors[name] = cur[name]
+
+                cursors = {
+                    name: cursor for name, cursor in cursors.items() if cursor != 0
+                }
+
+
+class RedisClusterCommands(
+    ClusterMultiKeyCommands,
+    ClusterManagementCommands,
+    ACLCommands,
+    PubSubCommands,
+    ClusterDataAccessCommands,
+    ScriptCommands,
+    FunctionCommands,
+    GearsCommands,
+    ModuleCommands,
+    RedisModuleCommands,
+):
+    """
+    A class for all Redis Cluster commands
+
+    For key-based commands, the target node(s) will be internally determined
+    by the keys' hash slot.
+    Non-key-based commands can be executed with the 'target_nodes' argument to
+    target specific nodes. By default, if target_nodes is not specified, the
+    command will be executed on the default cluster node.
+
+    :param :target_nodes: type can be one of the following:
+        - nodes flag: ALL_NODES, PRIMARIES, REPLICAS, RANDOM
+        - 'ClusterNode'
+        - 'list(ClusterNodes)'
+        - 'dict(any:clusterNodes)'
+
+    for example:
+        r.cluster_info(target_nodes=RedisCluster.ALL_NODES)
+    """
+
+
+class AsyncRedisClusterCommands(
+    AsyncClusterMultiKeyCommands,
+    AsyncClusterManagementCommands,
+    AsyncACLCommands,
+    AsyncClusterDataAccessCommands,
+    AsyncScriptCommands,
+    AsyncFunctionCommands,
+    AsyncGearsCommands,
+    AsyncModuleCommands,
+    AsyncRedisModuleCommands,
+):
+    """
+    A class for all Redis Cluster commands
+
+    For key-based commands, the target node(s) will be internally determined
+    by the keys' hash slot.
+    Non-key-based commands can be executed with the 'target_nodes' argument to
+    target specific nodes. By default, if target_nodes is not specified, the
+    command will be executed on the default cluster node.
+
+    :param :target_nodes: type can be one of the following:
+        - nodes flag: ALL_NODES, PRIMARIES, REPLICAS, RANDOM
+        - 'ClusterNode'
+        - 'list(ClusterNodes)'
+        - 'dict(any:clusterNodes)'
+
+    for example:
+        r.cluster_info(target_nodes=RedisCluster.ALL_NODES)
+    """
diff --git a/templates/skills/spotify/dependencies/redis/commands/core.py b/templates/skills/spotify/dependencies/redis/commands/core.py
new file mode 100644
index 00000000..5cf6b6d8
--- /dev/null
+++ b/templates/skills/spotify/dependencies/redis/commands/core.py
@@ -0,0 +1,6308 @@
+# from __future__ import annotations
+
+import datetime
+import hashlib
+import warnings
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncIterator,
+    Awaitable,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Union,
+)
+
+from redis.compat import Literal
+from redis.exceptions import ConnectionError, DataError, NoScriptError, RedisError
+from redis.typing import (
+    AbsExpiryT,
+    AnyKeyT,
+    BitfieldOffsetT,
+    ChannelT,
+    CommandsProtocol,
+    ConsumerT,
+    EncodableT,
+    ExpiryT,
+    FieldT,
+    GroupT,
+    KeysT,
+    KeyT,
+    PatternT,
+    ScriptTextT,
+    StreamIdT,
+    TimeoutSecT,
+    ZScoreBoundT,
+)
+
+from .helpers import list_or_args
+
+if TYPE_CHECKING:
+    from redis.asyncio.client import Redis as AsyncRedis
+    from redis.client import Redis
+
+ResponseT = Union[Awaitable, Any]
+
+
+class ACLCommands(CommandsProtocol):
+    """
+    Redis Access Control List (ACL) commands.
+    see: https://redis.io/topics/acl
+    """
+
+    def acl_cat(self, category: Union[str, None] = None, **kwargs) -> ResponseT:
+        """
+        Returns a list of categories or commands within a category.
+
+        If ``category`` is not supplied, returns a list of all categories.
+        If ``category`` is supplied, returns a list of all commands within
+        that category.
+
+        For more information see https://redis.io/commands/acl-cat
+        """
+        pieces: list[EncodableT] = [category] if category else []
+        return self.execute_command("ACL CAT", *pieces, **kwargs)
+
+    def acl_dryrun(self, username, *args, **kwargs):
+        """
+        Simulate the execution of a given command by a given ``username``.
+
+        For more information see https://redis.io/commands/acl-dryrun
+        """
+        return self.execute_command("ACL DRYRUN", username, *args, **kwargs)
+
+    def acl_deluser(self, *username: str, **kwargs) -> ResponseT:
+        """
+        Delete the ACL for the specified ``username``s
+
+        For more information see https://redis.io/commands/acl-deluser
+        """
+        return self.execute_command("ACL DELUSER", *username, **kwargs)
+
+    def acl_genpass(self, bits: Union[int, None] = None, **kwargs) -> ResponseT:
+        """Generate a random password value.
+        If ``bits`` is supplied then use this number of bits, rounded to
+        the next multiple of 4.
+        See: https://redis.io/commands/acl-genpass
+        """
+        pieces = []
+        if bits is not None:
+            try:
+                b = int(bits)
+                if b < 0 or b > 4096:
+                    raise ValueError
+                pieces.append(b)
+            except ValueError:
+                raise DataError(
+                    "genpass optionally accepts a bits argument, between 0 and 4096."
+                )
+        return self.execute_command("ACL GENPASS", *pieces, **kwargs)
+
+    def acl_getuser(self, username: str, **kwargs) -> ResponseT:
+        """
+        Get the ACL details for the specified ``username``.
+
+        If ``username`` does not exist, return None
+
+        For more information see https://redis.io/commands/acl-getuser
+        """
+        return self.execute_command("ACL GETUSER", username, **kwargs)
+
+    def acl_help(self, **kwargs) -> ResponseT:
+        """The ACL HELP command returns helpful text describing
+        the different subcommands.
+
+        For more information see https://redis.io/commands/acl-help
+        """
+        return self.execute_command("ACL HELP", **kwargs)
+
+    def acl_list(self, **kwargs) -> ResponseT:
+        """
+        Return a list of all ACLs on the server
+
+        For more information see https://redis.io/commands/acl-list
+        """
+        return self.execute_command("ACL LIST", **kwargs)
+
+    def acl_log(self, count: Union[int, None] = None, **kwargs) -> ResponseT:
+        """
+        Get ACL logs as a list.
+        :param int count: Get logs[0:count].
+        :rtype: List.
+
+        For more information see https://redis.io/commands/acl-log
+        """
+        args = []
+        if count is not None:
+            if not isinstance(count, int):
+                raise DataError("ACL LOG count must be an integer")
+            args.append(count)
+
+        return self.execute_command("ACL LOG", *args, **kwargs)
+
+    def acl_log_reset(self, **kwargs) -> ResponseT:
+        """
+        Reset ACL logs.
+        :rtype: Boolean.
+
+        For more information see https://redis.io/commands/acl-log
+        """
+        args = [b"RESET"]
+        return self.execute_command("ACL LOG", *args, **kwargs)
+
+    def acl_load(self, **kwargs) -> ResponseT:
+        """
+        Load ACL rules from the configured ``aclfile``.
+
+        Note that the server must be configured with the ``aclfile``
+        directive to be able to load ACL rules from an aclfile.
+
+        For more information see https://redis.io/commands/acl-load
+        """
+        return self.execute_command("ACL LOAD", **kwargs)
+
+    def acl_save(self, **kwargs) -> ResponseT:
+        """
+        Save ACL rules to the configured ``aclfile``.
+
+        Note that the server must be configured with the ``aclfile``
+        directive to be able to save ACL rules to an aclfile.
+
+        For more information see https://redis.io/commands/acl-save
+        """
+        return self.execute_command("ACL SAVE", **kwargs)
+
+    def acl_setuser(
+        self,
+        username: str,
+        enabled: bool = False,
+        nopass: bool = False,
+        passwords: Union[str, Iterable[str], None] = None,
+        hashed_passwords: Union[str, Iterable[str], None] = None,
+        categories: Optional[Iterable[str]] = None,
+        commands: Optional[Iterable[str]] = None,
+        keys: Optional[Iterable[KeyT]] = None,
+        channels: Optional[Iterable[ChannelT]] = None,
+        selectors: Optional[Iterable[Tuple[str, KeyT]]] = None,
+        reset: bool = False,
+        reset_keys: bool = False,
+        reset_channels: bool = False,
+        reset_passwords: bool = False,
+        **kwargs,
+    ) -> ResponseT:
+        """
+        Create or update an ACL user.
+
+        Create or update the ACL for ``username``. If the user already exists,
+        the existing ACL is completely overwritten and replaced with the
+        specified values.
+
+        ``enabled`` is a boolean indicating whether the user should be allowed
+        to authenticate or not. Defaults to ``False``.
+
+        ``nopass`` is a boolean indicating whether the user can authenticate
+        without a password. This cannot be True if ``passwords`` are also
+        specified.
+
+        ``passwords`` if specified is a list of plain text passwords
+        to add to or remove from the user. Each password must be prefixed with
+        a '+' to add or a '-' to remove. For convenience, the value of
+        ``passwords`` can be a simple prefixed string when adding or
+        removing a single password.
+
+        ``hashed_passwords`` if specified is a list of SHA-256 hashed passwords
+        to add to or remove from the user. Each hashed password must be
+        prefixed with a '+' to add or a '-' to remove. For convenience,
+        the value of ``hashed_passwords`` can be a simple prefixed string when
+        adding or removing a single password.
+
+        ``categories`` if specified is a list of strings representing category
+        permissions.
+        Each string must be prefixed with either a '+' to add the
+        category permission or a '-' to remove the category permission.
+
+        ``commands`` if specified is a list of strings representing command
+        permissions. Each string must be prefixed with either a '+' to add the
+        command permission or a '-' to remove the command permission.
+
+        ``keys`` if specified is a list of key patterns to grant the user
+        access to. Key patterns allow '*' to support wildcard matching. For
+        example, '*' grants access to all keys while 'cache:*' grants access
+        to all keys that are prefixed with 'cache:'. ``keys`` should not be
+        prefixed with a '~'.
+
+        ``reset`` is a boolean indicating whether the user should be fully
+        reset prior to applying the new ACL. Setting this to True will
+        remove all existing passwords, flags and privileges from the user and
+        then apply the specified rules. If this is False, the user's existing
+        passwords, flags and privileges will be kept and any new specified
+        rules will be applied on top.
+
+        ``reset_keys`` is a boolean indicating whether the user's key
+        permissions should be reset prior to applying any new key permissions
+        specified in ``keys``. If this is False, the user's existing
+        key permissions will be kept and any new specified key permissions
+        will be applied on top.
+
+        ``reset_channels`` is a boolean indicating whether the user's channel
+        permissions should be reset prior to applying any new channel permissions
+        specified in ``channels``. If this is False, the user's existing
+        channel permissions will be kept and any new specified channel permissions
+        will be applied on top.
+
+        ``reset_passwords`` is a boolean indicating whether to remove all
+        existing passwords and the 'nopass' flag from the user prior to
+        applying any new passwords specified in 'passwords' or
+        'hashed_passwords'. If this is False, the user's existing passwords
+        and 'nopass' status will be kept and any new specified passwords
+        or hashed_passwords will be applied on top.
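+
+        Illustrative sketch (``r`` is an assumed name for a connected Redis
+        client; the user name, password, and key pattern are placeholders):
+
+            r.acl_setuser(
+                "app-user",
+                enabled=True,
+                passwords=["+app-password"],
+                categories=["+@read"],
+                keys=["cache:*"],
+            )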
+ + For more information see https://redis.io/commands/acl-setuser + """ + encoder = self.get_encoder() + pieces: List[EncodableT] = [username] + + if reset: + pieces.append(b"reset") + + if reset_keys: + pieces.append(b"resetkeys") + + if reset_channels: + pieces.append(b"resetchannels") + + if reset_passwords: + pieces.append(b"resetpass") + + if enabled: + pieces.append(b"on") + else: + pieces.append(b"off") + + if (passwords or hashed_passwords) and nopass: + raise DataError( + "Cannot set 'nopass' and supply 'passwords' or 'hashed_passwords'" + ) + + if passwords: + # as most users will have only one password, allow remove_passwords + # to be specified as a simple string or a list + passwords = list_or_args(passwords, []) + for i, password in enumerate(passwords): + password = encoder.encode(password) + if password.startswith(b"+"): + pieces.append(b">%s" % password[1:]) + elif password.startswith(b"-"): + pieces.append(b"<%s" % password[1:]) + else: + raise DataError( + f"Password {i} must be prefixed with a " + f'"+" to add or a "-" to remove' + ) + + if hashed_passwords: + # as most users will have only one password, allow remove_passwords + # to be specified as a simple string or a list + hashed_passwords = list_or_args(hashed_passwords, []) + for i, hashed_password in enumerate(hashed_passwords): + hashed_password = encoder.encode(hashed_password) + if hashed_password.startswith(b"+"): + pieces.append(b"#%s" % hashed_password[1:]) + elif hashed_password.startswith(b"-"): + pieces.append(b"!%s" % hashed_password[1:]) + else: + raise DataError( + f"Hashed password {i} must be prefixed with a " + f'"+" to add or a "-" to remove' + ) + + if nopass: + pieces.append(b"nopass") + + if categories: + for category in categories: + category = encoder.encode(category) + # categories can be prefixed with one of (+@, +, -@, -) + if category.startswith(b"+@"): + pieces.append(category) + elif category.startswith(b"+"): + pieces.append(b"+@%s" % category[1:]) + elif category.startswith(b"-@"): + pieces.append(category) + elif category.startswith(b"-"): + pieces.append(b"-@%s" % category[1:]) + else: + raise DataError( + f'Category "{encoder.decode(category, force=True)}" ' + 'must be prefixed with "+" or "-"' + ) + if commands: + for cmd in commands: + cmd = encoder.encode(cmd) + if not cmd.startswith(b"+") and not cmd.startswith(b"-"): + raise DataError( + f'Command "{encoder.decode(cmd, force=True)}" ' + 'must be prefixed with "+" or "-"' + ) + pieces.append(cmd) + + if keys: + for key in keys: + key = encoder.encode(key) + if not key.startswith(b"%") and not key.startswith(b"~"): + key = b"~%s" % key + pieces.append(key) + + if channels: + for channel in channels: + channel = encoder.encode(channel) + pieces.append(b"&%s" % channel) + + if selectors: + for cmd, key in selectors: + cmd = encoder.encode(cmd) + if not cmd.startswith(b"+") and not cmd.startswith(b"-"): + raise DataError( + f'Command "{encoder.decode(cmd, force=True)}" ' + 'must be prefixed with "+" or "-"' + ) + + key = encoder.encode(key) + if not key.startswith(b"%") and not key.startswith(b"~"): + key = b"~%s" % key + + pieces.append(b"(%s %s)" % (cmd, key)) + + return self.execute_command("ACL SETUSER", *pieces, **kwargs) + + def acl_users(self, **kwargs) -> ResponseT: + """Returns a list of all registered users on the server. 
+ + For more information see https://redis.io/commands/acl-users + """ + return self.execute_command("ACL USERS", **kwargs) + + def acl_whoami(self, **kwargs) -> ResponseT: + """Get the username for the current connection + + For more information see https://redis.io/commands/acl-whoami + """ + return self.execute_command("ACL WHOAMI", **kwargs) + + +AsyncACLCommands = ACLCommands + + +class ManagementCommands(CommandsProtocol): + """ + Redis management commands + """ + + def auth(self, password: str, username: Optional[str] = None, **kwargs): + """ + Authenticates the user. If you do not pass username, Redis will try to + authenticate for the "default" user. If you do pass username, it will + authenticate for the given user. + For more information see https://redis.io/commands/auth + """ + pieces = [] + if username is not None: + pieces.append(username) + pieces.append(password) + return self.execute_command("AUTH", *pieces, **kwargs) + + def bgrewriteaof(self, **kwargs): + """Tell the Redis server to rewrite the AOF file from data in memory. + + For more information see https://redis.io/commands/bgrewriteaof + """ + return self.execute_command("BGREWRITEAOF", **kwargs) + + def bgsave(self, schedule: bool = True, **kwargs) -> ResponseT: + """ + Tell the Redis server to save its data to disk. Unlike save(), + this method is asynchronous and returns immediately. + + For more information see https://redis.io/commands/bgsave + """ + pieces = [] + if schedule: + pieces.append("SCHEDULE") + return self.execute_command("BGSAVE", *pieces, **kwargs) + + def role(self) -> ResponseT: + """ + Provide information on the role of a Redis instance in + the context of replication, by returning if the instance + is currently a master, slave, or sentinel. + + For more information see https://redis.io/commands/role + """ + return self.execute_command("ROLE") + + def client_kill(self, address: str, **kwargs) -> ResponseT: + """Disconnects the client at ``address`` (ip:port) + + For more information see https://redis.io/commands/client-kill + """ + return self.execute_command("CLIENT KILL", address, **kwargs) + + def client_kill_filter( + self, + _id: Union[str, None] = None, + _type: Union[str, None] = None, + addr: Union[str, None] = None, + skipme: Union[bool, None] = None, + laddr: Union[bool, None] = None, + user: str = None, + **kwargs, + ) -> ResponseT: + """ + Disconnects client(s) using a variety of filter options + :param _id: Kills a client by its unique ID field + :param _type: Kills a client by type where type is one of 'normal', + 'master', 'slave' or 'pubsub' + :param addr: Kills a client by its 'address:port' + :param skipme: If True, then the client calling the command + will not get killed even if it is identified by one of the filter + options. 
If skipme is not provided, the server defaults to skipme=True
+        :param laddr: Kills a client by its 'local (bind) address:port'
+        :param user: Kills a client for a specific user name
+        """
+        args = []
+        if _type is not None:
+            client_types = ("normal", "master", "slave", "pubsub")
+            if str(_type).lower() not in client_types:
+                raise DataError(f"CLIENT KILL type must be one of {client_types!r}")
+            args.extend((b"TYPE", _type))
+        if skipme is not None:
+            if not isinstance(skipme, bool):
+                raise DataError("CLIENT KILL skipme must be a bool")
+            if skipme:
+                args.extend((b"SKIPME", b"YES"))
+            else:
+                args.extend((b"SKIPME", b"NO"))
+        if _id is not None:
+            args.extend((b"ID", _id))
+        if addr is not None:
+            args.extend((b"ADDR", addr))
+        if laddr is not None:
+            args.extend((b"LADDR", laddr))
+        if user is not None:
+            args.extend((b"USER", user))
+        if not args:
+            raise DataError(
+                "CLIENT KILL <filter> <value> ... ... <filter> "
+                "<value> must specify at least one filter"
+            )
+        return self.execute_command("CLIENT KILL", *args, **kwargs)
+
+    def client_info(self, **kwargs) -> ResponseT:
+        """
+        Returns information and statistics about the current
+        client connection.
+
+        For more information see https://redis.io/commands/client-info
+        """
+        return self.execute_command("CLIENT INFO", **kwargs)
+
+    def client_list(
+        self, _type: Union[str, None] = None, client_id: List[EncodableT] = [], **kwargs
+    ) -> ResponseT:
+        """
+        Returns a list of currently connected clients.
+        If type of client specified, only that type will be returned.
+
+        :param _type: optional. one of the client types (normal, master,
+         replica, pubsub)
+        :param client_id: optional. a list of client ids
+
+        For more information see https://redis.io/commands/client-list
+        """
+        args = []
+        if _type is not None:
+            client_types = ("normal", "master", "replica", "pubsub")
+            if str(_type).lower() not in client_types:
+                raise DataError(f"CLIENT LIST _type must be one of {client_types!r}")
+            args.append(b"TYPE")
+            args.append(_type)
+        if not isinstance(client_id, list):
+            raise DataError("client_id must be a list")
+        if client_id:
+            args.append(b"ID")
+            args.append(" ".join(client_id))
+        return self.execute_command("CLIENT LIST", *args, **kwargs)
+
+    def client_getname(self, **kwargs) -> ResponseT:
+        """
+        Returns the current connection name
+
+        For more information see https://redis.io/commands/client-getname
+        """
+        return self.execute_command("CLIENT GETNAME", **kwargs)
+
+    def client_getredir(self, **kwargs) -> ResponseT:
+        """
+        Returns the ID (an integer) of the client to whom we are
+        redirecting tracking notifications.
+
+        see: https://redis.io/commands/client-getredir
+        """
+        return self.execute_command("CLIENT GETREDIR", **kwargs)
+
+    def client_reply(
+        self, reply: Union[Literal["ON"], Literal["OFF"], Literal["SKIP"]], **kwargs
+    ) -> ResponseT:
+        """
+        Enable and disable redis server replies.
+
+        ``reply`` Must be ON, OFF or SKIP,
+        ON - The default mode; the server replies to commands
+        OFF - Disable server responses to commands
+        SKIP - Skip the response of the immediately following command.
+
+        Note: When setting OFF or SKIP replies, you will need a client object
+        with a timeout specified in seconds, and will need to catch the
+        TimeoutError.
+        The test_client_reply unit test illustrates this, and
+        conftest.py has a client with a timeout.
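+
+        Illustrative sketch (``r`` is an assumed name for a Redis client
+        created with a socket timeout, as noted above):
+
+            try:
+                r.client_reply("OFF")  # no reply will be sent back
+            except TimeoutError:
+                pass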
+
+        See https://redis.io/commands/client-reply
+        """
+        replies = ["ON", "OFF", "SKIP"]
+        if reply not in replies:
+            raise DataError(f"CLIENT REPLY must be one of {replies!r}")
+        return self.execute_command("CLIENT REPLY", reply, **kwargs)
+
+    def client_id(self, **kwargs) -> ResponseT:
+        """
+        Returns the current connection id
+
+        For more information see https://redis.io/commands/client-id
+        """
+        return self.execute_command("CLIENT ID", **kwargs)
+
+    def client_tracking_on(
+        self,
+        clientid: Union[int, None] = None,
+        prefix: Sequence[KeyT] = [],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ) -> ResponseT:
+        """
+        Turn on the tracking mode.
+        For more information about the options look at client_tracking func.
+
+        See https://redis.io/commands/client-tracking
+        """
+        return self.client_tracking(
+            True, clientid, prefix, bcast, optin, optout, noloop
+        )
+
+    def client_tracking_off(
+        self,
+        clientid: Union[int, None] = None,
+        prefix: Sequence[KeyT] = [],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ) -> ResponseT:
+        """
+        Turn off the tracking mode.
+        For more information about the options look at client_tracking func.
+
+        See https://redis.io/commands/client-tracking
+        """
+        return self.client_tracking(
+            False, clientid, prefix, bcast, optin, optout, noloop
+        )
+
+    def client_tracking(
+        self,
+        on: bool = True,
+        clientid: Union[int, None] = None,
+        prefix: Sequence[KeyT] = [],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+        **kwargs,
+    ) -> ResponseT:
+        """
+        Enables the tracking feature of the Redis server, that is used
+        for server assisted client side caching.
+
+        ``on`` indicates whether tracking is turned on or off. The default
+        is on.
+
+        ``clientid`` send invalidation messages to the connection with
+        the specified ID.
+
+        ``bcast`` enable tracking in broadcasting mode. In this mode
+        invalidation messages are reported for all the prefixes
+        specified, regardless of the keys requested by the connection.
+
+        ``optin`` when broadcasting is NOT active, normally don't track
+        keys in read only commands, unless they are called immediately
+        after a CLIENT CACHING yes command.
+
+        ``optout`` when broadcasting is NOT active, normally track keys in
+        read only commands, unless they are called immediately after a
+        CLIENT CACHING no command.
+
+        ``noloop`` don't send notifications about keys modified by this
+        connection itself.
+
+        ``prefix`` for broadcasting, register a given key prefix, so that
+        notifications will be provided only for keys starting with this string.
+
+        See https://redis.io/commands/client-tracking
+        """
+
+        if len(prefix) != 0 and bcast is False:
+            raise DataError("Prefix can only be used with bcast")
+
+        pieces = ["ON"] if on else ["OFF"]
+        if clientid is not None:
+            pieces.extend(["REDIRECT", clientid])
+        for p in prefix:
+            pieces.extend(["PREFIX", p])
+        if bcast:
+            pieces.append("BCAST")
+        if optin:
+            pieces.append("OPTIN")
+        if optout:
+            pieces.append("OPTOUT")
+        if noloop:
+            pieces.append("NOLOOP")
+
+        return self.execute_command("CLIENT TRACKING", *pieces)
+
+    def client_trackinginfo(self, **kwargs) -> ResponseT:
+        """
+        Returns the information about the current client connection's
+        use of the server assisted client side cache.
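+
+        Illustrative sketch (``r`` is an assumed name for a connected Redis
+        client; broadcasting mode with a placeholder prefix):
+
+            r.client_tracking_on(bcast=True, prefix=["cache:"])
+            info = r.client_trackinginfo()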
+
+        See https://redis.io/commands/client-trackinginfo
+        """
+        return self.execute_command("CLIENT TRACKINGINFO", **kwargs)
+
+    def client_setname(self, name: str, **kwargs) -> ResponseT:
+        """
+        Sets the current connection name
+
+        For more information see https://redis.io/commands/client-setname
+
+        .. note::
+           This method sets client name only for **current** connection.
+
+           If you want to set a common name for all connections managed
+           by this client, use ``client_name`` constructor argument.
+        """
+        return self.execute_command("CLIENT SETNAME", name, **kwargs)
+
+    def client_setinfo(self, attr: str, value: str, **kwargs) -> ResponseT:
+        """
+        Sets the current connection library name or version
+        For more information see https://redis.io/commands/client-setinfo
+        """
+        return self.execute_command("CLIENT SETINFO", attr, value, **kwargs)
+
+    def client_unblock(
+        self, client_id: int, error: bool = False, **kwargs
+    ) -> ResponseT:
+        """
+        Unblocks a connection by its client id.
+        If ``error`` is True, unblocks the client with a special error message.
+        If ``error`` is False (default), the client is unblocked using the
+        regular timeout mechanism.
+
+        For more information see https://redis.io/commands/client-unblock
+        """
+        args = ["CLIENT UNBLOCK", int(client_id)]
+        if error:
+            args.append(b"ERROR")
+        return self.execute_command(*args, **kwargs)
+
+    def client_pause(self, timeout: int, all: bool = True, **kwargs) -> ResponseT:
+        """
+        Suspend all the Redis clients for the specified amount of time.
+
+
+        For more information see https://redis.io/commands/client-pause
+
+        :param timeout: milliseconds to pause clients
+        :param all: If true (default) all client commands are blocked.
+             otherwise, clients are only blocked if they attempt to execute
+             a write command.
+             For the WRITE mode, some commands have special behavior:
+                 EVAL/EVALSHA: Will block client for all scripts.
+                 PUBLISH: Will block client.
+                 PFCOUNT: Will block client.
+                 WAIT: Acknowledgments will be delayed, so this command will
+                 appear blocked.
+        """
+        args = ["CLIENT PAUSE", str(timeout)]
+        if not isinstance(timeout, int):
+            raise DataError("CLIENT PAUSE timeout must be an integer")
+        if not all:
+            args.append("WRITE")
+        return self.execute_command(*args, **kwargs)
+
+    def client_unpause(self, **kwargs) -> ResponseT:
+        """
+        Unpause all redis clients
+
+        For more information see https://redis.io/commands/client-unpause
+        """
+        return self.execute_command("CLIENT UNPAUSE", **kwargs)
+
+    def client_no_evict(self, mode: str) -> Union[Awaitable[str], str]:
+        """
+        Sets the client eviction mode for the current connection.
+
+        For more information see https://redis.io/commands/client-no-evict
+        """
+        return self.execute_command("CLIENT NO-EVICT", mode)
+
+    def client_no_touch(self, mode: str) -> Union[Awaitable[str], str]:
+        """
+        The command controls whether commands sent by the client will alter
+        the LRU/LFU of the keys they access.
+        When turned on, the current client will not change LFU/LRU stats,
+        unless it sends the TOUCH command.
+
+        For more information see https://redis.io/commands/client-no-touch
+        """
+        return self.execute_command("CLIENT NO-TOUCH", mode)
+
+    def command(self, **kwargs):
+        """
+        Returns dict reply of details about all Redis commands.
+
+        For more information see https://redis.io/commands/command
+        """
+        return self.execute_command("COMMAND", **kwargs)
+
+    def command_info(self, **kwargs) -> None:
+        raise NotImplementedError(
+            "COMMAND INFO is intentionally not implemented in the client."
+ ) + + def command_count(self, **kwargs) -> ResponseT: + return self.execute_command("COMMAND COUNT", **kwargs) + + def command_list( + self, + module: Optional[str] = None, + category: Optional[str] = None, + pattern: Optional[str] = None, + ) -> ResponseT: + """ + Return an array of the server's command names. + You can use one of the following filters: + ``module``: get the commands that belong to the module + ``category``: get the commands in the ACL category + ``pattern``: get the commands that match the given pattern + + For more information see https://redis.io/commands/command-list/ + """ + pieces = [] + if module is not None: + pieces.extend(["MODULE", module]) + if category is not None: + pieces.extend(["ACLCAT", category]) + if pattern is not None: + pieces.extend(["PATTERN", pattern]) + + if pieces: + pieces.insert(0, "FILTERBY") + + return self.execute_command("COMMAND LIST", *pieces) + + def command_getkeysandflags(self, *args: List[str]) -> List[Union[str, List[str]]]: + """ + Returns array of keys from a full Redis command and their usage flags. + + For more information see https://redis.io/commands/command-getkeysandflags + """ + return self.execute_command("COMMAND GETKEYSANDFLAGS", *args) + + def command_docs(self, *args): + """ + This function throws a NotImplementedError since it is intentionally + not supported. + """ + raise NotImplementedError( + "COMMAND DOCS is intentionally not implemented in the client." + ) + + def config_get( + self, pattern: PatternT = "*", *args: List[PatternT], **kwargs + ) -> ResponseT: + """ + Return a dictionary of configuration based on the ``pattern`` + + For more information see https://redis.io/commands/config-get + """ + return self.execute_command("CONFIG GET", pattern, *args, **kwargs) + + def config_set( + self, + name: KeyT, + value: EncodableT, + *args: List[Union[KeyT, EncodableT]], + **kwargs, + ) -> ResponseT: + """Set config item ``name`` with ``value`` + + For more information see https://redis.io/commands/config-set + """ + return self.execute_command("CONFIG SET", name, value, *args, **kwargs) + + def config_resetstat(self, **kwargs) -> ResponseT: + """ + Reset runtime statistics + + For more information see https://redis.io/commands/config-resetstat + """ + return self.execute_command("CONFIG RESETSTAT", **kwargs) + + def config_rewrite(self, **kwargs) -> ResponseT: + """ + Rewrite config file with the minimal change to reflect running config. + + For more information see https://redis.io/commands/config-rewrite + """ + return self.execute_command("CONFIG REWRITE", **kwargs) + + def dbsize(self, **kwargs) -> ResponseT: + """ + Returns the number of keys in the current database + + For more information see https://redis.io/commands/dbsize + """ + return self.execute_command("DBSIZE", **kwargs) + + def debug_object(self, key: KeyT, **kwargs) -> ResponseT: + """ + Returns version specific meta information about a given key + + For more information see https://redis.io/commands/debug-object + """ + return self.execute_command("DEBUG OBJECT", key, **kwargs) + + def debug_segfault(self, **kwargs) -> None: + raise NotImplementedError( + """ + DEBUG SEGFAULT is intentionally not implemented in the client. 
+
+            For more information see https://redis.io/commands/debug-segfault
+            """
+        )
+
+    def echo(self, value: EncodableT, **kwargs) -> ResponseT:
+        """
+        Echo the string back from the server
+
+        For more information see https://redis.io/commands/echo
+        """
+        return self.execute_command("ECHO", value, **kwargs)
+
+    def flushall(self, asynchronous: bool = False, **kwargs) -> ResponseT:
+        """
+        Delete all keys in all databases on the current host.
+
+        ``asynchronous`` indicates whether the operation is
+        executed asynchronously by the server.
+
+        For more information see https://redis.io/commands/flushall
+        """
+        args = []
+        if asynchronous:
+            args.append(b"ASYNC")
+        return self.execute_command("FLUSHALL", *args, **kwargs)
+
+    def flushdb(self, asynchronous: bool = False, **kwargs) -> ResponseT:
+        """
+        Delete all keys in the current database.
+
+        ``asynchronous`` indicates whether the operation is
+        executed asynchronously by the server.
+
+        For more information see https://redis.io/commands/flushdb
+        """
+        args = []
+        if asynchronous:
+            args.append(b"ASYNC")
+        return self.execute_command("FLUSHDB", *args, **kwargs)
+
+    def sync(self) -> ResponseT:
+        """
+        Initiates a replication stream from the master.
+
+        For more information see https://redis.io/commands/sync
+        """
+        from redis.client import NEVER_DECODE
+
+        options = {}
+        options[NEVER_DECODE] = []
+        return self.execute_command("SYNC", **options)
+
+    def psync(self, replicationid: str, offset: int):
+        """
+        Initiates a replication stream from the master.
+        Newer version of `sync`.
+
+        For more information see https://redis.io/commands/psync
+        """
+        from redis.client import NEVER_DECODE
+
+        options = {}
+        options[NEVER_DECODE] = []
+        return self.execute_command("PSYNC", replicationid, offset, **options)
+
+    def swapdb(self, first: int, second: int, **kwargs) -> ResponseT:
+        """
+        Swap two databases
+
+        For more information see https://redis.io/commands/swapdb
+        """
+        return self.execute_command("SWAPDB", first, second, **kwargs)
+
+    def select(self, index: int, **kwargs) -> ResponseT:
+        """Select the Redis logical database at index.
+
+        See: https://redis.io/commands/select
+        """
+        return self.execute_command("SELECT", index, **kwargs)
+
+    def info(
+        self, section: Union[str, None] = None, *args: List[str], **kwargs
+    ) -> ResponseT:
+        """
+        Returns a dictionary containing information about the Redis server
+
+        The ``section`` option can be used to select a specific section
+        of information
+
+        The section option is not supported by older versions of Redis Server,
+        and will generate ResponseError
+
+        For more information see https://redis.io/commands/info
+        """
+        if section is None:
+            return self.execute_command("INFO", **kwargs)
+        else:
+            return self.execute_command("INFO", section, *args, **kwargs)
+
+    def lastsave(self, **kwargs) -> ResponseT:
+        """
+        Return a Python datetime object representing the last time the
+        Redis database was saved to disk
+
+        For more information see https://redis.io/commands/lastsave
+        """
+        return self.execute_command("LASTSAVE", **kwargs)
+
+    def latency_doctor(self):
+        """Raise a NotImplementedError, as the client will not support LATENCY DOCTOR.
+        This function is best used within the redis-cli.
+
+        For more information see https://redis.io/commands/latency-doctor
+        """
+        raise NotImplementedError(
+            """
+            LATENCY DOCTOR is intentionally not implemented in the client.
+
+            For more information see https://redis.io/commands/latency-doctor
+            """
+        )
+
+    def latency_graph(self):
+        """Raise a NotImplementedError, as the client will not support LATENCY GRAPH.
+        This function is best used within the redis-cli.
+
+        For more information see https://redis.io/commands/latency-graph.
+        """
+        raise NotImplementedError(
+            """
+            LATENCY GRAPH is intentionally not implemented in the client.
+
+            For more information see https://redis.io/commands/latency-graph
+            """
+        )
+
+    def lolwut(self, *version_numbers: Union[str, float], **kwargs) -> ResponseT:
+        """
+        Get the Redis version and a piece of generative computer art
+
+        See: https://redis.io/commands/lolwut
+        """
+        if version_numbers:
+            return self.execute_command("LOLWUT VERSION", *version_numbers, **kwargs)
+        else:
+            return self.execute_command("LOLWUT", **kwargs)
+
+    def reset(self) -> ResponseT:
+        """Perform a full reset on the connection's server side context.
+
+        See: https://redis.io/commands/reset
+        """
+        return self.execute_command("RESET")
+
+    def migrate(
+        self,
+        host: str,
+        port: int,
+        keys: KeysT,
+        destination_db: int,
+        timeout: int,
+        copy: bool = False,
+        replace: bool = False,
+        auth: Union[str, None] = None,
+        **kwargs,
+    ) -> ResponseT:
+        """
+        Migrate 1 or more keys from the current Redis server to a different
+        server specified by the ``host``, ``port`` and ``destination_db``.
+
+        The ``timeout``, specified in milliseconds, indicates the maximum
+        time the connection between the two servers can be idle before the
+        command is interrupted.
+
+        If ``copy`` is True, the specified ``keys`` are NOT deleted from
+        the source server.
+
+        If ``replace`` is True, this operation will overwrite the keys
+        on the destination server if they exist.
+
+        If ``auth`` is specified, authenticate to the destination server with
+        the password provided.
+
+        For more information see https://redis.io/commands/migrate
+        """
+        keys = list_or_args(keys, [])
+        if not keys:
+            raise DataError("MIGRATE requires at least one key")
+        pieces = []
+        if copy:
+            pieces.append(b"COPY")
+        if replace:
+            pieces.append(b"REPLACE")
+        if auth:
+            pieces.append(b"AUTH")
+            pieces.append(auth)
+        pieces.append(b"KEYS")
+        pieces.extend(keys)
+        return self.execute_command(
+            "MIGRATE", host, port, "", destination_db, timeout, *pieces, **kwargs
+        )
+
+    def object(self, infotype: str, key: KeyT, **kwargs) -> ResponseT:
+        """
+        Return the encoding, idletime, or refcount about the key
+        """
+        return self.execute_command(
+            "OBJECT", infotype, key, infotype=infotype, **kwargs
+        )
+
+    def memory_doctor(self, **kwargs) -> None:
+        raise NotImplementedError(
+            """
+            MEMORY DOCTOR is intentionally not implemented in the client.
+
+            For more information see https://redis.io/commands/memory-doctor
+            """
+        )
+
+    def memory_help(self, **kwargs) -> None:
+        raise NotImplementedError(
+            """
+            MEMORY HELP is intentionally not implemented in the client.
+
+            For more information see https://redis.io/commands/memory-help
+            """
+        )
+
+    def memory_stats(self, **kwargs) -> ResponseT:
+        """
+        Return a dictionary of memory stats
+
+        For more information see https://redis.io/commands/memory-stats
+        """
+        return self.execute_command("MEMORY STATS", **kwargs)
+
+    def memory_malloc_stats(self, **kwargs) -> ResponseT:
+        """
+        Return an internal statistics report from the memory allocator.
+ + See: https://redis.io/commands/memory-malloc-stats + """ + return self.execute_command("MEMORY MALLOC-STATS", **kwargs) + + def memory_usage( + self, key: KeyT, samples: Union[int, None] = None, **kwargs + ) -> ResponseT: + """ + Return the total memory usage for key, its value and associated + administrative overheads. + + For nested data structures, ``samples`` is the number of elements to + sample. If left unspecified, the server's default is 5. Use 0 to sample + all elements. + + For more information see https://redis.io/commands/memory-usage + """ + args = [] + if isinstance(samples, int): + args.extend([b"SAMPLES", samples]) + return self.execute_command("MEMORY USAGE", key, *args, **kwargs) + + def memory_purge(self, **kwargs) -> ResponseT: + """ + Attempts to purge dirty pages for reclamation by allocator + + For more information see https://redis.io/commands/memory-purge + """ + return self.execute_command("MEMORY PURGE", **kwargs) + + def latency_histogram(self, *args): + """ + This function throws a NotImplementedError since it is intentionally + not supported. + """ + raise NotImplementedError( + "LATENCY HISTOGRAM is intentionally not implemented in the client." + ) + + def latency_history(self, event: str) -> ResponseT: + """ + Returns the raw data of the ``event``'s latency spikes time series. + + For more information see https://redis.io/commands/latency-history + """ + return self.execute_command("LATENCY HISTORY", event) + + def latency_latest(self) -> ResponseT: + """ + Reports the latest latency events logged. + + For more information see https://redis.io/commands/latency-latest + """ + return self.execute_command("LATENCY LATEST") + + def latency_reset(self, *events: str) -> ResponseT: + """ + Resets the latency spikes time series of all, or only some, events. + + For more information see https://redis.io/commands/latency-reset + """ + return self.execute_command("LATENCY RESET", *events) + + def ping(self, **kwargs) -> ResponseT: + """ + Ping the Redis server + + For more information see https://redis.io/commands/ping + """ + return self.execute_command("PING", **kwargs) + + def quit(self, **kwargs) -> ResponseT: + """ + Ask the server to close the connection. + + For more information see https://redis.io/commands/quit + """ + return self.execute_command("QUIT", **kwargs) + + def replicaof(self, *args, **kwargs) -> ResponseT: + """ + Update the replication settings of a redis replica, on the fly. + + Examples of valid arguments include: + + NO ONE (set no replication) + host port (set to the host and port of a redis server) + + For more information see https://redis.io/commands/replicaof + """ + return self.execute_command("REPLICAOF", *args, **kwargs) + + def save(self, **kwargs) -> ResponseT: + """ + Tell the Redis server to save its data to disk, + blocking until the save is complete + + For more information see https://redis.io/commands/save + """ + return self.execute_command("SAVE", **kwargs) + + def shutdown( + self, + save: bool = False, + nosave: bool = False, + now: bool = False, + force: bool = False, + abort: bool = False, + **kwargs, + ) -> None: + """Shutdown the Redis server. If Redis has persistence configured, + data will be flushed before shutdown. + It is possible to specify modifiers to alter the behavior of the command: + ``save`` will force a DB saving operation even if no save points are configured. + ``nosave`` will prevent a DB saving operation even if one or more save points + are configured. 
+        ``now`` skips waiting for lagging replicas, i.e. it bypasses the first step in
+        the shutdown sequence.
+        ``force`` ignores any errors that would normally prevent the server from exiting
+        ``abort`` cancels an ongoing shutdown and cannot be combined with other flags.
+
+        For more information see https://redis.io/commands/shutdown
+        """
+        if save and nosave:
+            raise DataError("SHUTDOWN save and nosave cannot both be set")
+        args = ["SHUTDOWN"]
+        if save:
+            args.append("SAVE")
+        if nosave:
+            args.append("NOSAVE")
+        if now:
+            args.append("NOW")
+        if force:
+            args.append("FORCE")
+        if abort:
+            args.append("ABORT")
+        try:
+            self.execute_command(*args, **kwargs)
+        except ConnectionError:
+            # a ConnectionError here is expected
+            return
+        raise RedisError("SHUTDOWN seems to have failed.")
+
+    def slaveof(
+        self, host: Union[str, None] = None, port: Union[int, None] = None, **kwargs
+    ) -> ResponseT:
+        """
+        Set the server to be a replicated slave of the instance identified
+        by the ``host`` and ``port``. If called without arguments, the
+        instance is promoted to a master instead.
+
+        For more information see https://redis.io/commands/slaveof
+        """
+        if host is None and port is None:
+            return self.execute_command("SLAVEOF", b"NO", b"ONE", **kwargs)
+        return self.execute_command("SLAVEOF", host, port, **kwargs)
+
+    def slowlog_get(self, num: Union[int, None] = None, **kwargs) -> ResponseT:
+        """
+        Get the entries from the slowlog. If ``num`` is specified, get the
+        most recent ``num`` items.
+
+        For more information see https://redis.io/commands/slowlog-get
+        """
+        from redis.client import NEVER_DECODE
+
+        args = ["SLOWLOG GET"]
+        if num is not None:
+            args.append(num)
+        decode_responses = self.get_connection_kwargs().get("decode_responses", False)
+        if decode_responses is True:
+            kwargs[NEVER_DECODE] = []
+        return self.execute_command(*args, **kwargs)
+
+    def slowlog_len(self, **kwargs) -> ResponseT:
+        """
+        Get the number of items in the slowlog
+
+        For more information see https://redis.io/commands/slowlog-len
+        """
+        return self.execute_command("SLOWLOG LEN", **kwargs)
+
+    def slowlog_reset(self, **kwargs) -> ResponseT:
+        """
+        Remove all items in the slowlog
+
+        For more information see https://redis.io/commands/slowlog-reset
+        """
+        return self.execute_command("SLOWLOG RESET", **kwargs)
+
+    def time(self, **kwargs) -> ResponseT:
+        """
+        Returns the server time as a 2-item tuple of ints:
+        (seconds since epoch, microseconds into this second).
+
+        For more information see https://redis.io/commands/time
+        """
+        return self.execute_command("TIME", **kwargs)
+
+    def wait(self, num_replicas: int, timeout: int, **kwargs) -> ResponseT:
+        """
+        Redis synchronous replication.
+        Returns the number of replicas that processed the query when
+        we finally have at least ``num_replicas``, or when the ``timeout`` was
+        reached.
+
+        For more information see https://redis.io/commands/wait
+        """
+        return self.execute_command("WAIT", num_replicas, timeout, **kwargs)
+
+    def waitaof(
+        self, num_local: int, num_replicas: int, timeout: int, **kwargs
+    ) -> ResponseT:
+        """
+        This command blocks the current client until all previous write
+        commands by that client are acknowledged as having been fsynced
+        to the AOF of the local Redis and/or at least the specified number
+        of replicas.
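+
+        Illustrative sketch (``r`` is an assumed name for a connected Redis
+        client; waits up to 1000 ms for the local AOF fsync and one replica):
+
+            local_acked, replicas_acked = r.waitaof(1, 1, 1000)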
+
+        For more information see https://redis.io/commands/waitaof
+        """
+        return self.execute_command(
+            "WAITAOF", num_local, num_replicas, timeout, **kwargs
+        )
+
+    def hello(self):
+        """
+        This function throws a NotImplementedError since it is intentionally
+        not supported.
+        """
+        raise NotImplementedError(
+            "HELLO is intentionally not implemented in the client."
+        )
+
+    def failover(self):
+        """
+        This function throws a NotImplementedError since it is intentionally
+        not supported.
+        """
+        raise NotImplementedError(
+            "FAILOVER is intentionally not implemented in the client."
+        )
+
+
+AsyncManagementCommands = ManagementCommands
+
+
+class AsyncManagementCommands(ManagementCommands):
+    async def command_info(self, **kwargs) -> None:
+        return super().command_info(**kwargs)
+
+    async def debug_segfault(self, **kwargs) -> None:
+        return super().debug_segfault(**kwargs)
+
+    async def memory_doctor(self, **kwargs) -> None:
+        return super().memory_doctor(**kwargs)
+
+    async def memory_help(self, **kwargs) -> None:
+        return super().memory_help(**kwargs)
+
+    async def shutdown(
+        self,
+        save: bool = False,
+        nosave: bool = False,
+        now: bool = False,
+        force: bool = False,
+        abort: bool = False,
+        **kwargs,
+    ) -> None:
+        """Shutdown the Redis server. If Redis has persistence configured,
+        data will be flushed before shutdown. If the "save" option is set,
+        a data flush will be attempted even if there is no persistence
+        configured. If the "nosave" option is set, no data flush will be
+        attempted. The "save" and "nosave" options cannot both be set.
+
+        For more information see https://redis.io/commands/shutdown
+        """
+        if save and nosave:
+            raise DataError("SHUTDOWN save and nosave cannot both be set")
+        args = ["SHUTDOWN"]
+        if save:
+            args.append("SAVE")
+        if nosave:
+            args.append("NOSAVE")
+        if now:
+            args.append("NOW")
+        if force:
+            args.append("FORCE")
+        if abort:
+            args.append("ABORT")
+        try:
+            await self.execute_command(*args, **kwargs)
+        except ConnectionError:
+            # a ConnectionError here is expected
+            return
+        raise RedisError("SHUTDOWN seems to have failed.")
+
+
+class BitFieldOperation:
+    """
+    Command builder for BITFIELD commands.
+    """
+
+    def __init__(
+        self,
+        client: Union["Redis", "AsyncRedis"],
+        key: str,
+        default_overflow: Union[str, None] = None,
+    ):
+        self.client = client
+        self.key = key
+        self._default_overflow = default_overflow
+        # for typing purposes, run the following in constructor and in reset()
+        self.operations: list[tuple[EncodableT, ...]] = []
+        self._last_overflow = "WRAP"
+        self.reset()
+
+    def reset(self):
+        """
+        Reset the state of the instance to when it was constructed
+        """
+        self.operations = []
+        self._last_overflow = "WRAP"
+        self.overflow(self._default_overflow or self._last_overflow)
+
+    def overflow(self, overflow: str):
+        """
+        Update the overflow algorithm of successive INCRBY operations
+        :param overflow: Overflow algorithm, one of WRAP, SAT, FAIL. See the
+            Redis docs for descriptions of these algorithms.
+        :returns: a :py:class:`BitFieldOperation` instance.
+        """
+        overflow = overflow.upper()
+        if overflow != self._last_overflow:
+            self._last_overflow = overflow
+            self.operations.append(("OVERFLOW", overflow))
+        return self
+
+    def incrby(
+        self,
+        fmt: str,
+        offset: BitfieldOffsetT,
+        increment: int,
+        overflow: Union[str, None] = None,
+    ):
+        """
+        Increment a bitfield by a given amount.
+        :param fmt: format-string for the bitfield being updated, e.g. 'u8'
+            for an unsigned 8-bit integer.
+ :param offset: offset (in number of bits). If prefixed with a + '#', this is an offset multiplier, e.g. given the arguments + fmt='u8', offset='#2', the offset will be 16. + :param int increment: value to increment the bitfield by. + :param str overflow: overflow algorithm. Defaults to WRAP, but other + acceptable values are SAT and FAIL. See the Redis docs for + descriptions of these algorithms. + :returns: a :py:class:`BitFieldOperation` instance. + """ + if overflow is not None: + self.overflow(overflow) + + self.operations.append(("INCRBY", fmt, offset, increment)) + return self + + def get(self, fmt: str, offset: BitfieldOffsetT): + """ + Get the value of a given bitfield. + :param fmt: format-string for the bitfield being read, e.g. 'u8' for + an unsigned 8-bit integer. + :param offset: offset (in number of bits). If prefixed with a + '#', this is an offset multiplier, e.g. given the arguments + fmt='u8', offset='#2', the offset will be 16. + :returns: a :py:class:`BitFieldOperation` instance. + """ + self.operations.append(("GET", fmt, offset)) + return self + + def set(self, fmt: str, offset: BitfieldOffsetT, value: int): + """ + Set the value of a given bitfield. + :param fmt: format-string for the bitfield being read, e.g. 'u8' for + an unsigned 8-bit integer. + :param offset: offset (in number of bits). If prefixed with a + '#', this is an offset multiplier, e.g. given the arguments + fmt='u8', offset='#2', the offset will be 16. + :param int value: value to set at the given position. + :returns: a :py:class:`BitFieldOperation` instance. + """ + self.operations.append(("SET", fmt, offset, value)) + return self + + @property + def command(self): + cmd = ["BITFIELD", self.key] + for ops in self.operations: + cmd.extend(ops) + return cmd + + def execute(self) -> ResponseT: + """ + Execute the operation(s) in a single BITFIELD command. The return value + is a list of values corresponding to each operation. If the client + used to create this instance was a pipeline, the list of values + will be present within the pipeline's execute. + """ + command = self.command + self.reset() + return self.client.execute_command(*command) + + +class BasicKeyCommands(CommandsProtocol): + """ + Redis basic key-based commands + """ + + def append(self, key: KeyT, value: EncodableT) -> ResponseT: + """ + Appends the string ``value`` to the value at ``key``. If ``key`` + doesn't already exist, create it with a value of ``value``. + Returns the new length of the value at ``key``. + + For more information see https://redis.io/commands/append + """ + return self.execute_command("APPEND", key, value) + + def bitcount( + self, + key: KeyT, + start: Union[int, None] = None, + end: Union[int, None] = None, + mode: Optional[str] = None, + ) -> ResponseT: + """ + Returns the count of set bits in the value of ``key``. 
Optional + ``start`` and ``end`` parameters indicate which bytes to consider + + For more information see https://redis.io/commands/bitcount + """ + params = [key] + if start is not None and end is not None: + params.append(start) + params.append(end) + elif (start is not None and end is None) or (end is not None and start is None): + raise DataError("Both start and end must be specified") + if mode is not None: + params.append(mode) + return self.execute_command("BITCOUNT", *params) + + def bitfield( + self: Union["Redis", "AsyncRedis"], + key: KeyT, + default_overflow: Union[str, None] = None, + ) -> BitFieldOperation: + """ + Return a BitFieldOperation instance to conveniently construct one or + more bitfield operations on ``key``. + + For more information see https://redis.io/commands/bitfield + """ + return BitFieldOperation(self, key, default_overflow=default_overflow) + + def bitfield_ro( + self: Union["Redis", "AsyncRedis"], + key: KeyT, + encoding: str, + offset: BitfieldOffsetT, + items: Optional[list] = None, + ) -> ResponseT: + """ + Return an array of the specified bitfield values + where the first value is found using ``encoding`` and ``offset`` + parameters and remaining values are result of corresponding + encoding/offset pairs in optional list ``items`` + Read-only variant of the BITFIELD command. + + For more information see https://redis.io/commands/bitfield_ro + """ + params = [key, "GET", encoding, offset] + + items = items or [] + for encoding, offset in items: + params.extend(["GET", encoding, offset]) + return self.execute_command("BITFIELD_RO", *params) + + def bitop(self, operation: str, dest: KeyT, *keys: KeyT) -> ResponseT: + """ + Perform a bitwise operation using ``operation`` between ``keys`` and + store the result in ``dest``. + + For more information see https://redis.io/commands/bitop + """ + return self.execute_command("BITOP", operation, dest, *keys) + + def bitpos( + self, + key: KeyT, + bit: int, + start: Union[int, None] = None, + end: Union[int, None] = None, + mode: Optional[str] = None, + ) -> ResponseT: + """ + Return the position of the first bit set to 1 or 0 in a string. + ``start`` and ``end`` defines search range. The range is interpreted + as a range of bytes and not a range of bits, so start=0 and end=2 + means to look at the first three bytes. + + For more information see https://redis.io/commands/bitpos + """ + if bit not in (0, 1): + raise DataError("bit must be 0 or 1") + params = [key, bit] + + start is not None and params.append(start) + + if start is not None and end is not None: + params.append(end) + elif start is None and end is not None: + raise DataError("start argument is not set, when end is specified") + + if mode is not None: + params.append(mode) + return self.execute_command("BITPOS", *params) + + def copy( + self, + source: str, + destination: str, + destination_db: Union[str, None] = None, + replace: bool = False, + ) -> ResponseT: + """ + Copy the value stored in the ``source`` key to the ``destination`` key. + + ``destination_db`` an alternative destination database. By default, + the ``destination`` key is created in the source Redis database. + + ``replace`` whether the ``destination`` key should be removed before + copying the value to it. By default, the value is not copied if + the ``destination`` key already exists. 
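+
+        Illustrative (assumes a client ``r``):
+        ``r.copy("src", "dst", replace=True)`` overwrites ``dst`` with the
+        value currently stored at ``src``.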
+ + For more information see https://redis.io/commands/copy + """ + params = [source, destination] + if destination_db is not None: + params.extend(["DB", destination_db]) + if replace: + params.append("REPLACE") + return self.execute_command("COPY", *params) + + def decrby(self, name: KeyT, amount: int = 1) -> ResponseT: + """ + Decrements the value of ``key`` by ``amount``. If no key exists, + the value will be initialized as 0 - ``amount`` + + For more information see https://redis.io/commands/decrby + """ + return self.execute_command("DECRBY", name, amount) + + decr = decrby + + def delete(self, *names: KeyT) -> ResponseT: + """ + Delete one or more keys specified by ``names`` + """ + return self.execute_command("DEL", *names) + + def __delitem__(self, name: KeyT): + self.delete(name) + + def dump(self, name: KeyT) -> ResponseT: + """ + Return a serialized version of the value stored at the specified key. + If key does not exist a nil bulk reply is returned. + + For more information see https://redis.io/commands/dump + """ + from redis.client import NEVER_DECODE + + options = {} + options[NEVER_DECODE] = [] + return self.execute_command("DUMP", name, **options) + + def exists(self, *names: KeyT) -> ResponseT: + """ + Returns the number of ``names`` that exist + + For more information see https://redis.io/commands/exists + """ + return self.execute_command("EXISTS", *names) + + __contains__ = exists + + def expire( + self, + name: KeyT, + time: ExpiryT, + nx: bool = False, + xx: bool = False, + gt: bool = False, + lt: bool = False, + ) -> ResponseT: + """ + Set an expire flag on key ``name`` for ``time`` seconds with given + ``option``. ``time`` can be represented by an integer or a Python timedelta + object. + + Valid options are: + NX -> Set expiry only when the key has no expiry + XX -> Set expiry only when the key has an existing expiry + GT -> Set expiry only when the new expiry is greater than current one + LT -> Set expiry only when the new expiry is less than current one + + For more information see https://redis.io/commands/expire + """ + if isinstance(time, datetime.timedelta): + time = int(time.total_seconds()) + + exp_option = list() + if nx: + exp_option.append("NX") + if xx: + exp_option.append("XX") + if gt: + exp_option.append("GT") + if lt: + exp_option.append("LT") + + return self.execute_command("EXPIRE", name, time, *exp_option) + + def expireat( + self, + name: KeyT, + when: AbsExpiryT, + nx: bool = False, + xx: bool = False, + gt: bool = False, + lt: bool = False, + ) -> ResponseT: + """ + Set an expire flag on key ``name`` with given ``option``. ``when`` + can be represented as an integer indicating unix time or a Python + datetime object. + + Valid options are: + -> NX -- Set expiry only when the key has no expiry + -> XX -- Set expiry only when the key has an existing expiry + -> GT -- Set expiry only when the new expiry is greater than current one + -> LT -- Set expiry only when the new expiry is less than current one + + For more information see https://redis.io/commands/expireat + """ + if isinstance(when, datetime.datetime): + when = int(when.timestamp()) + + exp_option = list() + if nx: + exp_option.append("NX") + if xx: + exp_option.append("XX") + if gt: + exp_option.append("GT") + if lt: + exp_option.append("LT") + + return self.execute_command("EXPIREAT", name, when, *exp_option) + + def expiretime(self, key: str) -> int: + """ + Returns the absolute Unix timestamp (since January 1, 1970) in seconds + at which the given key will expire. 
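+
+        Illustrative (assumes a client ``r``): after ``r.setex("k", 60, "v")``,
+        ``r.expiretime("k")`` is approximately ``int(time.time()) + 60``.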
+
+        For more information see https://redis.io/commands/expiretime
+        """
+        return self.execute_command("EXPIRETIME", key)
+
+    def get(self, name: KeyT) -> ResponseT:
+        """
+        Return the value at key ``name``, or None if the key doesn't exist
+
+        For more information see https://redis.io/commands/get
+        """
+        return self.execute_command("GET", name)
+
+    def getdel(self, name: KeyT) -> ResponseT:
+        """
+        Get the value at key ``name`` and delete the key. This command
+        is similar to GET, except for the fact that it also deletes
+        the key on success (if and only if the key's value type
+        is a string).
+
+        For more information see https://redis.io/commands/getdel
+        """
+        return self.execute_command("GETDEL", name)
+
+    def getex(
+        self,
+        name: KeyT,
+        ex: Union[ExpiryT, None] = None,
+        px: Union[ExpiryT, None] = None,
+        exat: Union[AbsExpiryT, None] = None,
+        pxat: Union[AbsExpiryT, None] = None,
+        persist: bool = False,
+    ) -> ResponseT:
+        """
+        Get the value of key and optionally set its expiration.
+        GETEX is similar to GET, but is a write command with
+        additional options. All time parameters can be given as
+        datetime.timedelta or integers.
+
+        ``ex`` sets an expire flag on key ``name`` for ``ex`` seconds.
+
+        ``px`` sets an expire flag on key ``name`` for ``px`` milliseconds.
+
+        ``exat`` sets an expire flag on key ``name`` at the absolute unix
+        time, in seconds, given by ``exat``.
+
+        ``pxat`` sets an expire flag on key ``name`` at the absolute unix
+        time, in milliseconds, given by ``pxat``.
+
+        ``persist`` removes the time to live associated with ``name``.
+
+        For more information see https://redis.io/commands/getex
+        """
+
+        opset = {ex, px, exat, pxat}
+        if len(opset) > 2 or len(opset) > 1 and persist:
+            raise DataError(
+                "``ex``, ``px``, ``exat``, ``pxat``, "
+                "and ``persist`` are mutually exclusive."
+            )
+
+        pieces: list[EncodableT] = []
+        # similar to set command
+        if ex is not None:
+            pieces.append("EX")
+            if isinstance(ex, datetime.timedelta):
+                ex = int(ex.total_seconds())
+            pieces.append(ex)
+        if px is not None:
+            pieces.append("PX")
+            if isinstance(px, datetime.timedelta):
+                px = int(px.total_seconds() * 1000)
+            pieces.append(px)
+        # similar to pexpireat command
+        if exat is not None:
+            pieces.append("EXAT")
+            if isinstance(exat, datetime.datetime):
+                exat = int(exat.timestamp())
+            pieces.append(exat)
+        if pxat is not None:
+            pieces.append("PXAT")
+            if isinstance(pxat, datetime.datetime):
+                pxat = int(pxat.timestamp() * 1000)
+            pieces.append(pxat)
+        if persist:
+            pieces.append("PERSIST")
+
+        return self.execute_command("GETEX", name, *pieces)
+
+    def __getitem__(self, name: KeyT):
+        """
+        Return the value at key ``name``; raises a KeyError if the key
+        doesn't exist.
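+
+        Illustrative (assumes a client ``r``): ``r["mykey"]`` behaves like
+        ``r.get("mykey")``, except that a missing key raises ``KeyError``
+        instead of returning None.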
+ """ + value = self.get(name) + if value is not None: + return value + raise KeyError(name) + + def getbit(self, name: KeyT, offset: int) -> ResponseT: + """ + Returns an integer indicating the value of ``offset`` in ``name`` + + For more information see https://redis.io/commands/getbit + """ + return self.execute_command("GETBIT", name, offset) + + def getrange(self, key: KeyT, start: int, end: int) -> ResponseT: + """ + Returns the substring of the string value stored at ``key``, + determined by the offsets ``start`` and ``end`` (both are inclusive) + + For more information see https://redis.io/commands/getrange + """ + return self.execute_command("GETRANGE", key, start, end) + + def getset(self, name: KeyT, value: EncodableT) -> ResponseT: + """ + Sets the value at key ``name`` to ``value`` + and returns the old value at key ``name`` atomically. + + As per Redis 6.2, GETSET is considered deprecated. + Please use SET with GET parameter in new code. + + For more information see https://redis.io/commands/getset + """ + return self.execute_command("GETSET", name, value) + + def incrby(self, name: KeyT, amount: int = 1) -> ResponseT: + """ + Increments the value of ``key`` by ``amount``. If no key exists, + the value will be initialized as ``amount`` + + For more information see https://redis.io/commands/incrby + """ + return self.execute_command("INCRBY", name, amount) + + incr = incrby + + def incrbyfloat(self, name: KeyT, amount: float = 1.0) -> ResponseT: + """ + Increments the value at key ``name`` by floating ``amount``. + If no key exists, the value will be initialized as ``amount`` + + For more information see https://redis.io/commands/incrbyfloat + """ + return self.execute_command("INCRBYFLOAT", name, amount) + + def keys(self, pattern: PatternT = "*", **kwargs) -> ResponseT: + """ + Returns a list of keys matching ``pattern`` + + For more information see https://redis.io/commands/keys + """ + return self.execute_command("KEYS", pattern, **kwargs) + + def lmove( + self, first_list: str, second_list: str, src: str = "LEFT", dest: str = "RIGHT" + ) -> ResponseT: + """ + Atomically returns and removes the first/last element of a list, + pushing it as the first/last element on the destination list. + Returns the element being popped and pushed. + + For more information see https://redis.io/commands/lmove + """ + params = [first_list, second_list, src, dest] + return self.execute_command("LMOVE", *params) + + def blmove( + self, + first_list: str, + second_list: str, + timeout: int, + src: str = "LEFT", + dest: str = "RIGHT", + ) -> ResponseT: + """ + Blocking version of lmove. + + For more information see https://redis.io/commands/blmove + """ + params = [first_list, second_list, src, dest, timeout] + return self.execute_command("BLMOVE", *params) + + def mget(self, keys: KeysT, *args: EncodableT) -> ResponseT: + """ + Returns a list of values ordered identically to ``keys`` + + For more information see https://redis.io/commands/mget + """ + from redis.client import EMPTY_RESPONSE + + args = list_or_args(keys, args) + options = {} + if not args: + options[EMPTY_RESPONSE] = [] + return self.execute_command("MGET", *args, **options) + + def mset(self, mapping: Mapping[AnyKeyT, EncodableT]) -> ResponseT: + """ + Sets key/values based on a mapping. Mapping is a dictionary of + key/value pairs. Both keys and values should be strings or types that + can be cast to a string via str(). 
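+
+        Illustrative (assumes a client ``r``):
+        ``r.mset({"a": "1", "b": "2"})`` sets both keys in a single atomic
+        command.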
+ + For more information see https://redis.io/commands/mset + """ + items = [] + for pair in mapping.items(): + items.extend(pair) + return self.execute_command("MSET", *items) + + def msetnx(self, mapping: Mapping[AnyKeyT, EncodableT]) -> ResponseT: + """ + Sets key/values based on a mapping if none of the keys are already set. + Mapping is a dictionary of key/value pairs. Both keys and values + should be strings or types that can be cast to a string via str(). + Returns a boolean indicating if the operation was successful. + + For more information see https://redis.io/commands/msetnx + """ + items = [] + for pair in mapping.items(): + items.extend(pair) + return self.execute_command("MSETNX", *items) + + def move(self, name: KeyT, db: int) -> ResponseT: + """ + Moves the key ``name`` to a different Redis database ``db`` + + For more information see https://redis.io/commands/move + """ + return self.execute_command("MOVE", name, db) + + def persist(self, name: KeyT) -> ResponseT: + """ + Removes an expiration on ``name`` + + For more information see https://redis.io/commands/persist + """ + return self.execute_command("PERSIST", name) + + def pexpire( + self, + name: KeyT, + time: ExpiryT, + nx: bool = False, + xx: bool = False, + gt: bool = False, + lt: bool = False, + ) -> ResponseT: + """ + Set an expire flag on key ``name`` for ``time`` milliseconds + with given ``option``. ``time`` can be represented by an + integer or a Python timedelta object. + + Valid options are: + NX -> Set expiry only when the key has no expiry + XX -> Set expiry only when the key has an existing expiry + GT -> Set expiry only when the new expiry is greater than current one + LT -> Set expiry only when the new expiry is less than current one + + For more information see https://redis.io/commands/pexpire + """ + if isinstance(time, datetime.timedelta): + time = int(time.total_seconds() * 1000) + + exp_option = list() + if nx: + exp_option.append("NX") + if xx: + exp_option.append("XX") + if gt: + exp_option.append("GT") + if lt: + exp_option.append("LT") + return self.execute_command("PEXPIRE", name, time, *exp_option) + + def pexpireat( + self, + name: KeyT, + when: AbsExpiryT, + nx: bool = False, + xx: bool = False, + gt: bool = False, + lt: bool = False, + ) -> ResponseT: + """ + Set an expire flag on key ``name`` with given ``option``. ``when`` + can be represented as an integer representing unix time in + milliseconds (unix time * 1000) or a Python datetime object. + + Valid options are: + NX -> Set expiry only when the key has no expiry + XX -> Set expiry only when the key has an existing expiry + GT -> Set expiry only when the new expiry is greater than current one + LT -> Set expiry only when the new expiry is less than current one + + For more information see https://redis.io/commands/pexpireat + """ + if isinstance(when, datetime.datetime): + when = int(when.timestamp() * 1000) + exp_option = list() + if nx: + exp_option.append("NX") + if xx: + exp_option.append("XX") + if gt: + exp_option.append("GT") + if lt: + exp_option.append("LT") + return self.execute_command("PEXPIREAT", name, when, *exp_option) + + def pexpiretime(self, key: str) -> int: + """ + Returns the absolute Unix timestamp (since January 1, 1970) in milliseconds + at which the given key will expire. 
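+
+        Illustrative (assumes a client ``r``): ``r.pexpiretime("k")`` reports
+        the same instant as ``r.expiretime("k")``, scaled to milliseconds.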
+ + For more information see https://redis.io/commands/pexpiretime + """ + return self.execute_command("PEXPIRETIME", key) + + def psetex(self, name: KeyT, time_ms: ExpiryT, value: EncodableT): + """ + Set the value of key ``name`` to ``value`` that expires in ``time_ms`` + milliseconds. ``time_ms`` can be represented by an integer or a Python + timedelta object + + For more information see https://redis.io/commands/psetex + """ + if isinstance(time_ms, datetime.timedelta): + time_ms = int(time_ms.total_seconds() * 1000) + return self.execute_command("PSETEX", name, time_ms, value) + + def pttl(self, name: KeyT) -> ResponseT: + """ + Returns the number of milliseconds until the key ``name`` will expire + + For more information see https://redis.io/commands/pttl + """ + return self.execute_command("PTTL", name) + + def hrandfield( + self, key: str, count: int = None, withvalues: bool = False + ) -> ResponseT: + """ + Return a random field from the hash value stored at key. + + count: if the argument is positive, return an array of distinct fields. + If called with a negative count, the behavior changes and the command + is allowed to return the same field multiple times. In this case, + the number of returned fields is the absolute value of the + specified count. + withvalues: The optional WITHVALUES modifier changes the reply so it + includes the respective values of the randomly selected hash fields. + + For more information see https://redis.io/commands/hrandfield + """ + params = [] + if count is not None: + params.append(count) + if withvalues: + params.append("WITHVALUES") + + return self.execute_command("HRANDFIELD", key, *params) + + def randomkey(self, **kwargs) -> ResponseT: + """ + Returns the name of a random key + + For more information see https://redis.io/commands/randomkey + """ + return self.execute_command("RANDOMKEY", **kwargs) + + def rename(self, src: KeyT, dst: KeyT) -> ResponseT: + """ + Rename key ``src`` to ``dst`` + + For more information see https://redis.io/commands/rename + """ + return self.execute_command("RENAME", src, dst) + + def renamenx(self, src: KeyT, dst: KeyT): + """ + Rename key ``src`` to ``dst`` if ``dst`` doesn't already exist + + For more information see https://redis.io/commands/renamenx + """ + return self.execute_command("RENAMENX", src, dst) + + def restore( + self, + name: KeyT, + ttl: float, + value: EncodableT, + replace: bool = False, + absttl: bool = False, + idletime: Union[int, None] = None, + frequency: Union[int, None] = None, + ) -> ResponseT: + """ + Create a key using the provided serialized value, previously obtained + using DUMP. + + ``replace`` allows an existing key on ``name`` to be overridden. If + it's not specified an error is raised on collision. + + ``absttl`` if True, specified ``ttl`` should represent an absolute Unix + timestamp in milliseconds in which the key will expire. (Redis 5.0 or + greater). + + ``idletime`` Used for eviction, this is the number of seconds the + key must be idle, prior to execution. + + ``frequency`` Used for eviction, this is the frequency counter of + the object stored at the key, prior to execution. 
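+
+        Illustrative round trip (assumes a client ``r``):
+        ``r.restore("clone", 0, r.dump("orig"))`` recreates the value of
+        ``orig`` under the key ``clone`` with no TTL.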
+
+        For more information see https://redis.io/commands/restore
+        """
+        params = [name, ttl, value]
+        if replace:
+            params.append("REPLACE")
+        if absttl:
+            params.append("ABSTTL")
+        if idletime is not None:
+            params.append("IDLETIME")
+            try:
+                params.append(int(idletime))
+            except ValueError:
+                raise DataError("idletime must be an integer")
+
+        if frequency is not None:
+            params.append("FREQ")
+            try:
+                params.append(int(frequency))
+            except ValueError:
+                raise DataError("frequency must be an integer")
+
+        return self.execute_command("RESTORE", *params)
+
+    def set(
+        self,
+        name: KeyT,
+        value: EncodableT,
+        ex: Union[ExpiryT, None] = None,
+        px: Union[ExpiryT, None] = None,
+        nx: bool = False,
+        xx: bool = False,
+        keepttl: bool = False,
+        get: bool = False,
+        exat: Union[AbsExpiryT, None] = None,
+        pxat: Union[AbsExpiryT, None] = None,
+    ) -> ResponseT:
+        """
+        Set the value at key ``name`` to ``value``
+
+        ``ex`` sets an expire flag on key ``name`` for ``ex`` seconds.
+
+        ``px`` sets an expire flag on key ``name`` for ``px`` milliseconds.
+
+        ``nx`` if set to True, set the value at key ``name`` to ``value`` only
+        if it does not exist.
+
+        ``xx`` if set to True, set the value at key ``name`` to ``value`` only
+        if it already exists.
+
+        ``keepttl`` if True, retain the time to live associated with the key.
+        (Available since Redis 6.0)
+
+        ``get`` if True, set the value at key ``name`` to ``value`` and return
+        the old value stored at key, or None if the key did not exist.
+        (Available since Redis 6.2)
+
+        ``exat`` sets an expire flag on key ``name`` at the absolute unix
+        time, in seconds, given by ``exat``.
+
+        ``pxat`` sets an expire flag on key ``name`` at the absolute unix
+        time, in milliseconds, given by ``pxat``.
+
+        For more information see https://redis.io/commands/set
+        """
+        pieces: list[EncodableT] = [name, value]
+        options = {}
+        if ex is not None:
+            pieces.append("EX")
+            if isinstance(ex, datetime.timedelta):
+                pieces.append(int(ex.total_seconds()))
+            elif isinstance(ex, int):
+                pieces.append(ex)
+            elif isinstance(ex, str) and ex.isdigit():
+                pieces.append(int(ex))
+            else:
+                raise DataError("ex must be datetime.timedelta or int")
+        if px is not None:
+            pieces.append("PX")
+            if isinstance(px, datetime.timedelta):
+                pieces.append(int(px.total_seconds() * 1000))
+            elif isinstance(px, int):
+                pieces.append(px)
+            else:
+                raise DataError("px must be datetime.timedelta or int")
+        if exat is not None:
+            pieces.append("EXAT")
+            if isinstance(exat, datetime.datetime):
+                exat = int(exat.timestamp())
+            pieces.append(exat)
+        if pxat is not None:
+            pieces.append("PXAT")
+            if isinstance(pxat, datetime.datetime):
+                pxat = int(pxat.timestamp() * 1000)
+            pieces.append(pxat)
+        if keepttl:
+            pieces.append("KEEPTTL")
+
+        if nx:
+            pieces.append("NX")
+        if xx:
+            pieces.append("XX")
+
+        if get:
+            pieces.append("GET")
+            options["get"] = True
+
+        return self.execute_command("SET", *pieces, **options)
+
+    def __setitem__(self, name: KeyT, value: EncodableT):
+        self.set(name, value)
+
+    def setbit(self, name: KeyT, offset: int, value: int) -> ResponseT:
+        """
+        Flag the ``offset`` in ``name`` as ``value``. Returns an integer
+        indicating the previous value of ``offset``.
+
+        For more information see https://redis.io/commands/setbit
+        """
+        value = value and 1 or 0
+        return self.execute_command("SETBIT", name, offset, value)
+
+    def setex(self, name: KeyT, time: ExpiryT, value: EncodableT) -> ResponseT:
+        """
+        Set the value of key ``name`` to ``value`` that expires in ``time``
+        seconds.
``time`` can be represented by an integer or a Python + timedelta object. + + For more information see https://redis.io/commands/setex + """ + if isinstance(time, datetime.timedelta): + time = int(time.total_seconds()) + return self.execute_command("SETEX", name, time, value) + + def setnx(self, name: KeyT, value: EncodableT) -> ResponseT: + """ + Set the value of key ``name`` to ``value`` if key doesn't exist + + For more information see https://redis.io/commands/setnx + """ + return self.execute_command("SETNX", name, value) + + def setrange(self, name: KeyT, offset: int, value: EncodableT) -> ResponseT: + """ + Overwrite bytes in the value of ``name`` starting at ``offset`` with + ``value``. If ``offset`` plus the length of ``value`` exceeds the + length of the original value, the new value will be larger than before. + If ``offset`` exceeds the length of the original value, null bytes + will be used to pad between the end of the previous value and the start + of what's being injected. + + Returns the length of the new string. + + For more information see https://redis.io/commands/setrange + """ + return self.execute_command("SETRANGE", name, offset, value) + + def stralgo( + self, + algo: Literal["LCS"], + value1: KeyT, + value2: KeyT, + specific_argument: Union[Literal["strings"], Literal["keys"]] = "strings", + len: bool = False, + idx: bool = False, + minmatchlen: Union[int, None] = None, + withmatchlen: bool = False, + **kwargs, + ) -> ResponseT: + """ + Implements complex algorithms that operate on strings. + Right now the only algorithm implemented is the LCS algorithm + (longest common substring). However new algorithms could be + implemented in the future. + + ``algo`` Right now must be LCS + ``value1`` and ``value2`` Can be two strings or two keys + ``specific_argument`` Specifying if the arguments to the algorithm + will be keys or strings. strings is the default. + ``len`` Returns just the len of the match. + ``idx`` Returns the match positions in each string. + ``minmatchlen`` Restrict the list of matches to the ones of a given + minimal length. Can be provided only when ``idx`` set to True. + ``withmatchlen`` Returns the matches with the len of the match. + Can be provided only when ``idx`` set to True. + + For more information see https://redis.io/commands/stralgo + """ + # check validity + supported_algo = ["LCS"] + if algo not in supported_algo: + supported_algos_str = ", ".join(supported_algo) + raise DataError(f"The supported algorithms are: {supported_algos_str}") + if specific_argument not in ["keys", "strings"]: + raise DataError("specific_argument can be only keys or strings") + if len and idx: + raise DataError("len and idx cannot be provided together.") + + pieces: list[EncodableT] = [algo, specific_argument.upper(), value1, value2] + if len: + pieces.append(b"LEN") + if idx: + pieces.append(b"IDX") + try: + int(minmatchlen) + pieces.extend([b"MINMATCHLEN", minmatchlen]) + except TypeError: + pass + if withmatchlen: + pieces.append(b"WITHMATCHLEN") + + return self.execute_command( + "STRALGO", + *pieces, + len=len, + idx=idx, + minmatchlen=minmatchlen, + withmatchlen=withmatchlen, + **kwargs, + ) + + def strlen(self, name: KeyT) -> ResponseT: + """ + Return the number of bytes stored in the value of ``name`` + + For more information see https://redis.io/commands/strlen + """ + return self.execute_command("STRLEN", name) + + def substr(self, name: KeyT, start: int, end: int = -1) -> ResponseT: + """ + Return a substring of the string at key ``name``. 
``start`` and ``end``
+        are 0-based integers specifying the portion of the string to return.
+        """
+        return self.execute_command("SUBSTR", name, start, end)
+
+    def touch(self, *args: KeyT) -> ResponseT:
+        """
+        Alters the last access time of a key(s) ``*args``. A key is ignored
+        if it does not exist.
+
+        For more information see https://redis.io/commands/touch
+        """
+        return self.execute_command("TOUCH", *args)
+
+    def ttl(self, name: KeyT) -> ResponseT:
+        """
+        Returns the number of seconds until the key ``name`` will expire
+
+        For more information see https://redis.io/commands/ttl
+        """
+        return self.execute_command("TTL", name)
+
+    def type(self, name: KeyT) -> ResponseT:
+        """
+        Returns the type of key ``name``
+
+        For more information see https://redis.io/commands/type
+        """
+        return self.execute_command("TYPE", name)
+
+    def watch(self, *names: KeyT) -> None:
+        """
+        Watches the values at keys ``names``, aborting a subsequent
+        transaction if any watched key is modified before it executes.
+
+        For more information see https://redis.io/commands/watch
+        """
+        warnings.warn(DeprecationWarning("Call WATCH from a Pipeline object"))
+
+    def unwatch(self) -> None:
+        """
+        Unwatches all previously watched keys for a transaction.
+
+        For more information see https://redis.io/commands/unwatch
+        """
+        warnings.warn(DeprecationWarning("Call UNWATCH from a Pipeline object"))
+
+    def unlink(self, *names: KeyT) -> ResponseT:
+        """
+        Unlink one or more keys specified by ``names``
+
+        For more information see https://redis.io/commands/unlink
+        """
+        return self.execute_command("UNLINK", *names)
+
+    def lcs(
+        self,
+        key1: str,
+        key2: str,
+        len: Optional[bool] = False,
+        idx: Optional[bool] = False,
+        minmatchlen: Optional[int] = 0,
+        withmatchlen: Optional[bool] = False,
+    ) -> Union[str, int, list]:
+        """
+        Find the longest common subsequence between ``key1`` and ``key2``.
+        If ``len`` is true the length of the match will be returned.
+        If ``idx`` is true the match position in each string will be returned.
+        ``minmatchlen`` restricts the list of matches to those of at least
+        the given ``minmatchlen``.
+        If ``withmatchlen`` is true the length of each match will also
+        be returned.
+        For more information see https://redis.io/commands/lcs
+        """
+        pieces = [key1, key2]
+        if len:
+            pieces.append("LEN")
+        if idx:
+            pieces.append("IDX")
+        if minmatchlen != 0:
+            pieces.extend(["MINMATCHLEN", minmatchlen])
+        if withmatchlen:
+            pieces.append("WITHMATCHLEN")
+        return self.execute_command("LCS", *pieces)
+
+
+class AsyncBasicKeyCommands(BasicKeyCommands):
+    def __delitem__(self, name: KeyT):
+        raise TypeError("Async Redis client does not support class deletion")
+
+    def __contains__(self, name: KeyT):
+        raise TypeError("Async Redis client does not support class inclusion")
+
+    def __getitem__(self, name: KeyT):
+        raise TypeError("Async Redis client does not support class retrieval")
+
+    def __setitem__(self, name: KeyT, value: EncodableT):
+        raise TypeError("Async Redis client does not support class assignment")
+
+    async def watch(self, *names: KeyT) -> None:
+        return super().watch(*names)
+
+    async def unwatch(self) -> None:
+        return super().unwatch()
+
+
+class ListCommands(CommandsProtocol):
+    """
+    Redis commands for List data type.
+    see: https://redis.io/topics/data-types#lists
+    """
+
+    def blpop(
+        self, keys: List, timeout: Optional[int] = 0
+    ) -> Union[Awaitable[list], list]:
+        """
+        LPOP a value off of the first non-empty list
+        named in the ``keys`` list.
+
+        If none of the lists in ``keys`` has a value to LPOP, then block
+        for ``timeout`` seconds, or until a value gets pushed on to one
+        of the lists.
+
+        If timeout is 0, then block indefinitely.
+
+        For more information see https://redis.io/commands/blpop
+        """
+        if timeout is None:
+            timeout = 0
+        keys = list_or_args(keys, None)
+        keys.append(timeout)
+        return self.execute_command("BLPOP", *keys)
+
+    def brpop(
+        self, keys: List, timeout: Optional[int] = 0
+    ) -> Union[Awaitable[list], list]:
+        """
+        RPOP a value off of the first non-empty list
+        named in the ``keys`` list.
+
+        If none of the lists in ``keys`` has a value to RPOP, then block
+        for ``timeout`` seconds, or until a value gets pushed on to one
+        of the lists.
+
+        If timeout is 0, then block indefinitely.
+
+        For more information see https://redis.io/commands/brpop
+        """
+        if timeout is None:
+            timeout = 0
+        keys = list_or_args(keys, None)
+        keys.append(timeout)
+        return self.execute_command("BRPOP", *keys)
+
+    def brpoplpush(
+        self, src: str, dst: str, timeout: Optional[int] = 0
+    ) -> Union[Awaitable[Optional[str]], Optional[str]]:
+        """
+        Pop a value off the tail of ``src``, push it on the head of ``dst``
+        and then return it.
+
+        This command blocks until a value is in ``src`` or until ``timeout``
+        seconds elapse, whichever is first. A ``timeout`` value of 0 blocks
+        forever.
+
+        For more information see https://redis.io/commands/brpoplpush
+        """
+        if timeout is None:
+            timeout = 0
+        return self.execute_command("BRPOPLPUSH", src, dst, timeout)
+
+    def blmpop(
+        self,
+        timeout: float,
+        numkeys: int,
+        *args: List[str],
+        direction: str,
+        count: Optional[int] = 1,
+    ) -> Optional[list]:
+        """
+        Pop ``count`` values (default 1) from the first non-empty list among
+        the provided key names.
+
+        When all lists are empty, this command blocks the connection until
+        another client pushes to one of them or until the timeout elapses;
+        a timeout of 0 blocks indefinitely.
+
+        For more information see https://redis.io/commands/blmpop
+        """
+        args = [timeout, numkeys, *args, direction, "COUNT", count]
+
+        return self.execute_command("BLMPOP", *args)
+
+    def lmpop(
+        self,
+        num_keys: int,
+        *args: List[str],
+        direction: str,
+        count: Optional[int] = 1,
+    ) -> Union[Awaitable[list], list]:
+        """
+        Pop ``count`` values (default 1) from the first non-empty list among
+        the provided key names.
+
+        For more information see https://redis.io/commands/lmpop
+        """
+        args = [num_keys] + list(args) + [direction]
+        if count != 1:
+            args.extend(["COUNT", count])
+
+        return self.execute_command("LMPOP", *args)
+
+    def lindex(
+        self, name: str, index: int
+    ) -> Union[Awaitable[Optional[str]], Optional[str]]:
+        """
+        Return the item from list ``name`` at position ``index``
+
+        Negative indexes are supported and will return an item at the
+        end of the list
+
+        For more information see https://redis.io/commands/lindex
+        """
+        return self.execute_command("LINDEX", name, index)
+
+    def linsert(
+        self, name: str, where: str, refvalue: str, value: str
+    ) -> Union[Awaitable[int], int]:
+        """
+        Insert ``value`` in list ``name`` immediately before or after
+        ``refvalue``, as determined by ``where`` (``BEFORE`` or ``AFTER``)
+
+        Returns the new length of the list on success or -1 if ``refvalue``
+        is not in the list.
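+
+        Illustrative (assumes a client ``r``):
+        ``r.linsert("mylist", "BEFORE", "pivot", "new")`` inserts ``"new"``
+        immediately before the first occurrence of ``"pivot"``.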
+ + For more information see https://redis.io/commands/linsert + """ + return self.execute_command("LINSERT", name, where, refvalue, value) + + def llen(self, name: str) -> Union[Awaitable[int], int]: + """ + Return the length of the list ``name`` + + For more information see https://redis.io/commands/llen + """ + return self.execute_command("LLEN", name) + + def lpop( + self, + name: str, + count: Optional[int] = None, + ) -> Union[Awaitable[Union[str, List, None]], Union[str, List, None]]: + """ + Removes and returns the first elements of the list ``name``. + + By default, the command pops a single element from the beginning of + the list. When provided with the optional ``count`` argument, the reply + will consist of up to count elements, depending on the list's length. + + For more information see https://redis.io/commands/lpop + """ + if count is not None: + return self.execute_command("LPOP", name, count) + else: + return self.execute_command("LPOP", name) + + def lpush(self, name: str, *values: FieldT) -> Union[Awaitable[int], int]: + """ + Push ``values`` onto the head of the list ``name`` + + For more information see https://redis.io/commands/lpush + """ + return self.execute_command("LPUSH", name, *values) + + def lpushx(self, name: str, *values: FieldT) -> Union[Awaitable[int], int]: + """ + Push ``value`` onto the head of the list ``name`` if ``name`` exists + + For more information see https://redis.io/commands/lpushx + """ + return self.execute_command("LPUSHX", name, *values) + + def lrange(self, name: str, start: int, end: int) -> Union[Awaitable[list], list]: + """ + Return a slice of the list ``name`` between + position ``start`` and ``end`` + + ``start`` and ``end`` can be negative numbers just like + Python slicing notation + + For more information see https://redis.io/commands/lrange + """ + return self.execute_command("LRANGE", name, start, end) + + def lrem(self, name: str, count: int, value: str) -> Union[Awaitable[int], int]: + """ + Remove the first ``count`` occurrences of elements equal to ``value`` + from the list stored at ``name``. + + The count argument influences the operation in the following ways: + count > 0: Remove elements equal to value moving from head to tail. + count < 0: Remove elements equal to value moving from tail to head. + count = 0: Remove all elements equal to value. + + For more information see https://redis.io/commands/lrem + """ + return self.execute_command("LREM", name, count, value) + + def lset(self, name: str, index: int, value: str) -> Union[Awaitable[str], str]: + """ + Set element at ``index`` of list ``name`` to ``value`` + + For more information see https://redis.io/commands/lset + """ + return self.execute_command("LSET", name, index, value) + + def ltrim(self, name: str, start: int, end: int) -> Union[Awaitable[str], str]: + """ + Trim the list ``name``, removing all values not within the slice + between ``start`` and ``end`` + + ``start`` and ``end`` can be negative numbers just like + Python slicing notation + + For more information see https://redis.io/commands/ltrim + """ + return self.execute_command("LTRIM", name, start, end) + + def rpop( + self, + name: str, + count: Optional[int] = None, + ) -> Union[Awaitable[Union[str, List, None]], Union[str, List, None]]: + """ + Removes and returns the last elements of the list ``name``. + + By default, the command pops a single element from the end of the list. 
+
+        When provided with the optional ``count`` argument, the reply will
+        consist of up to count elements, depending on the list's length.
+
+        For more information see https://redis.io/commands/rpop
+        """
+        if count is not None:
+            return self.execute_command("RPOP", name, count)
+        else:
+            return self.execute_command("RPOP", name)
+
+    def rpoplpush(self, src: str, dst: str) -> Union[Awaitable[str], str]:
+        """
+        RPOP a value off of the ``src`` list and atomically LPUSH it
+        on to the ``dst`` list. Returns the value.
+
+        For more information see https://redis.io/commands/rpoplpush
+        """
+        return self.execute_command("RPOPLPUSH", src, dst)
+
+    def rpush(self, name: str, *values: FieldT) -> Union[Awaitable[int], int]:
+        """
+        Push ``values`` onto the tail of the list ``name``
+
+        For more information see https://redis.io/commands/rpush
+        """
+        return self.execute_command("RPUSH", name, *values)
+
+    def rpushx(self, name: str, *values: str) -> Union[Awaitable[int], int]:
+        """
+        Push ``values`` onto the tail of the list ``name`` if ``name`` exists
+
+        For more information see https://redis.io/commands/rpushx
+        """
+        return self.execute_command("RPUSHX", name, *values)
+
+    def lpos(
+        self,
+        name: str,
+        value: str,
+        rank: Optional[int] = None,
+        count: Optional[int] = None,
+        maxlen: Optional[int] = None,
+    ) -> Union[str, List, None]:
+        """
+        Get position of ``value`` within the list ``name``
+
+        If specified, ``rank`` indicates the "rank" of the first element to
+        return in case there are multiple copies of ``value`` in the list.
+        By default, LPOS returns the position of the first occurrence of
+        ``value`` in the list. When ``rank`` is 2, LPOS returns the position
+        of the second ``value`` in the list. If ``rank`` is negative, LPOS
+        searches the list in reverse. For example, -1 would return the
+        position of the last occurrence of ``value`` and -2 would return the
+        position of the next to last occurrence of ``value``.
+
+        If specified, ``count`` indicates that LPOS should return a list of
+        up to ``count`` positions. A ``count`` of 2 would return a list of
+        up to 2 positions. A ``count`` of 0 returns a list of all positions
+        matching ``value``. When ``count`` is specified but ``value``
+        does not exist in the list, an empty list is returned.
+
+        If specified, ``maxlen`` indicates the maximum number of list
+        elements to scan. A ``maxlen`` of 1000 will only return the
+        position(s) of items within the first 1000 entries in the list.
+        A ``maxlen`` of 0 (the default) will scan the entire list.
+
+        For more information see https://redis.io/commands/lpos
+        """
+        pieces: list[EncodableT] = [name, value]
+        if rank is not None:
+            pieces.extend(["RANK", rank])
+
+        if count is not None:
+            pieces.extend(["COUNT", count])
+
+        if maxlen is not None:
+            pieces.extend(["MAXLEN", maxlen])
+
+        return self.execute_command("LPOS", *pieces)
+
+    def sort(
+        self,
+        name: str,
+        start: Optional[int] = None,
+        num: Optional[int] = None,
+        by: Optional[str] = None,
+        get: Optional[List[str]] = None,
+        desc: bool = False,
+        alpha: bool = False,
+        store: Optional[str] = None,
+        groups: Optional[bool] = False,
+    ) -> Union[List, int]:
+        """
+        Sort and return the list, set or sorted set at ``name``.
+
+        ``start`` and ``num`` allow for paging through the sorted data
+
+        ``by`` allows using an external key to weight and sort the items.
+        Use an "*" to indicate where in the key the item value is located
+
+        ``get`` allows for returning items from external keys rather than the
+        sorted data itself. Use an "*" to indicate where in the key
+        the item value is located
+
+        ``desc`` allows for reversing the sort
+
+        ``alpha`` allows for sorting lexicographically rather than numerically
+
+        ``store`` allows for storing the result of the sort into
+        the key ``store``
+
+        ``groups`` if set to True and if ``get`` contains at least two
+        elements, sort will return a list of tuples, each containing the
+        values fetched from the arguments to ``get``.
+
+        For more information see https://redis.io/commands/sort
+        """
+        if (start is not None and num is None) or (num is not None and start is None):
+            raise DataError("``start`` and ``num`` must both be specified")
+
+        pieces: list[EncodableT] = [name]
+        if by is not None:
+            pieces.extend([b"BY", by])
+        if start is not None and num is not None:
+            pieces.extend([b"LIMIT", start, num])
+        if get is not None:
+            # If get is a string assume we want to get a single value.
+            # Otherwise assume it's an iterable and we want to get multiple
+            # values. We can't just iterate blindly because strings are
+            # iterable.
+            if isinstance(get, (bytes, str)):
+                pieces.extend([b"GET", get])
+            else:
+                for g in get:
+                    pieces.extend([b"GET", g])
+        if desc:
+            pieces.append(b"DESC")
+        if alpha:
+            pieces.append(b"ALPHA")
+        if store is not None:
+            pieces.extend([b"STORE", store])
+        if groups:
+            if not get or isinstance(get, (bytes, str)) or len(get) < 2:
+                raise DataError(
+                    'when using "groups" the "get" argument '
+                    "must be specified and contain at least "
+                    "two keys"
+                )
+
+        options = {"groups": len(get) if groups else None}
+        return self.execute_command("SORT", *pieces, **options)
+
+    def sort_ro(
+        self,
+        key: str,
+        start: Optional[int] = None,
+        num: Optional[int] = None,
+        by: Optional[str] = None,
+        get: Optional[List[str]] = None,
+        desc: bool = False,
+        alpha: bool = False,
+    ) -> list:
+        """
+        Returns the elements contained in the list, set or sorted set at key.
+        (read-only variant of the SORT command)
+
+        ``start`` and ``num`` allow for paging through the sorted data
+
+        ``by`` allows using an external key to weight and sort the items.
+        Use an "*" to indicate where in the key the item value is located
+
+        ``get`` allows for returning items from external keys rather than the
+        sorted data itself. Use an "*" to indicate where in the key
+        the item value is located
+
+        ``desc`` allows for reversing the sort
+
+        ``alpha`` allows for sorting lexicographically rather than numerically
+
+        For more information see https://redis.io/commands/sort_ro
+        """
+        return self.sort(
+            key, start=start, num=num, by=by, get=get, desc=desc, alpha=alpha
+        )
+
+
+AsyncListCommands = ListCommands
+
+
+class ScanCommands(CommandsProtocol):
+    """
+    Redis SCAN commands.
+    see: https://redis.io/commands/scan
+    """
+
+    def scan(
+        self,
+        cursor: int = 0,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+        _type: Union[str, None] = None,
+        **kwargs,
+    ) -> ResponseT:
+        """
+        Incrementally return lists of key names. Also return a cursor
+        indicating the scan position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of keys to
+        return per batch.
+
+        ``_type`` filters the returned values by a particular Redis type.
+        Stock Redis instances allow for the following types:
+        HASH, LIST, SET, STREAM, STRING, ZSET
+        Additionally, Redis modules can expose other types as well.
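+
+        Illustrative cursor loop (assumes a client ``r``; SCAN may return
+        a given key more than once across a full iteration)::
+
+            cursor = 0
+            while True:
+                cursor, keys = r.scan(cursor=cursor, match="user:*", count=100)
+                # process this batch of keys ...
+                if cursor == 0:
+                    break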
+
+        For more information see https://redis.io/commands/scan
+        """
+        pieces: list[EncodableT] = [cursor]
+        if match is not None:
+            pieces.extend([b"MATCH", match])
+        if count is not None:
+            pieces.extend([b"COUNT", count])
+        if _type is not None:
+            pieces.extend([b"TYPE", _type])
+        return self.execute_command("SCAN", *pieces, **kwargs)
+
+    def scan_iter(
+        self,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+        _type: Union[str, None] = None,
+        **kwargs,
+    ) -> Iterator:
+        """
+        Make an iterator using the SCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of keys to
+        return per batch.
+
+        ``_type`` filters the returned values by a particular Redis type.
+        Stock Redis instances allow for the following types:
+        HASH, LIST, SET, STREAM, STRING, ZSET
+        Additionally, Redis modules can expose other types as well.
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = self.scan(
+                cursor=cursor, match=match, count=count, _type=_type, **kwargs
+            )
+            yield from data
+
+    def sscan(
+        self,
+        name: KeyT,
+        cursor: int = 0,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+    ) -> ResponseT:
+        """
+        Incrementally return lists of elements in a set. Also return a cursor
+        indicating the scan position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+
+        For more information see https://redis.io/commands/sscan
+        """
+        pieces: list[EncodableT] = [name, cursor]
+        if match is not None:
+            pieces.extend([b"MATCH", match])
+        if count is not None:
+            pieces.extend([b"COUNT", count])
+        return self.execute_command("SSCAN", *pieces)
+
+    def sscan_iter(
+        self,
+        name: KeyT,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+    ) -> Iterator:
+        """
+        Make an iterator using the SSCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = self.sscan(name, cursor=cursor, match=match, count=count)
+            yield from data
+
+    def hscan(
+        self,
+        name: KeyT,
+        cursor: int = 0,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+    ) -> ResponseT:
+        """
+        Incrementally return key/value slices in a hash. Also return a cursor
+        indicating the scan position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+
+        For more information see https://redis.io/commands/hscan
+        """
+        pieces: list[EncodableT] = [name, cursor]
+        if match is not None:
+            pieces.extend([b"MATCH", match])
+        if count is not None:
+            pieces.extend([b"COUNT", count])
+        return self.execute_command("HSCAN", *pieces)
+
+    def hscan_iter(
+        self,
+        name: str,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+    ) -> Iterator:
+        """
+        Make an iterator using the HSCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = self.hscan(name, cursor=cursor, match=match, count=count)
+            yield from data.items()
+
+    def zscan(
+        self,
+        name: KeyT,
+        cursor: int = 0,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+        score_cast_func: Union[type, Callable] = float,
+    ) -> ResponseT:
+        """
+        Incrementally return lists of elements in a sorted set. Also return a
+        cursor indicating the scan position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+
+        ``score_cast_func`` a callable used to cast the score return value
+
+        For more information see https://redis.io/commands/zscan
+        """
+        pieces = [name, cursor]
+        if match is not None:
+            pieces.extend([b"MATCH", match])
+        if count is not None:
+            pieces.extend([b"COUNT", count])
+        options = {"score_cast_func": score_cast_func}
+        return self.execute_command("ZSCAN", *pieces, **options)
+
+    def zscan_iter(
+        self,
+        name: KeyT,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+        score_cast_func: Union[type, Callable] = float,
+    ) -> Iterator:
+        """
+        Make an iterator using the ZSCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+
+        ``score_cast_func`` a callable used to cast the score return value
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = self.zscan(
+                name,
+                cursor=cursor,
+                match=match,
+                count=count,
+                score_cast_func=score_cast_func,
+            )
+            yield from data
+
+
+class AsyncScanCommands(ScanCommands):
+    async def scan_iter(
+        self,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+        _type: Union[str, None] = None,
+        **kwargs,
+    ) -> AsyncIterator:
+        """
+        Make an iterator using the SCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of keys to
+        return per batch.
+
+        ``_type`` filters the returned values by a particular Redis type.
+        Stock Redis instances allow for the following types:
+        HASH, LIST, SET, STREAM, STRING, ZSET
+        Additionally, Redis modules can expose other types as well.
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = await self.scan(
+                cursor=cursor, match=match, count=count, _type=_type, **kwargs
+            )
+            for d in data:
+                yield d
+
+    async def sscan_iter(
+        self,
+        name: KeyT,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+    ) -> AsyncIterator:
+        """
+        Make an iterator using the SSCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = await self.sscan(
+                name, cursor=cursor, match=match, count=count
+            )
+            for d in data:
+                yield d
+
+    async def hscan_iter(
+        self,
+        name: str,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+    ) -> AsyncIterator:
+        """
+        Make an iterator using the HSCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = await self.hscan(
+                name, cursor=cursor, match=match, count=count
+            )
+            for it in data.items():
+                yield it
+
+    async def zscan_iter(
+        self,
+        name: KeyT,
+        match: Union[PatternT, None] = None,
+        count: Union[int, None] = None,
+        score_cast_func: Union[type, Callable] = float,
+    ) -> AsyncIterator:
+        """
+        Make an iterator using the ZSCAN command so that the client doesn't
+        need to remember the cursor position.
+
+        ``match`` allows for filtering the keys by pattern
+
+        ``count`` provides a hint to Redis about the number of elements to
+        return per batch.
+
+        ``score_cast_func`` a callable used to cast the score return value
+        """
+        cursor = "0"
+        while cursor != 0:
+            cursor, data = await self.zscan(
+                name,
+                cursor=cursor,
+                match=match,
+                count=count,
+                score_cast_func=score_cast_func,
+            )
+            for d in data:
+                yield d
+
+
+class SetCommands(CommandsProtocol):
+    """
+    Redis commands for Set data type.
+    see: https://redis.io/topics/data-types#sets
+    """
+
+    def sadd(self, name: str, *values: FieldT) -> Union[Awaitable[int], int]:
+        """
+        Add ``value(s)`` to set ``name``
+
+        For more information see https://redis.io/commands/sadd
+        """
+        return self.execute_command("SADD", name, *values)
+
+    def scard(self, name: str) -> Union[Awaitable[int], int]:
+        """
+        Return the number of elements in set ``name``
+
+        For more information see https://redis.io/commands/scard
+        """
+        return self.execute_command("SCARD", name)
+
+    def sdiff(self, keys: List, *args: List) -> Union[Awaitable[list], list]:
+        """
+        Return the difference of sets specified by ``keys``
+
+        For more information see https://redis.io/commands/sdiff
+        """
+        args = list_or_args(keys, args)
+        return self.execute_command("SDIFF", *args)
+
+    def sdiffstore(
+        self, dest: str, keys: List, *args: List
+    ) -> Union[Awaitable[int], int]:
+        """
+        Store the difference of sets specified by ``keys`` into a new
+        set named ``dest``. Returns the number of keys in the new set.
+
+        For more information see https://redis.io/commands/sdiffstore
+        """
+        args = list_or_args(keys, args)
+        return self.execute_command("SDIFFSTORE", dest, *args)
+
+    def sinter(self, keys: List, *args: List) -> Union[Awaitable[list], list]:
+        """
+        Return the intersection of sets specified by ``keys``
+
+        For more information see https://redis.io/commands/sinter
+        """
+        args = list_or_args(keys, args)
+        return self.execute_command("SINTER", *args)
+
+    def sintercard(
+        self, numkeys: int, keys: List[str], limit: int = 0
+    ) -> Union[Awaitable[int], int]:
+        """
+        Return the cardinality of the intersection of multiple sets specified
+        by ``keys``.
+
+        When LIMIT is provided (it defaults to 0, meaning unlimited), if the
+        intersection cardinality reaches ``limit`` partway through the
+        computation, the algorithm exits and yields ``limit`` as the
+        cardinality.
+
+        For more information see https://redis.io/commands/sintercard
+        """
+        args = [numkeys, *keys, "LIMIT", limit]
+        return self.execute_command("SINTERCARD", *args)
+
+    def sinterstore(
+        self, dest: str, keys: List, *args: List
+    ) -> Union[Awaitable[int], int]:
+        """
+        Store the intersection of sets specified by ``keys`` into a new
+        set named ``dest``. Returns the number of keys in the new set.
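+
+        Illustrative (assumes a client ``r``):
+        ``r.sinterstore("both", ["s1", "s2"])`` stores the intersection of
+        ``s1`` and ``s2`` at ``both`` and returns its cardinality.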
+ + For more information see https://redis.io/commands/sinterstore + """ + args = list_or_args(keys, args) + return self.execute_command("SINTERSTORE", dest, *args) + + def sismember( + self, name: str, value: str + ) -> Union[Awaitable[Union[Literal[0], Literal[1]]], Union[Literal[0], Literal[1]]]: + """ + Return whether ``value`` is a member of set ``name``: + - 1 if the value is a member of the set. + - 0 if the value is not a member of the set or if key does not exist. + + For more information see https://redis.io/commands/sismember + """ + return self.execute_command("SISMEMBER", name, value) + + def smembers(self, name: str) -> Union[Awaitable[Set], Set]: + """ + Return all members of the set ``name`` + + For more information see https://redis.io/commands/smembers + """ + return self.execute_command("SMEMBERS", name) + + def smismember( + self, name: str, values: List, *args: List + ) -> Union[ + Awaitable[List[Union[Literal[0], Literal[1]]]], + List[Union[Literal[0], Literal[1]]], + ]: + """ + Return whether each value in ``values`` is a member of the set ``name`` + as a list of ``int`` in the order of ``values``: + - 1 if the value is a member of the set. + - 0 if the value is not a member of the set or if key does not exist. + + For more information see https://redis.io/commands/smismember + """ + args = list_or_args(values, args) + return self.execute_command("SMISMEMBER", name, *args) + + def smove(self, src: str, dst: str, value: str) -> Union[Awaitable[bool], bool]: + """ + Move ``value`` from set ``src`` to set ``dst`` atomically + + For more information see https://redis.io/commands/smove + """ + return self.execute_command("SMOVE", src, dst, value) + + def spop(self, name: str, count: Optional[int] = None) -> Union[str, List, None]: + """ + Remove and return a random member of set ``name`` + + For more information see https://redis.io/commands/spop + """ + args = (count is not None) and [count] or [] + return self.execute_command("SPOP", name, *args) + + def srandmember( + self, name: str, number: Optional[int] = None + ) -> Union[str, List, None]: + """ + If ``number`` is None, returns a random member of set ``name``. + + If ``number`` is supplied, returns a list of ``number`` random + members of set ``name``. Note this is only available when running + Redis 2.6+. + + For more information see https://redis.io/commands/srandmember + """ + args = (number is not None) and [number] or [] + return self.execute_command("SRANDMEMBER", name, *args) + + def srem(self, name: str, *values: FieldT) -> Union[Awaitable[int], int]: + """ + Remove ``values`` from set ``name`` + + For more information see https://redis.io/commands/srem + """ + return self.execute_command("SREM", name, *values) + + def sunion(self, keys: List, *args: List) -> Union[Awaitable[List], List]: + """ + Return the union of sets specified by ``keys`` + + For more information see https://redis.io/commands/sunion + """ + args = list_or_args(keys, args) + return self.execute_command("SUNION", *args) + + def sunionstore( + self, dest: str, keys: List, *args: List + ) -> Union[Awaitable[int], int]: + """ + Store the union of sets specified by ``keys`` into a new + set named ``dest``. Returns the number of keys in the new set. + + For more information see https://redis.io/commands/sunionstore + """ + args = list_or_args(keys, args) + return self.execute_command("SUNIONSTORE", dest, *args) + + +AsyncSetCommands = SetCommands + + +class StreamCommands(CommandsProtocol): + """ + Redis commands for Stream data type. 
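+    A minimal produce/consume sketch (assumes a client ``r``):
+    ``r.xadd("events", {"type": "click"})`` appends an entry, and
+    ``r.xread({"events": "0"})`` returns entries after ID 0, i.e. from the
+    beginning of the stream.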
+ see: https://redis.io/topics/streams-intro
+ """
+
+ def xack(self, name: KeyT, groupname: GroupT, *ids: StreamIdT) -> ResponseT:
+ """
+ Acknowledges the successful processing of one or more messages.
+ name: name of the stream.
+ groupname: name of the consumer group.
+ *ids: message ids to acknowledge.
+
+ For more information see https://redis.io/commands/xack
+ """
+ return self.execute_command("XACK", name, groupname, *ids)
+
+ def xadd(
+ self,
+ name: KeyT,
+ fields: Dict[FieldT, EncodableT],
+ id: StreamIdT = "*",
+ maxlen: Union[int, None] = None,
+ approximate: bool = True,
+ nomkstream: bool = False,
+ minid: Union[StreamIdT, None] = None,
+ limit: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Add to a stream.
+ name: name of the stream
+ fields: dict of field/value pairs to insert into the stream
+ id: Location to insert this record. By default it is appended.
+ maxlen: truncate old stream members beyond this size.
+ Can't be specified with minid.
+ approximate: actual stream length may be slightly more than maxlen
+ nomkstream: When set to true, do not make a stream
+ minid: the minimum id in the stream to retain.
+ Can't be specified with maxlen.
+ limit: specifies the maximum number of entries to trim per call.
+ Can only be used together with ``approximate``.
+
+ For more information see https://redis.io/commands/xadd
+ """
+ pieces: list[EncodableT] = []
+ if maxlen is not None and minid is not None:
+ raise DataError("Only one of ``maxlen`` or ``minid`` may be specified")
+
+ if maxlen is not None:
+ if not isinstance(maxlen, int) or maxlen < 0:
+ raise DataError("XADD maxlen must be a non-negative integer")
+ pieces.append(b"MAXLEN")
+ if approximate:
+ pieces.append(b"~")
+ pieces.append(str(maxlen))
+ if minid is not None:
+ pieces.append(b"MINID")
+ if approximate:
+ pieces.append(b"~")
+ pieces.append(minid)
+ if limit is not None:
+ pieces.extend([b"LIMIT", limit])
+ if nomkstream:
+ pieces.append(b"NOMKSTREAM")
+ pieces.append(id)
+ if not isinstance(fields, dict) or len(fields) == 0:
+ raise DataError("XADD fields must be a non-empty dict")
+ for pair in fields.items():
+ pieces.extend(pair)
+ return self.execute_command("XADD", name, *pieces)
+
+ def xautoclaim(
+ self,
+ name: KeyT,
+ groupname: GroupT,
+ consumername: ConsumerT,
+ min_idle_time: int,
+ start_id: StreamIdT = "0-0",
+ count: Union[int, None] = None,
+ justid: bool = False,
+ ) -> ResponseT:
+ """
+ Transfers ownership of pending stream entries that match the specified
+ criteria. Conceptually, equivalent to calling XPENDING and then XCLAIM,
+ but provides a more straightforward way to deal with message delivery
+ failures via SCAN-like semantics.
+ name: name of the stream.
+ groupname: name of the consumer group.
+ consumername: name of a consumer that claims the message.
+ min_idle_time: only claim messages that have been idle for at least
+ this many milliseconds.
+ start_id: filter messages with equal or greater ID.
+ count: optional integer, upper limit of the number of entries that the
+ command attempts to claim. Set to 100 by default.
+ justid: optional boolean, false by default.
Return just an array of IDs
+ of messages successfully claimed, without returning the actual message
+
+ For more information see https://redis.io/commands/xautoclaim
+ """
+ try:
+ if int(min_idle_time) < 0:
+ raise DataError(
+ "XAUTOCLAIM min_idle_time must be a non-negative integer"
+ )
+ except TypeError:
+ pass
+
+ kwargs = {}
+ pieces = [name, groupname, consumername, min_idle_time, start_id]
+
+ try:
+ if int(count) < 0:
+ raise DataError("XAUTOCLAIM count must be an integer >= 0")
+ pieces.extend([b"COUNT", count])
+ except TypeError:
+ pass
+ if justid:
+ pieces.append(b"JUSTID")
+ kwargs["parse_justid"] = True
+
+ return self.execute_command("XAUTOCLAIM", *pieces, **kwargs)
+
+ def xclaim(
+ self,
+ name: KeyT,
+ groupname: GroupT,
+ consumername: ConsumerT,
+ min_idle_time: int,
+ message_ids: Union[List[StreamIdT], Tuple[StreamIdT]],
+ idle: Union[int, None] = None,
+ time: Union[int, None] = None,
+ retrycount: Union[int, None] = None,
+ force: bool = False,
+ justid: bool = False,
+ ) -> ResponseT:
+ """
+ Changes the ownership of a pending message.
+
+ name: name of the stream.
+
+ groupname: name of the consumer group.
+
+ consumername: name of a consumer that claims the message.
+
+ min_idle_time: only claim messages that have been idle for at least
+ this many milliseconds
+
+ message_ids: non-empty list or tuple of message IDs to claim
+
+ idle: optional. Set the idle time (last time it was delivered) of the
+ message in ms
+
+ time: optional integer. This is the same as idle but instead of a
+ relative amount of milliseconds, it sets the idle time to a specific
+ Unix time (in milliseconds).
+
+ retrycount: optional integer. Set the retry counter to the specified
+ value. This counter is incremented every time a message is delivered
+ again.
+
+ force: optional boolean, false by default. Creates the pending message
+ entry in the PEL even if certain specified IDs are not already in the
+ PEL assigned to a different client.
+
+ justid: optional boolean, false by default. Return just an array of IDs
+ of messages successfully claimed, without returning the actual message
+
+ For more information see https://redis.io/commands/xclaim
+ """
+ if not isinstance(min_idle_time, int) or min_idle_time < 0:
+ raise DataError("XCLAIM min_idle_time must be a non-negative integer")
+ if not isinstance(message_ids, (list, tuple)) or not message_ids:
+ raise DataError(
+ "XCLAIM message_ids must be a non-empty list or "
+ "tuple of message IDs to claim"
+ )
+
+ kwargs = {}
+ pieces: list[EncodableT] = [name, groupname, consumername, str(min_idle_time)]
+ pieces.extend(list(message_ids))
+
+ if idle is not None:
+ if not isinstance(idle, int):
+ raise DataError("XCLAIM idle must be an integer")
+ pieces.extend((b"IDLE", str(idle)))
+ if time is not None:
+ if not isinstance(time, int):
+ raise DataError("XCLAIM time must be an integer")
+ pieces.extend((b"TIME", str(time)))
+ if retrycount is not None:
+ if not isinstance(retrycount, int):
+ raise DataError("XCLAIM retrycount must be an integer")
+ pieces.extend((b"RETRYCOUNT", str(retrycount)))
+
+ if force:
+ if not isinstance(force, bool):
+ raise DataError("XCLAIM force must be a boolean")
+ pieces.append(b"FORCE")
+ if justid:
+ if not isinstance(justid, bool):
+ raise DataError("XCLAIM justid must be a boolean")
+ pieces.append(b"JUSTID")
+ kwargs["parse_justid"] = True
+ return self.execute_command("XCLAIM", *pieces, **kwargs)
+
+ def xdel(self, name: KeyT, *ids: StreamIdT) -> ResponseT:
+ """
+ Deletes one or more messages from a stream.
+ name: name of the stream. + *ids: message ids to delete. + + For more information see https://redis.io/commands/xdel + """ + return self.execute_command("XDEL", name, *ids) + + def xgroup_create( + self, + name: KeyT, + groupname: GroupT, + id: StreamIdT = "$", + mkstream: bool = False, + entries_read: Optional[int] = None, + ) -> ResponseT: + """ + Create a new consumer group associated with a stream. + name: name of the stream. + groupname: name of the consumer group. + id: ID of the last item in the stream to consider already delivered. + + For more information see https://redis.io/commands/xgroup-create + """ + pieces: list[EncodableT] = ["XGROUP CREATE", name, groupname, id] + if mkstream: + pieces.append(b"MKSTREAM") + if entries_read is not None: + pieces.extend(["ENTRIESREAD", entries_read]) + + return self.execute_command(*pieces) + + def xgroup_delconsumer( + self, name: KeyT, groupname: GroupT, consumername: ConsumerT + ) -> ResponseT: + """ + Remove a specific consumer from a consumer group. + Returns the number of pending messages that the consumer had before it + was deleted. + name: name of the stream. + groupname: name of the consumer group. + consumername: name of consumer to delete + + For more information see https://redis.io/commands/xgroup-delconsumer + """ + return self.execute_command("XGROUP DELCONSUMER", name, groupname, consumername) + + def xgroup_destroy(self, name: KeyT, groupname: GroupT) -> ResponseT: + """ + Destroy a consumer group. + name: name of the stream. + groupname: name of the consumer group. + + For more information see https://redis.io/commands/xgroup-destroy + """ + return self.execute_command("XGROUP DESTROY", name, groupname) + + def xgroup_createconsumer( + self, name: KeyT, groupname: GroupT, consumername: ConsumerT + ) -> ResponseT: + """ + Consumers in a consumer group are auto-created every time a new + consumer name is mentioned by some command. + They can be explicitly created by using this command. + name: name of the stream. + groupname: name of the consumer group. + consumername: name of consumer to create. + + See: https://redis.io/commands/xgroup-createconsumer + """ + return self.execute_command( + "XGROUP CREATECONSUMER", name, groupname, consumername + ) + + def xgroup_setid( + self, + name: KeyT, + groupname: GroupT, + id: StreamIdT, + entries_read: Optional[int] = None, + ) -> ResponseT: + """ + Set the consumer group last delivered ID to something else. + name: name of the stream. + groupname: name of the consumer group. + id: ID of the last item in the stream to consider already delivered. + + For more information see https://redis.io/commands/xgroup-setid + """ + pieces = [name, groupname, id] + if entries_read is not None: + pieces.extend(["ENTRIESREAD", entries_read]) + return self.execute_command("XGROUP SETID", *pieces) + + def xinfo_consumers(self, name: KeyT, groupname: GroupT) -> ResponseT: + """ + Returns general information about the consumers in the group. + name: name of the stream. + groupname: name of the consumer group. + + For more information see https://redis.io/commands/xinfo-consumers + """ + return self.execute_command("XINFO CONSUMERS", name, groupname) + + def xinfo_groups(self, name: KeyT) -> ResponseT: + """ + Returns general information about the consumer groups of the stream. + name: name of the stream. 
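+
+ Illustrative sketch (assumes a connected client ``r``):
+ r.xgroup_create("mystream", "grp", id="0", mkstream=True)
+ r.xinfo_groups("mystream")  # one dict per consumer group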
+
+ For more information see https://redis.io/commands/xinfo-groups
+ """
+ return self.execute_command("XINFO GROUPS", name)
+
+ def xinfo_stream(self, name: KeyT, full: bool = False) -> ResponseT:
+ """
+ Returns general information about the stream.
+ name: name of the stream.
+ full: optional boolean, false by default. Return full summary
+
+ For more information see https://redis.io/commands/xinfo-stream
+ """
+ pieces = [name]
+ options = {}
+ if full:
+ pieces.append(b"FULL")
+ options = {"full": full}
+ return self.execute_command("XINFO STREAM", *pieces, **options)
+
+ def xlen(self, name: KeyT) -> ResponseT:
+ """
+ Returns the number of elements in a given stream.
+
+ For more information see https://redis.io/commands/xlen
+ """
+ return self.execute_command("XLEN", name)
+
+ def xpending(self, name: KeyT, groupname: GroupT) -> ResponseT:
+ """
+ Returns information about pending messages of a group.
+ name: name of the stream.
+ groupname: name of the consumer group.
+
+ For more information see https://redis.io/commands/xpending
+ """
+ return self.execute_command("XPENDING", name, groupname)
+
+ def xpending_range(
+ self,
+ name: KeyT,
+ groupname: GroupT,
+ min: StreamIdT,
+ max: StreamIdT,
+ count: int,
+ consumername: Union[ConsumerT, None] = None,
+ idle: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Returns information about pending messages, in a range.
+
+ name: name of the stream.
+ groupname: name of the consumer group.
+ idle: available from version 6.2. filter entries by their
+ idle-time, given in milliseconds (optional).
+ min: minimum stream ID.
+ max: maximum stream ID.
+ count: number of messages to return
+ consumername: name of a consumer to filter by (optional).
+ """
+ if {min, max, count} == {None}:
+ if idle is not None or consumername is not None:
+ raise DataError(
+ "if XPENDING is provided with idle time"
+ " or consumername, it must be provided"
+ " with min, max and count parameters"
+ )
+ return self.xpending(name, groupname)
+
+ pieces = [name, groupname]
+ if min is None or max is None or count is None:
+ raise DataError(
+ "XPENDING must be provided with min, max "
+ "and count parameters, or none of them."
+ )
+ # idle
+ try:
+ if int(idle) < 0:
+ raise DataError("XPENDING idle must be an integer >= 0")
+ pieces.extend(["IDLE", idle])
+ except TypeError:
+ pass
+ # count
+ try:
+ if int(count) < 0:
+ raise DataError("XPENDING count must be an integer >= 0")
+ pieces.extend([min, max, count])
+ except TypeError:
+ pass
+ # consumername
+ if consumername:
+ pieces.append(consumername)
+
+ return self.execute_command("XPENDING", *pieces, parse_detail=True)
+
+ def xrange(
+ self,
+ name: KeyT,
+ min: StreamIdT = "-",
+ max: StreamIdT = "+",
+ count: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Read stream values within an interval.
+
+ name: name of the stream.
+
+ min: first stream ID. defaults to '-',
+ meaning the earliest available.
+
+ max: last stream ID. defaults to '+',
+ meaning the latest available.
+
+ count: if set, only return this many items, beginning with the
+ earliest available.
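+
+ Illustrative sketch (assumes a populated stream "mystream"):
+ r.xrange("mystream", min="-", max="+", count=10)  # first 10 entries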
+
+ For more information see https://redis.io/commands/xrange
+ """
+ pieces = [min, max]
+ if count is not None:
+ if not isinstance(count, int) or count < 1:
+ raise DataError("XRANGE count must be a positive integer")
+ pieces.append(b"COUNT")
+ pieces.append(str(count))
+
+ return self.execute_command("XRANGE", name, *pieces)
+
+ def xread(
+ self,
+ streams: Dict[KeyT, StreamIdT],
+ count: Union[int, None] = None,
+ block: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Block and monitor multiple streams for new data.
+
+ streams: a dict of stream names to stream IDs, where
+ IDs indicate the last ID already seen.
+
+ count: if set, only return this many items, beginning with the
+ earliest available.
+
+ block: number of milliseconds to wait, if nothing already present.
+
+ For more information see https://redis.io/commands/xread
+ """
+ pieces = []
+ if block is not None:
+ if not isinstance(block, int) or block < 0:
+ raise DataError("XREAD block must be a non-negative integer")
+ pieces.append(b"BLOCK")
+ pieces.append(str(block))
+ if count is not None:
+ if not isinstance(count, int) or count < 1:
+ raise DataError("XREAD count must be a positive integer")
+ pieces.append(b"COUNT")
+ pieces.append(str(count))
+ if not isinstance(streams, dict) or len(streams) == 0:
+ raise DataError("XREAD streams must be a non-empty dict")
+ pieces.append(b"STREAMS")
+ keys, values = zip(*streams.items())
+ pieces.extend(keys)
+ pieces.extend(values)
+ return self.execute_command("XREAD", *pieces)
+
+ def xreadgroup(
+ self,
+ groupname: str,
+ consumername: str,
+ streams: Dict[KeyT, StreamIdT],
+ count: Union[int, None] = None,
+ block: Union[int, None] = None,
+ noack: bool = False,
+ ) -> ResponseT:
+ """
+ Read from a stream via a consumer group.
+
+ groupname: name of the consumer group.
+
+ consumername: name of the requesting consumer.
+
+ streams: a dict of stream names to stream IDs, where
+ IDs indicate the last ID already seen.
+
+ count: if set, only return this many items, beginning with the
+ earliest available.
+
+ block: number of milliseconds to wait, if nothing already present.
+
+ noack: do not add messages to the PEL
+
+ For more information see https://redis.io/commands/xreadgroup
+ """
+ pieces: list[EncodableT] = [b"GROUP", groupname, consumername]
+ if count is not None:
+ if not isinstance(count, int) or count < 1:
+ raise DataError("XREADGROUP count must be a positive integer")
+ pieces.append(b"COUNT")
+ pieces.append(str(count))
+ if block is not None:
+ if not isinstance(block, int) or block < 0:
+ raise DataError("XREADGROUP block must be a non-negative integer")
+ pieces.append(b"BLOCK")
+ pieces.append(str(block))
+ if noack:
+ pieces.append(b"NOACK")
+ if not isinstance(streams, dict) or len(streams) == 0:
+ raise DataError("XREADGROUP streams must be a non-empty dict")
+ pieces.append(b"STREAMS")
+ pieces.extend(streams.keys())
+ pieces.extend(streams.values())
+ return self.execute_command("XREADGROUP", *pieces)
+
+ def xrevrange(
+ self,
+ name: KeyT,
+ max: StreamIdT = "+",
+ min: StreamIdT = "-",
+ count: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Read stream values within an interval, in reverse order.
+
+ name: name of the stream
+
+ max: first stream ID. defaults to '+',
+ meaning the latest available.
+
+ min: last stream ID. defaults to '-',
+ meaning the earliest available.
+
+ count: if set, only return this many items, beginning with the
+ latest available.
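+
+ Illustrative sketch (assumes a populated stream "mystream"):
+ r.xrevrange("mystream", max="+", min="-", count=1)  # newest entry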
+ + For more information see https://redis.io/commands/xrevrange + """ + pieces: list[EncodableT] = [max, min] + if count is not None: + if not isinstance(count, int) or count < 1: + raise DataError("XREVRANGE count must be a positive integer") + pieces.append(b"COUNT") + pieces.append(str(count)) + + return self.execute_command("XREVRANGE", name, *pieces) + + def xtrim( + self, + name: KeyT, + maxlen: Union[int, None] = None, + approximate: bool = True, + minid: Union[StreamIdT, None] = None, + limit: Union[int, None] = None, + ) -> ResponseT: + """ + Trims old messages from a stream. + name: name of the stream. + maxlen: truncate old stream messages beyond this size + Can't be specified with minid. + approximate: actual stream length may be slightly more than maxlen + minid: the minimum id in the stream to query + Can't be specified with maxlen. + limit: specifies the maximum number of entries to retrieve + + For more information see https://redis.io/commands/xtrim + """ + pieces: list[EncodableT] = [] + if maxlen is not None and minid is not None: + raise DataError("Only one of ``maxlen`` or ``minid`` may be specified") + + if maxlen is None and minid is None: + raise DataError("One of ``maxlen`` or ``minid`` must be specified") + + if maxlen is not None: + pieces.append(b"MAXLEN") + if minid is not None: + pieces.append(b"MINID") + if approximate: + pieces.append(b"~") + if maxlen is not None: + pieces.append(maxlen) + if minid is not None: + pieces.append(minid) + if limit is not None: + pieces.append(b"LIMIT") + pieces.append(limit) + + return self.execute_command("XTRIM", name, *pieces) + + +AsyncStreamCommands = StreamCommands + + +class SortedSetCommands(CommandsProtocol): + """ + Redis commands for Sorted Sets data type. + see: https://redis.io/topics/data-types-intro#redis-sorted-sets + """ + + def zadd( + self, + name: KeyT, + mapping: Mapping[AnyKeyT, EncodableT], + nx: bool = False, + xx: bool = False, + ch: bool = False, + incr: bool = False, + gt: bool = False, + lt: bool = False, + ) -> ResponseT: + """ + Set any number of element-name, score pairs to the key ``name``. Pairs + are specified as a dict of element-names keys to score values. + + ``nx`` forces ZADD to only create new elements and not to update + scores for elements that already exist. + + ``xx`` forces ZADD to only update scores of elements that already + exist. New elements will not be added. + + ``ch`` modifies the return value to be the numbers of elements changed. + Changed elements include new elements that were added and elements + whose scores changed. + + ``incr`` modifies ZADD to behave like ZINCRBY. In this mode only a + single element/score pair can be specified and the score is the amount + the existing score will be incremented by. When using this mode the + return value of ZADD will be the new score of the element. + + ``LT`` Only update existing elements if the new score is less than + the current score. This flag doesn't prevent adding new elements. + + ``GT`` Only update existing elements if the new score is greater than + the current score. This flag doesn't prevent adding new elements. + + The return value of ZADD varies based on the mode specified. With no + options, ZADD returns the number of new elements added to the sorted + set. + + ``NX``, ``LT``, and ``GT`` are mutually exclusive options. 
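+
+ Illustrative sketch (assumes a connected client ``r``):
+ r.zadd("scores", {"alice": 10, "bob": 5})  # 2 new members added
+ r.zadd("scores", {"bob": 7}, gt=True, ch=True)  # 1, score raised to 7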
+
+ For more information see https://redis.io/commands/zadd
+ """
+ if not mapping:
+ raise DataError("ZADD requires at least one element/score pair")
+ if nx and xx:
+ raise DataError("ZADD allows either 'nx' or 'xx', not both")
+ if gt and lt:
+ raise DataError("ZADD allows either 'gt' or 'lt', not both")
+ if incr and len(mapping) != 1:
+ raise DataError(
+ "ZADD option 'incr' only works when passing a "
+ "single element/score pair"
+ )
+ if nx and (gt or lt):
+ raise DataError("Only one of 'nx', 'lt', or 'gt' may be defined.")
+
+ pieces: list[EncodableT] = []
+ options = {}
+ if nx:
+ pieces.append(b"NX")
+ if xx:
+ pieces.append(b"XX")
+ if ch:
+ pieces.append(b"CH")
+ if incr:
+ pieces.append(b"INCR")
+ options["as_score"] = True
+ if gt:
+ pieces.append(b"GT")
+ if lt:
+ pieces.append(b"LT")
+ for pair in mapping.items():
+ pieces.append(pair[1])
+ pieces.append(pair[0])
+ return self.execute_command("ZADD", name, *pieces, **options)
+
+ def zcard(self, name: KeyT) -> ResponseT:
+ """
+ Return the number of elements in the sorted set ``name``
+
+ For more information see https://redis.io/commands/zcard
+ """
+ return self.execute_command("ZCARD", name)
+
+ def zcount(self, name: KeyT, min: ZScoreBoundT, max: ZScoreBoundT) -> ResponseT:
+ """
+ Returns the number of elements in the sorted set at key ``name`` with
+ a score between ``min`` and ``max``.
+
+ For more information see https://redis.io/commands/zcount
+ """
+ return self.execute_command("ZCOUNT", name, min, max)
+
+ def zdiff(self, keys: KeysT, withscores: bool = False) -> ResponseT:
+ """
+ Returns the difference between the first and all successive input
+ sorted sets provided in ``keys``.
+
+ For more information see https://redis.io/commands/zdiff
+ """
+ pieces = [len(keys), *keys]
+ if withscores:
+ pieces.append("WITHSCORES")
+ return self.execute_command("ZDIFF", *pieces)
+
+ def zdiffstore(self, dest: KeyT, keys: KeysT) -> ResponseT:
+ """
+ Computes the difference between the first and all successive input
+ sorted sets provided in ``keys`` and stores the result in ``dest``.
+
+ For more information see https://redis.io/commands/zdiffstore
+ """
+ pieces = [len(keys), *keys]
+ return self.execute_command("ZDIFFSTORE", dest, *pieces)
+
+ def zincrby(self, name: KeyT, amount: float, value: EncodableT) -> ResponseT:
+ """
+ Increment the score of ``value`` in sorted set ``name`` by ``amount``
+
+ For more information see https://redis.io/commands/zincrby
+ """
+ return self.execute_command("ZINCRBY", name, amount, value)
+
+ def zinter(
+ self, keys: KeysT, aggregate: Union[str, None] = None, withscores: bool = False
+ ) -> ResponseT:
+ """
+ Return the intersection of multiple sorted sets specified by ``keys``.
+ With the ``aggregate`` option, it is possible to specify how the
+ results of the intersection are aggregated. This option defaults to SUM,
+ where the score of an element is summed across the inputs where it
+ exists. When this option is set to either MIN or MAX, the resulting
+ set will contain the minimum or maximum score of an element across
+ the inputs where it exists.
+
+ For more information see https://redis.io/commands/zinter
+ """
+ return self._zaggregate("ZINTER", None, keys, aggregate, withscores=withscores)
+
+ def zinterstore(
+ self,
+ dest: KeyT,
+ keys: Union[Sequence[KeyT], Mapping[AnyKeyT, float]],
+ aggregate: Union[str, None] = None,
+ ) -> ResponseT:
+ """
+ Intersect multiple sorted sets specified by ``keys`` into a new
+ sorted set, ``dest``.
Scores in the destination will be aggregated
+ based on the ``aggregate``. This option defaults to SUM, where the
+ score of an element is summed across the inputs where it exists.
+ When this option is set to either MIN or MAX, the resulting set will
+ contain the minimum or maximum score of an element across the inputs
+ where it exists.
+
+ For more information see https://redis.io/commands/zinterstore
+ """
+ return self._zaggregate("ZINTERSTORE", dest, keys, aggregate)
+
+ def zintercard(
+ self, numkeys: int, keys: List[str], limit: int = 0
+ ) -> Union[Awaitable[int], int]:
+ """
+ Return the cardinality of the intersection of multiple sorted sets
+ specified by ``keys``.
+ When LIMIT is provided (defaults to 0, meaning unlimited), if the intersection
+ cardinality reaches limit partway through the computation, the algorithm will
+ exit and yield limit as the cardinality.
+
+ For more information see https://redis.io/commands/zintercard
+ """
+ args = [numkeys, *keys, "LIMIT", limit]
+ return self.execute_command("ZINTERCARD", *args)
+
+ def zlexcount(self, name: KeyT, min: EncodableT, max: EncodableT) -> ResponseT:
+ """
+ Return the number of items in the sorted set ``name`` between the
+ lexicographical range ``min`` and ``max``.
+
+ For more information see https://redis.io/commands/zlexcount
+ """
+ return self.execute_command("ZLEXCOUNT", name, min, max)
+
+ def zpopmax(self, name: KeyT, count: Union[int, None] = None) -> ResponseT:
+ """
+ Remove and return up to ``count`` members with the highest scores
+ from the sorted set ``name``.
+
+ For more information see https://redis.io/commands/zpopmax
+ """
+ args = (count is not None) and [count] or []
+ options = {"withscores": True}
+ return self.execute_command("ZPOPMAX", name, *args, **options)
+
+ def zpopmin(self, name: KeyT, count: Union[int, None] = None) -> ResponseT:
+ """
+ Remove and return up to ``count`` members with the lowest scores
+ from the sorted set ``name``.
+
+ For more information see https://redis.io/commands/zpopmin
+ """
+ args = (count is not None) and [count] or []
+ options = {"withscores": True}
+ return self.execute_command("ZPOPMIN", name, *args, **options)
+
+ def zrandmember(
+ self, key: KeyT, count: Optional[int] = None, withscores: bool = False
+ ) -> ResponseT:
+ """
+ Return a random element from the sorted set value stored at key.
+
+ ``count`` if the argument is positive, return an array of distinct
+ fields. If called with a negative count, the behavior changes and
+ the command is allowed to return the same field multiple times.
+ In this case, the number of returned fields is the absolute value
+ of the specified count.
+
+ ``withscores`` The optional WITHSCORES modifier changes the reply so it
+ includes the respective scores of the randomly selected elements from
+ the sorted set.
+
+ For more information see https://redis.io/commands/zrandmember
+ """
+ params = []
+ if count is not None:
+ params.append(count)
+ if withscores:
+ params.append("WITHSCORES")
+
+ return self.execute_command("ZRANDMEMBER", key, *params)
+
+ def bzpopmax(self, keys: KeysT, timeout: TimeoutSecT = 0) -> ResponseT:
+ """
+ ZPOPMAX a value off of the first non-empty sorted set
+ named in the ``keys`` list.
+
+ If none of the sorted sets in ``keys`` has a value to ZPOPMAX,
+ then block for ``timeout`` seconds, or until a member gets added
+ to one of the sorted sets.
+
+ If timeout is 0, then block indefinitely.
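+
+ Illustrative sketch (assumes a connected client ``r``):
+ r.bzpopmax(["zset-a", "zset-b"], timeout=5)  # (key, member, score) or None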
+
+ For more information see https://redis.io/commands/bzpopmax
+ """
+ if timeout is None:
+ timeout = 0
+ keys = list_or_args(keys, None)
+ keys.append(timeout)
+ return self.execute_command("BZPOPMAX", *keys)
+
+ def bzpopmin(self, keys: KeysT, timeout: TimeoutSecT = 0) -> ResponseT:
+ """
+ ZPOPMIN a value off of the first non-empty sorted set
+ named in the ``keys`` list.
+
+ If none of the sorted sets in ``keys`` has a value to ZPOPMIN,
+ then block for ``timeout`` seconds, or until a member gets added
+ to one of the sorted sets.
+
+ If timeout is 0, then block indefinitely.
+
+ For more information see https://redis.io/commands/bzpopmin
+ """
+ if timeout is None:
+ timeout = 0
+ keys: list[EncodableT] = list_or_args(keys, None)
+ keys.append(timeout)
+ return self.execute_command("BZPOPMIN", *keys)
+
+ def zmpop(
+ self,
+ num_keys: int,
+ keys: List[str],
+ min: Optional[bool] = False,
+ max: Optional[bool] = False,
+ count: Optional[int] = 1,
+ ) -> Union[Awaitable[list], list]:
+ """
+ Pop ``count`` values (default 1) off of the first non-empty sorted set
+ named in the ``keys`` list.
+
+ For more information see https://redis.io/commands/zmpop
+ """
+ args = [num_keys] + keys
+ if (min and max) or (not min and not max):
+ raise DataError("Either min or max, but not both, must be set")
+ elif min:
+ args.append("MIN")
+ else:
+ args.append("MAX")
+ if count != 1:
+ args.extend(["COUNT", count])
+
+ return self.execute_command("ZMPOP", *args)
+
+ def bzmpop(
+ self,
+ timeout: float,
+ numkeys: int,
+ keys: List[str],
+ min: Optional[bool] = False,
+ max: Optional[bool] = False,
+ count: Optional[int] = 1,
+ ) -> Optional[list]:
+ """
+ Pop ``count`` values (default 1) off of the first non-empty sorted set
+ named in the ``keys`` list.
+
+ If none of the sorted sets in ``keys`` has a value to pop,
+ then block for ``timeout`` seconds, or until a member gets added
+ to one of the sorted sets.
+
+ If timeout is 0, then block indefinitely.
+
+ For more information see https://redis.io/commands/bzmpop
+ """
+ args = [timeout, numkeys, *keys]
+ if (min and max) or (not min and not max):
+ raise DataError("Either min or max, but not both, must be set")
+ elif min:
+ args.append("MIN")
+ else:
+ args.append("MAX")
+ args.extend(["COUNT", count])
+
+ return self.execute_command("BZMPOP", *args)
+
+ def _zrange(
+ self,
+ command,
+ dest: Union[KeyT, None],
+ name: KeyT,
+ start: int,
+ end: int,
+ desc: bool = False,
+ byscore: bool = False,
+ bylex: bool = False,
+ withscores: bool = False,
+ score_cast_func: Union[type, Callable, None] = float,
+ offset: Union[int, None] = None,
+ num: Union[int, None] = None,
+ ) -> ResponseT:
+ if byscore and bylex:
+ raise DataError("``byscore`` and ``bylex`` can not be specified together.")
+ if (offset is not None and num is None) or (num is not None and offset is None):
+ raise DataError("``offset`` and ``num`` must both be specified.")
+ if bylex and withscores:
+ raise DataError(
+ "``withscores`` not supported in combination with ``bylex``."
+
+ )
+ pieces = [command]
+ if dest:
+ pieces.append(dest)
+ pieces.extend([name, start, end])
+ if byscore:
+ pieces.append("BYSCORE")
+ if bylex:
+ pieces.append("BYLEX")
+ if desc:
+ pieces.append("REV")
+ if offset is not None and num is not None:
+ pieces.extend(["LIMIT", offset, num])
+ if withscores:
+ pieces.append("WITHSCORES")
+ options = {"withscores": withscores, "score_cast_func": score_cast_func}
+ return self.execute_command(*pieces, **options)
+
+ def zrange(
+ self,
+ name: KeyT,
+ start: int,
+ end: int,
+ desc: bool = False,
+ withscores: bool = False,
+ score_cast_func: Union[type, Callable] = float,
+ byscore: bool = False,
+ bylex: bool = False,
+ offset: Union[int, None] = None,
+ num: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Return a range of values from sorted set ``name`` between
+ ``start`` and ``end`` sorted in ascending order.
+
+ ``start`` and ``end`` can be negative, indicating the end of the range.
+
+ ``desc`` a boolean indicating whether to sort the results in reversed
+ order.
+
+ ``withscores`` indicates to return the scores along with the values.
+ The return type is a list of (value, score) pairs.
+
+ ``score_cast_func`` a callable used to cast the score return value.
+
+ ``byscore`` when set to True, returns the range of elements from the
+ sorted set having scores equal or between ``start`` and ``end``.
+
+ ``bylex`` when set to True, returns the range of elements from the
+ sorted set between the ``start`` and ``end`` lexicographical closed
+ range intervals.
+ Valid ``start`` and ``end`` must start with ( or [, in order to specify
+ whether the range interval is exclusive or inclusive, respectively.
+
+ If ``offset`` and ``num`` are specified, then return a slice of the
+ range. They can't be provided when using ``bylex``.
+
+ For more information see https://redis.io/commands/zrange
+ """
+ # Need to support ``desc`` also when using old redis version
+ # because it was supported in 3.5.3 (of redis-py)
+ if not byscore and not bylex and (offset is None and num is None) and desc:
+ return self.zrevrange(name, start, end, withscores, score_cast_func)
+
+ return self._zrange(
+ "ZRANGE",
+ None,
+ name,
+ start,
+ end,
+ desc,
+ byscore,
+ bylex,
+ withscores,
+ score_cast_func,
+ offset,
+ num,
+ )
+
+ def zrevrange(
+ self,
+ name: KeyT,
+ start: int,
+ end: int,
+ withscores: bool = False,
+ score_cast_func: Union[type, Callable] = float,
+ ) -> ResponseT:
+ """
+ Return a range of values from sorted set ``name`` between
+ ``start`` and ``end`` sorted in descending order.
+
+ ``start`` and ``end`` can be negative, indicating the end of the range.
+
+ ``withscores`` indicates to return the scores along with the values.
+ The return type is a list of (value, score) pairs.
+
+ ``score_cast_func`` a callable used to cast the score return value
+
+ For more information see https://redis.io/commands/zrevrange
+ """
+ pieces = ["ZREVRANGE", name, start, end]
+ if withscores:
+ pieces.append(b"WITHSCORES")
+ options = {"withscores": withscores, "score_cast_func": score_cast_func}
+ return self.execute_command(*pieces, **options)
+
+ def zrangestore(
+ self,
+ dest: KeyT,
+ name: KeyT,
+ start: int,
+ end: int,
+ byscore: bool = False,
+ bylex: bool = False,
+ desc: bool = False,
+ offset: Union[int, None] = None,
+ num: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Stores in ``dest`` the result of a range of values from sorted set
+ ``name`` between ``start`` and ``end`` sorted in ascending order.
+
+ ``start`` and ``end`` can be negative, indicating the end of the range.
+
+ ``byscore`` when set to True, returns the range of elements from the
+ sorted set having scores equal or between ``start`` and ``end``.
+
+ ``bylex`` when set to True, returns the range of elements from the
+ sorted set between the ``start`` and ``end`` lexicographical closed
+ range intervals.
+ Valid ``start`` and ``end`` must start with ( or [, in order to specify
+ whether the range interval is exclusive or inclusive, respectively.
+
+ ``desc`` a boolean indicating whether to sort the results in reversed
+ order.
+
+ If ``offset`` and ``num`` are specified, then return a slice of the
+ range. They can't be provided when using ``bylex``.
+
+ For more information see https://redis.io/commands/zrangestore
+ """
+ return self._zrange(
+ "ZRANGESTORE",
+ dest,
+ name,
+ start,
+ end,
+ desc,
+ byscore,
+ bylex,
+ False,
+ None,
+ offset,
+ num,
+ )
+
+ def zrangebylex(
+ self,
+ name: KeyT,
+ min: EncodableT,
+ max: EncodableT,
+ start: Union[int, None] = None,
+ num: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Return the lexicographical range of values from sorted set ``name``
+ between ``min`` and ``max``.
+
+ If ``start`` and ``num`` are specified, then return a slice of the
+ range.
+
+ For more information see https://redis.io/commands/zrangebylex
+ """
+ if (start is not None and num is None) or (num is not None and start is None):
+ raise DataError("``start`` and ``num`` must both be specified")
+ pieces = ["ZRANGEBYLEX", name, min, max]
+ if start is not None and num is not None:
+ pieces.extend([b"LIMIT", start, num])
+ return self.execute_command(*pieces)
+
+ def zrevrangebylex(
+ self,
+ name: KeyT,
+ max: EncodableT,
+ min: EncodableT,
+ start: Union[int, None] = None,
+ num: Union[int, None] = None,
+ ) -> ResponseT:
+ """
+ Return the reversed lexicographical range of values from sorted set
+ ``name`` between ``max`` and ``min``.
+
+ If ``start`` and ``num`` are specified, then return a slice of the
+ range.
+
+ For more information see https://redis.io/commands/zrevrangebylex
+ """
+ if (start is not None and num is None) or (num is not None and start is None):
+ raise DataError("``start`` and ``num`` must both be specified")
+ pieces = ["ZREVRANGEBYLEX", name, max, min]
+ if start is not None and num is not None:
+ pieces.extend(["LIMIT", start, num])
+ return self.execute_command(*pieces)
+
+ def zrangebyscore(
+ self,
+ name: KeyT,
+ min: ZScoreBoundT,
+ max: ZScoreBoundT,
+ start: Union[int, None] = None,
+ num: Union[int, None] = None,
+ withscores: bool = False,
+ score_cast_func: Union[type, Callable] = float,
+ ) -> ResponseT:
+ """
+ Return a range of values from the sorted set ``name`` with scores
+ between ``min`` and ``max``.
+
+ If ``start`` and ``num`` are specified, then return a slice
+ of the range.
+
+ ``withscores`` indicates to return the scores along with the values.
+ The return type is a list of (value, score) pairs.
+
+ ``score_cast_func`` a callable used to cast the score return value
+
+ For more information see https://redis.io/commands/zrangebyscore
+ """
+ if (start is not None and num is None) or (num is not None and start is None):
+ raise DataError("``start`` and ``num`` must both be specified")
+ pieces = ["ZRANGEBYSCORE", name, min, max]
+ if start is not None and num is not None:
+ pieces.extend(["LIMIT", start, num])
+ if withscores:
+ pieces.append("WITHSCORES")
+ options = {"withscores": withscores, "score_cast_func": score_cast_func}
+ return self.execute_command(*pieces, **options)
+
+ def zrevrangebyscore(
+ self,
+ name: KeyT,
+ max: ZScoreBoundT,
+ min: ZScoreBoundT,
+ start: Union[int, None] = None,
+ num: Union[int, None] = None,
+ withscores: bool = False,
+ score_cast_func: Union[type, Callable] = float,
+ ):
+ """
+ Return a range of values from the sorted set ``name`` with scores
+ between ``min`` and ``max`` in descending order.
+
+ If ``start`` and ``num`` are specified, then return a slice
+ of the range.
+
+ ``withscores`` indicates to return the scores along with the values.
+ The return type is a list of (value, score) pairs.
+
+ ``score_cast_func`` a callable used to cast the score return value
+
+ For more information see https://redis.io/commands/zrevrangebyscore
+ """
+ if (start is not None and num is None) or (num is not None and start is None):
+ raise DataError("``start`` and ``num`` must both be specified")
+ pieces = ["ZREVRANGEBYSCORE", name, max, min]
+ if start is not None and num is not None:
+ pieces.extend(["LIMIT", start, num])
+ if withscores:
+ pieces.append("WITHSCORES")
+ options = {"withscores": withscores, "score_cast_func": score_cast_func}
+ return self.execute_command(*pieces, **options)
+
+ def zrank(
+ self,
+ name: KeyT,
+ value: EncodableT,
+ withscore: bool = False,
+ ) -> ResponseT:
+ """
+ Returns a 0-based value indicating the rank of ``value`` in sorted set
+ ``name``.
+ The optional WITHSCORE argument supplements the command's
+ reply with the score of the element returned.
+
+ For more information see https://redis.io/commands/zrank
+ """
+ if withscore:
+ return self.execute_command("ZRANK", name, value, "WITHSCORE")
+ return self.execute_command("ZRANK", name, value)
+
+ def zrem(self, name: KeyT, *values: FieldT) -> ResponseT:
+ """
+ Remove member ``values`` from sorted set ``name``
+
+ For more information see https://redis.io/commands/zrem
+ """
+ return self.execute_command("ZREM", name, *values)
+
+ def zremrangebylex(self, name: KeyT, min: EncodableT, max: EncodableT) -> ResponseT:
+ """
+ Remove all elements in the sorted set ``name`` between the
+ lexicographical range specified by ``min`` and ``max``.
+
+ Returns the number of elements removed.
+
+ For more information see https://redis.io/commands/zremrangebylex
+ """
+ return self.execute_command("ZREMRANGEBYLEX", name, min, max)
+
+ def zremrangebyrank(self, name: KeyT, min: int, max: int) -> ResponseT:
+ """
+ Remove all elements in the sorted set ``name`` with ranks between
+ ``min`` and ``max``. Values are 0-based, ordered from smallest score
+ to largest. Values can be negative indicating the highest scores.
+ Returns the number of elements removed.
+
+ For more information see https://redis.io/commands/zremrangebyrank
+ """
+ return self.execute_command("ZREMRANGEBYRANK", name, min, max)
+
+ def zremrangebyscore(
+ self, name: KeyT, min: ZScoreBoundT, max: ZScoreBoundT
+ ) -> ResponseT:
+ """
+ Remove all elements in the sorted set ``name`` with scores
+ between ``min`` and ``max``. Returns the number of elements removed.
+
+ For more information see https://redis.io/commands/zremrangebyscore
+ """
+ return self.execute_command("ZREMRANGEBYSCORE", name, min, max)
+
+ def zrevrank(
+ self,
+ name: KeyT,
+ value: EncodableT,
+ withscore: bool = False,
+ ) -> ResponseT:
+ """
+ Returns a 0-based value indicating the descending rank of
+ ``value`` in sorted set ``name``.
+ The optional ``withscore`` argument supplements the command's
+ reply with the score of the element returned.
+
+ For more information see https://redis.io/commands/zrevrank
+ """
+ if withscore:
+ return self.execute_command("ZREVRANK", name, value, "WITHSCORE")
+ return self.execute_command("ZREVRANK", name, value)
+
+ def zscore(self, name: KeyT, value: EncodableT) -> ResponseT:
+ """
+ Return the score of element ``value`` in sorted set ``name``
+
+ For more information see https://redis.io/commands/zscore
+ """
+ return self.execute_command("ZSCORE", name, value)
+
+ def zunion(
+ self,
+ keys: Union[Sequence[KeyT], Mapping[AnyKeyT, float]],
+ aggregate: Union[str, None] = None,
+ withscores: bool = False,
+ ) -> ResponseT:
+ """
+ Return the union of multiple sorted sets specified by ``keys``.
+ ``keys`` can be provided as a dictionary of keys and their weights.
+ Scores will be aggregated based on the ``aggregate``, or SUM if
+ none is provided.
+
+ For more information see https://redis.io/commands/zunion
+ """
+ return self._zaggregate("ZUNION", None, keys, aggregate, withscores=withscores)
+
+ def zunionstore(
+ self,
+ dest: KeyT,
+ keys: Union[Sequence[KeyT], Mapping[AnyKeyT, float]],
+ aggregate: Union[str, None] = None,
+ ) -> ResponseT:
+ """
+ Union multiple sorted sets specified by ``keys`` into
+ a new sorted set, ``dest``. Scores in the destination will be
+ aggregated based on the ``aggregate``, or SUM if none is provided.
+
+ For more information see https://redis.io/commands/zunionstore
+ """
+ return self._zaggregate("ZUNIONSTORE", dest, keys, aggregate)
+
+ def zmscore(self, key: KeyT, members: List[str]) -> ResponseT:
+ """
+ Returns the scores associated with the specified members
+ in the sorted set stored at key.
+ ``members`` should be a list of member names.
+ The return type is a list of scores.
+ If a member does not exist, None is returned
+ in the corresponding position.
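+
+ Illustrative sketch (assumes a connected client ``r``):
+ r.zadd("scores", {"alice": 10})
+ r.zmscore("scores", ["alice", "missing"])  # [10.0, None]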
+ + For more information see https://redis.io/commands/zmscore + """ + if not members: + raise DataError("ZMSCORE members must be a non-empty list") + pieces = [key] + members + return self.execute_command("ZMSCORE", *pieces) + + def _zaggregate( + self, + command: str, + dest: Union[KeyT, None], + keys: Union[Sequence[KeyT], Mapping[AnyKeyT, float]], + aggregate: Union[str, None] = None, + **options, + ) -> ResponseT: + pieces: list[EncodableT] = [command] + if dest is not None: + pieces.append(dest) + pieces.append(len(keys)) + if isinstance(keys, dict): + keys, weights = keys.keys(), keys.values() + else: + weights = None + pieces.extend(keys) + if weights: + pieces.append(b"WEIGHTS") + pieces.extend(weights) + if aggregate: + if aggregate.upper() in ["SUM", "MIN", "MAX"]: + pieces.append(b"AGGREGATE") + pieces.append(aggregate) + else: + raise DataError("aggregate can be sum, min or max.") + if options.get("withscores", False): + pieces.append(b"WITHSCORES") + return self.execute_command(*pieces, **options) + + +AsyncSortedSetCommands = SortedSetCommands + + +class HyperlogCommands(CommandsProtocol): + """ + Redis commands of HyperLogLogs data type. + see: https://redis.io/topics/data-types-intro#hyperloglogs + """ + + def pfadd(self, name: KeyT, *values: FieldT) -> ResponseT: + """ + Adds the specified elements to the specified HyperLogLog. + + For more information see https://redis.io/commands/pfadd + """ + return self.execute_command("PFADD", name, *values) + + def pfcount(self, *sources: KeyT) -> ResponseT: + """ + Return the approximated cardinality of + the set observed by the HyperLogLog at key(s). + + For more information see https://redis.io/commands/pfcount + """ + return self.execute_command("PFCOUNT", *sources) + + def pfmerge(self, dest: KeyT, *sources: KeyT) -> ResponseT: + """ + Merge N different HyperLogLogs into a single one. + + For more information see https://redis.io/commands/pfmerge + """ + return self.execute_command("PFMERGE", dest, *sources) + + +AsyncHyperlogCommands = HyperlogCommands + + +class HashCommands(CommandsProtocol): + """ + Redis commands for Hash data type. 
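+
+ Minimal usage sketch (illustrative only; assumes a connected client ``r``):
+ r.hset("user:1", mapping={"name": "Ada", "lang": "en"})
+ r.hget("user:1", "name")  # b"Ada" unless decode_responses=True
+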
+ see: https://redis.io/topics/data-types-intro#redis-hashes + """ + + def hdel(self, name: str, *keys: str) -> Union[Awaitable[int], int]: + """ + Delete ``keys`` from hash ``name`` + + For more information see https://redis.io/commands/hdel + """ + return self.execute_command("HDEL", name, *keys) + + def hexists(self, name: str, key: str) -> Union[Awaitable[bool], bool]: + """ + Returns a boolean indicating if ``key`` exists within hash ``name`` + + For more information see https://redis.io/commands/hexists + """ + return self.execute_command("HEXISTS", name, key) + + def hget( + self, name: str, key: str + ) -> Union[Awaitable[Optional[str]], Optional[str]]: + """ + Return the value of ``key`` within the hash ``name`` + + For more information see https://redis.io/commands/hget + """ + return self.execute_command("HGET", name, key) + + def hgetall(self, name: str) -> Union[Awaitable[dict], dict]: + """ + Return a Python dict of the hash's name/value pairs + + For more information see https://redis.io/commands/hgetall + """ + return self.execute_command("HGETALL", name) + + def hincrby( + self, name: str, key: str, amount: int = 1 + ) -> Union[Awaitable[int], int]: + """ + Increment the value of ``key`` in hash ``name`` by ``amount`` + + For more information see https://redis.io/commands/hincrby + """ + return self.execute_command("HINCRBY", name, key, amount) + + def hincrbyfloat( + self, name: str, key: str, amount: float = 1.0 + ) -> Union[Awaitable[float], float]: + """ + Increment the value of ``key`` in hash ``name`` by floating ``amount`` + + For more information see https://redis.io/commands/hincrbyfloat + """ + return self.execute_command("HINCRBYFLOAT", name, key, amount) + + def hkeys(self, name: str) -> Union[Awaitable[List], List]: + """ + Return the list of keys within hash ``name`` + + For more information see https://redis.io/commands/hkeys + """ + return self.execute_command("HKEYS", name) + + def hlen(self, name: str) -> Union[Awaitable[int], int]: + """ + Return the number of elements in hash ``name`` + + For more information see https://redis.io/commands/hlen + """ + return self.execute_command("HLEN", name) + + def hset( + self, + name: str, + key: Optional[str] = None, + value: Optional[str] = None, + mapping: Optional[dict] = None, + items: Optional[list] = None, + ) -> Union[Awaitable[int], int]: + """ + Set ``key`` to ``value`` within hash ``name``, + ``mapping`` accepts a dict of key/value pairs that will be + added to hash ``name``. + ``items`` accepts a list of key/value pairs that will be + added to hash ``name``. + Returns the number of fields that were added. + + For more information see https://redis.io/commands/hset + """ + if key is None and not mapping and not items: + raise DataError("'hset' with no key value pairs") + pieces = [] + if items: + pieces.extend(items) + if key is not None: + pieces.extend((key, value)) + if mapping: + for pair in mapping.items(): + pieces.extend(pair) + + return self.execute_command("HSET", name, *pieces) + + def hsetnx(self, name: str, key: str, value: str) -> Union[Awaitable[bool], bool]: + """ + Set ``key`` to ``value`` within hash ``name`` if ``key`` does not + exist. Returns 1 if HSETNX created a field, otherwise 0. 
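+
+ Illustrative sketch (assumes a connected client ``r``):
+ r.hsetnx("user:1", "created", "2024-01-01")  # 1, field was set
+ r.hsetnx("user:1", "created", "2025-01-01")  # 0, left unchanged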
+ + For more information see https://redis.io/commands/hsetnx + """ + return self.execute_command("HSETNX", name, key, value) + + def hmset(self, name: str, mapping: dict) -> Union[Awaitable[str], str]: + """ + Set key to value within hash ``name`` for each corresponding + key and value from the ``mapping`` dict. + + For more information see https://redis.io/commands/hmset + """ + warnings.warn( + f"{self.__class__.__name__}.hmset() is deprecated. " + f"Use {self.__class__.__name__}.hset() instead.", + DeprecationWarning, + stacklevel=2, + ) + if not mapping: + raise DataError("'hmset' with 'mapping' of length 0") + items = [] + for pair in mapping.items(): + items.extend(pair) + return self.execute_command("HMSET", name, *items) + + def hmget(self, name: str, keys: List, *args: List) -> Union[Awaitable[List], List]: + """ + Returns a list of values ordered identically to ``keys`` + + For more information see https://redis.io/commands/hmget + """ + args = list_or_args(keys, args) + return self.execute_command("HMGET", name, *args) + + def hvals(self, name: str) -> Union[Awaitable[List], List]: + """ + Return the list of values within hash ``name`` + + For more information see https://redis.io/commands/hvals + """ + return self.execute_command("HVALS", name) + + def hstrlen(self, name: str, key: str) -> Union[Awaitable[int], int]: + """ + Return the number of bytes stored in the value of ``key`` + within hash ``name`` + + For more information see https://redis.io/commands/hstrlen + """ + return self.execute_command("HSTRLEN", name, key) + + +AsyncHashCommands = HashCommands + + +class Script: + """ + An executable Lua script object returned by ``register_script`` + """ + + def __init__(self, registered_client: "Redis", script: ScriptTextT): + self.registered_client = registered_client + self.script = script + # Precalculate and store the SHA1 hex digest of the script. + + if isinstance(script, str): + # We need the encoding from the client in order to generate an + # accurate byte representation of the script + try: + encoder = registered_client.connection_pool.get_encoder() + except AttributeError: + # Cluster + encoder = registered_client.get_encoder() + script = encoder.encode(script) + self.sha = hashlib.sha1(script).hexdigest() + + def __call__( + self, + keys: Union[Sequence[KeyT], None] = None, + args: Union[Iterable[EncodableT], None] = None, + client: Union["Redis", None] = None, + ): + """Execute the script, passing any required ``args``""" + keys = keys or [] + args = args or [] + if client is None: + client = self.registered_client + args = tuple(keys) + tuple(args) + # make sure the Redis server knows about the script + from redis.client import Pipeline + + if isinstance(client, Pipeline): + # Make sure the pipeline can register the script before executing. + client.scripts.add(self) + try: + return client.evalsha(self.sha, len(keys), *args) + except NoScriptError: + # Maybe the client is pointed to a different server than the client + # that created this instance? + # Overwrite the sha just in case there was a discrepancy. + self.sha = client.script_load(self.script) + return client.evalsha(self.sha, len(keys), *args) + + +class AsyncScript: + """ + An executable Lua script object returned by ``register_script`` + """ + + def __init__(self, registered_client: "AsyncRedis", script: ScriptTextT): + self.registered_client = registered_client + self.script = script + # Precalculate and store the SHA1 hex digest of the script. 
+ + if isinstance(script, str): + # We need the encoding from the client in order to generate an + # accurate byte representation of the script + try: + encoder = registered_client.connection_pool.get_encoder() + except AttributeError: + # Cluster + encoder = registered_client.get_encoder() + script = encoder.encode(script) + self.sha = hashlib.sha1(script).hexdigest() + + async def __call__( + self, + keys: Union[Sequence[KeyT], None] = None, + args: Union[Iterable[EncodableT], None] = None, + client: Union["AsyncRedis", None] = None, + ): + """Execute the script, passing any required ``args``""" + keys = keys or [] + args = args or [] + if client is None: + client = self.registered_client + args = tuple(keys) + tuple(args) + # make sure the Redis server knows about the script + from redis.asyncio.client import Pipeline + + if isinstance(client, Pipeline): + # Make sure the pipeline can register the script before executing. + client.scripts.add(self) + try: + return await client.evalsha(self.sha, len(keys), *args) + except NoScriptError: + # Maybe the client is pointed to a different server than the client + # that created this instance? + # Overwrite the sha just in case there was a discrepancy. + self.sha = await client.script_load(self.script) + return await client.evalsha(self.sha, len(keys), *args) + + +class PubSubCommands(CommandsProtocol): + """ + Redis PubSub commands. + see https://redis.io/topics/pubsub + """ + + def publish(self, channel: ChannelT, message: EncodableT, **kwargs) -> ResponseT: + """ + Publish ``message`` on ``channel``. + Returns the number of subscribers the message was delivered to. + + For more information see https://redis.io/commands/publish + """ + return self.execute_command("PUBLISH", channel, message, **kwargs) + + def spublish(self, shard_channel: ChannelT, message: EncodableT) -> ResponseT: + """ + Posts a message to the given shard channel. 
+ Returns the number of clients that received the message + + For more information see https://redis.io/commands/spublish + """ + return self.execute_command("SPUBLISH", shard_channel, message) + + def pubsub_channels(self, pattern: PatternT = "*", **kwargs) -> ResponseT: + """ + Return a list of channels that have at least one subscriber + + For more information see https://redis.io/commands/pubsub-channels + """ + return self.execute_command("PUBSUB CHANNELS", pattern, **kwargs) + + def pubsub_shardchannels(self, pattern: PatternT = "*", **kwargs) -> ResponseT: + """ + Return a list of shard_channels that have at least one subscriber + + For more information see https://redis.io/commands/pubsub-shardchannels + """ + return self.execute_command("PUBSUB SHARDCHANNELS", pattern, **kwargs) + + def pubsub_numpat(self, **kwargs) -> ResponseT: + """ + Returns the number of subscriptions to patterns + + For more information see https://redis.io/commands/pubsub-numpat + """ + return self.execute_command("PUBSUB NUMPAT", **kwargs) + + def pubsub_numsub(self, *args: ChannelT, **kwargs) -> ResponseT: + """ + Return a list of (channel, number of subscribers) tuples + for each channel given in ``*args`` + + For more information see https://redis.io/commands/pubsub-numsub + """ + return self.execute_command("PUBSUB NUMSUB", *args, **kwargs) + + def pubsub_shardnumsub(self, *args: ChannelT, **kwargs) -> ResponseT: + """ + Return a list of (shard_channel, number of subscribers) tuples + for each channel given in ``*args`` + + For more information see https://redis.io/commands/pubsub-shardnumsub + """ + return self.execute_command("PUBSUB SHARDNUMSUB", *args, **kwargs) + + +AsyncPubSubCommands = PubSubCommands + + +class ScriptCommands(CommandsProtocol): + """ + Redis Lua script commands. see: + https://redis.com/ebook/part-3-next-steps/chapter-11-scripting-redis-with-lua/ + """ + + def _eval( + self, command: str, script: str, numkeys: int, *keys_and_args: str + ) -> Union[Awaitable[str], str]: + return self.execute_command(command, script, numkeys, *keys_and_args) + + def eval( + self, script: str, numkeys: int, *keys_and_args: str + ) -> Union[Awaitable[str], str]: + """ + Execute the Lua ``script``, specifying the ``numkeys`` the script + will touch and the key names and argument values in ``keys_and_args``. + Returns the result of the script. + + In practice, use the object returned by ``register_script``. This + function exists purely for Redis API completion. + + For more information see https://redis.io/commands/eval + """ + return self._eval("EVAL", script, numkeys, *keys_and_args) + + def eval_ro( + self, script: str, numkeys: int, *keys_and_args: str + ) -> Union[Awaitable[str], str]: + """ + The read-only variant of the EVAL command + + Execute the read-only Lua ``script`` specifying the ``numkeys`` the script + will touch and the key names and argument values in ``keys_and_args``. + Returns the result of the script. + + For more information see https://redis.io/commands/eval_ro + """ + return self._eval("EVAL_RO", script, numkeys, *keys_and_args) + + def _evalsha( + self, command: str, sha: str, numkeys: int, *keys_and_args: list + ) -> Union[Awaitable[str], str]: + return self.execute_command(command, sha, numkeys, *keys_and_args) + + def evalsha( + self, sha: str, numkeys: int, *keys_and_args: str + ) -> Union[Awaitable[str], str]: + """ + Use the ``sha`` to execute a Lua script already registered via EVAL + or SCRIPT LOAD. 
Specify the ``numkeys`` the script will touch and the
+ key names and argument values in ``keys_and_args``. Returns the result
+ of the script.
+
+ In practice, use the object returned by ``register_script``. This
+ function exists purely for Redis API completion.
+
+ For more information see https://redis.io/commands/evalsha
+ """
+ return self._evalsha("EVALSHA", sha, numkeys, *keys_and_args)
+
+ def evalsha_ro(
+ self, sha: str, numkeys: int, *keys_and_args: str
+ ) -> Union[Awaitable[str], str]:
+ """
+ The read-only variant of the EVALSHA command
+
+ Use the ``sha`` to execute a read-only Lua script already registered via EVAL
+ or SCRIPT LOAD. Specify the ``numkeys`` the script will touch and the
+ key names and argument values in ``keys_and_args``. Returns the result
+ of the script.
+
+ For more information see https://redis.io/commands/evalsha_ro
+ """
+ return self._evalsha("EVALSHA_RO", sha, numkeys, *keys_and_args)
+
+ def script_exists(self, *args: str) -> ResponseT:
+ """
+ Check if a script exists in the script cache by specifying the SHAs of
+ each script as ``args``. Returns a list of boolean values indicating
+ whether each script already exists in the cache.
+
+ For more information see https://redis.io/commands/script-exists
+ """
+ return self.execute_command("SCRIPT EXISTS", *args)
+
+ def script_debug(self, *args) -> None:
+ raise NotImplementedError(
+ "SCRIPT DEBUG is intentionally not implemented in the client."
+ )
+
+ def script_flush(
+ self, sync_type: Union[Literal["SYNC"], Literal["ASYNC"]] = None
+ ) -> ResponseT:
+ """Flush all scripts from the script cache.
+
+ ``sync_type`` is by default SYNC (synchronous) but it can also be
+ ASYNC.
+
+ For more information see https://redis.io/commands/script-flush
+ """
+
+ # Redis pre 6 had no sync_type.
+ if sync_type not in ["SYNC", "ASYNC", None]:
+ raise DataError(
+ "SCRIPT FLUSH defaults to SYNC in Redis 6.2 and above, or "
+ "accepts SYNC/ASYNC. For older versions of Redis, "
+ "leave as None."
+ )
+ if sync_type is None:
+ pieces = []
+ else:
+ pieces = [sync_type]
+ return self.execute_command("SCRIPT FLUSH", *pieces)
+
+ def script_kill(self) -> ResponseT:
+ """
+ Kill the currently executing Lua script
+
+ For more information see https://redis.io/commands/script-kill
+ """
+ return self.execute_command("SCRIPT KILL")
+
+ def script_load(self, script: ScriptTextT) -> ResponseT:
+ """
+ Load a Lua ``script`` into the script cache. Returns the SHA.
+
+ For more information see https://redis.io/commands/script-load
+ """
+ return self.execute_command("SCRIPT LOAD", script)
+
+ def register_script(self: "Redis", script: ScriptTextT) -> Script:
+ """
+ Register a Lua ``script`` specifying the ``keys`` it will touch.
+ Returns a Script object that is callable and hides the complexity of
+ dealing with scripts, keys, and shas. This is the preferred way to work
+ with Lua scripts.
+ """
+ return Script(self, script)
+
+
+class AsyncScriptCommands(ScriptCommands):
+ async def script_debug(self, *args) -> None:
+ return super().script_debug()
+
+ def register_script(self: "AsyncRedis", script: ScriptTextT) -> AsyncScript:
+ """
+ Register a Lua ``script`` specifying the ``keys`` it will touch.
+ Returns a Script object that is callable and hides the complexity of
+ dealing with scripts, keys, and shas. This is the preferred way to work
+ with Lua scripts.
+ """
+ return AsyncScript(self, script)
+
+
+class GeoCommands(CommandsProtocol):
+ """
+ Redis Geospatial commands.
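+
+ Minimal usage sketch (illustrative only; assumes a connected client ``r``;
+ coordinates are the Palermo/Catania examples from the Redis docs):
+ r.geoadd("Sicily", (13.361389, 38.115556, "Palermo", 15.087269, 37.502669, "Catania"))
+ r.geodist("Sicily", "Palermo", "Catania", unit="km")  # ~166.27
+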
+ see: https://redis.com/redis-best-practices/indexing-patterns/geospatial/ + """ + + def geoadd( + self, + name: KeyT, + values: Sequence[EncodableT], + nx: bool = False, + xx: bool = False, + ch: bool = False, + ) -> ResponseT: + """ + Add the specified geospatial items to the specified key identified + by the ``name`` argument. The Geospatial items are given as ordered + members of the ``values`` argument, each item or place is formed by + the triad longitude, latitude and name. + + Note: You can use ZREM to remove elements. + + ``nx`` forces ZADD to only create new elements and not to update + scores for elements that already exist. + + ``xx`` forces ZADD to only update scores of elements that already + exist. New elements will not be added. + + ``ch`` modifies the return value to be the numbers of elements changed. + Changed elements include new elements that were added and elements + whose scores changed. + + For more information see https://redis.io/commands/geoadd + """ + if nx and xx: + raise DataError("GEOADD allows either 'nx' or 'xx', not both") + if len(values) % 3 != 0: + raise DataError("GEOADD requires places with lon, lat and name values") + pieces = [name] + if nx: + pieces.append("NX") + if xx: + pieces.append("XX") + if ch: + pieces.append("CH") + pieces.extend(values) + return self.execute_command("GEOADD", *pieces) + + def geodist( + self, name: KeyT, place1: FieldT, place2: FieldT, unit: Union[str, None] = None + ) -> ResponseT: + """ + Return the distance between ``place1`` and ``place2`` members of the + ``name`` key. + The units must be one of the following : m, km mi, ft. By default + meters are used. + + For more information see https://redis.io/commands/geodist + """ + pieces: list[EncodableT] = [name, place1, place2] + if unit and unit not in ("m", "km", "mi", "ft"): + raise DataError("GEODIST invalid unit") + elif unit: + pieces.append(unit) + return self.execute_command("GEODIST", *pieces) + + def geohash(self, name: KeyT, *values: FieldT) -> ResponseT: + """ + Return the geo hash string for each item of ``values`` members of + the specified key identified by the ``name`` argument. + + For more information see https://redis.io/commands/geohash + """ + return self.execute_command("GEOHASH", name, *values) + + def geopos(self, name: KeyT, *values: FieldT) -> ResponseT: + """ + Return the positions of each item of ``values`` as members of + the specified key identified by the ``name`` argument. Each position + is represented by the pairs lon and lat. + + For more information see https://redis.io/commands/geopos + """ + return self.execute_command("GEOPOS", name, *values) + + def georadius( + self, + name: KeyT, + longitude: float, + latitude: float, + radius: float, + unit: Union[str, None] = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Union[int, None] = None, + sort: Union[str, None] = None, + store: Union[KeyT, None] = None, + store_dist: Union[KeyT, None] = None, + any: bool = False, + ) -> ResponseT: + """ + Return the members of the specified key identified by the + ``name`` argument which are within the borders of the area specified + with the ``latitude`` and ``longitude`` location and the maximum + distance from the center specified by the ``radius`` value. + + The units must be one of the following : m, km mi, ft. By default + + ``withdist`` indicates to return the distances of each place. + + ``withcoord`` indicates to return the latitude and longitude of + each place. 
+ + ``withhash`` indicates to return the geohash string of each place. + + ``count`` indicates to return the number of elements up to N. + + ``sort`` indicates to return the places in a sorted way, ASC for + nearest to fairest and DESC for fairest to nearest. + + ``store`` indicates to save the places names in a sorted set named + with a specific key, each element of the destination sorted set is + populated with the score got from the original geo sorted set. + + ``store_dist`` indicates to save the places names in a sorted set + named with a specific key, instead of ``store`` the sorted set + destination score is set with the distance. + + For more information see https://redis.io/commands/georadius + """ + return self._georadiusgeneric( + "GEORADIUS", + name, + longitude, + latitude, + radius, + unit=unit, + withdist=withdist, + withcoord=withcoord, + withhash=withhash, + count=count, + sort=sort, + store=store, + store_dist=store_dist, + any=any, + ) + + def georadiusbymember( + self, + name: KeyT, + member: FieldT, + radius: float, + unit: Union[str, None] = None, + withdist: bool = False, + withcoord: bool = False, + withhash: bool = False, + count: Union[int, None] = None, + sort: Union[str, None] = None, + store: Union[KeyT, None] = None, + store_dist: Union[KeyT, None] = None, + any: bool = False, + ) -> ResponseT: + """ + This command is exactly like ``georadius`` with the sole difference + that instead of taking, as the center of the area to query, a longitude + and latitude value, it takes the name of a member already existing + inside the geospatial index represented by the sorted set. + + For more information see https://redis.io/commands/georadiusbymember + """ + return self._georadiusgeneric( + "GEORADIUSBYMEMBER", + name, + member, + radius, + unit=unit, + withdist=withdist, + withcoord=withcoord, + withhash=withhash, + count=count, + sort=sort, + store=store, + store_dist=store_dist, + any=any, + ) + + def _georadiusgeneric( + self, command: str, *args: EncodableT, **kwargs: Union[EncodableT, None] + ) -> ResponseT: + pieces = list(args) + if kwargs["unit"] and kwargs["unit"] not in ("m", "km", "mi", "ft"): + raise DataError("GEORADIUS invalid unit") + elif kwargs["unit"]: + pieces.append(kwargs["unit"]) + else: + pieces.append("m") + + if kwargs["any"] and kwargs["count"] is None: + raise DataError("``any`` can't be provided without ``count``") + + for arg_name, byte_repr in ( + ("withdist", "WITHDIST"), + ("withcoord", "WITHCOORD"), + ("withhash", "WITHHASH"), + ): + if kwargs[arg_name]: + pieces.append(byte_repr) + + if kwargs["count"] is not None: + pieces.extend(["COUNT", kwargs["count"]]) + if kwargs["any"]: + pieces.append("ANY") + + if kwargs["sort"]: + if kwargs["sort"] == "ASC": + pieces.append("ASC") + elif kwargs["sort"] == "DESC": + pieces.append("DESC") + else: + raise DataError("GEORADIUS invalid sort") + + if kwargs["store"] and kwargs["store_dist"]: + raise DataError("GEORADIUS store and store_dist cant be set together") + + if kwargs["store"]: + pieces.extend([b"STORE", kwargs["store"]]) + + if kwargs["store_dist"]: + pieces.extend([b"STOREDIST", kwargs["store_dist"]]) + + return self.execute_command(command, *pieces, **kwargs) + + def geosearch( + self, + name: KeyT, + member: Union[FieldT, None] = None, + longitude: Union[float, None] = None, + latitude: Union[float, None] = None, + unit: str = "m", + radius: Union[float, None] = None, + width: Union[float, None] = None, + height: Union[float, None] = None, + sort: Union[str, None] = None, + count: 
Union[int, None] = None, + any: bool = False, + withcoord: bool = False, + withdist: bool = False, + withhash: bool = False, + ) -> ResponseT: + """ + Return the members of specified key identified by the + ``name`` argument, which are within the borders of the + area specified by a given shape. This command extends the + GEORADIUS command, so in addition to searching within circular + areas, it supports searching within rectangular areas. + + This command should be used in place of the deprecated + GEORADIUS and GEORADIUSBYMEMBER commands. + + ``member`` Use the position of the given existing + member in the sorted set. Can't be given with ``longitude`` + and ``latitude``. + + ``longitude`` and ``latitude`` Use the position given by + this coordinates. Can't be given with ``member`` + ``radius`` Similar to GEORADIUS, search inside circular + area according the given radius. Can't be given with + ``height`` and ``width``. + ``height`` and ``width`` Search inside an axis-aligned + rectangle, determined by the given height and width. + Can't be given with ``radius`` + + ``unit`` must be one of the following : m, km, mi, ft. + `m` for meters (the default value), `km` for kilometers, + `mi` for miles and `ft` for feet. + + ``sort`` indicates to return the places in a sorted way, + ASC for nearest to furthest and DESC for furthest to nearest. + + ``count`` limit the results to the first count matching items. + + ``any`` is set to True, the command will return as soon as + enough matches are found. Can't be provided without ``count`` + + ``withdist`` indicates to return the distances of each place. + ``withcoord`` indicates to return the latitude and longitude of + each place. + + ``withhash`` indicates to return the geohash string of each place. + + For more information see https://redis.io/commands/geosearch + """ + + return self._geosearchgeneric( + "GEOSEARCH", + name, + member=member, + longitude=longitude, + latitude=latitude, + unit=unit, + radius=radius, + width=width, + height=height, + sort=sort, + count=count, + any=any, + withcoord=withcoord, + withdist=withdist, + withhash=withhash, + store=None, + store_dist=None, + ) + + def geosearchstore( + self, + dest: KeyT, + name: KeyT, + member: Union[FieldT, None] = None, + longitude: Union[float, None] = None, + latitude: Union[float, None] = None, + unit: str = "m", + radius: Union[float, None] = None, + width: Union[float, None] = None, + height: Union[float, None] = None, + sort: Union[str, None] = None, + count: Union[int, None] = None, + any: bool = False, + storedist: bool = False, + ) -> ResponseT: + """ + This command is like GEOSEARCH, but stores the result in + ``dest``. By default, it stores the results in the destination + sorted set with their geospatial information. + if ``store_dist`` set to True, the command will stores the + items in a sorted set populated with their distance from the + center of the circle or box, as a floating-point number. 
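A short sketch of the ``geoadd``/``geosearch`` flow documented above, covering both the circular (radius) and rectangular (width/height) forms; the host, key, and places are illustrative and not part of this diff:

```python
import redis

r = redis.Redis(decode_responses=True)

# GEOADD expects flat (longitude, latitude, name) triads.
r.geoadd("stations", (13.361389, 38.115556, "Palermo"))
r.geoadd("stations", (15.087269, 37.502669, "Catania"))

# BYRADIUS: circular search around an explicit coordinate.
print(r.geosearch("stations", longitude=15.0, latitude=37.0,
                  radius=200, unit="km", sort="ASC"))

# BYBOX: rectangular search centered on an existing member.
print(r.geosearch("stations", member="Palermo",
                  width=400, height=400, unit="km", withdist=True))
```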
+ + For more information see https://redis.io/commands/geosearchstore + """ + return self._geosearchgeneric( + "GEOSEARCHSTORE", + dest, + name, + member=member, + longitude=longitude, + latitude=latitude, + unit=unit, + radius=radius, + width=width, + height=height, + sort=sort, + count=count, + any=any, + withcoord=None, + withdist=None, + withhash=None, + store=None, + store_dist=storedist, + ) + + def _geosearchgeneric( + self, command: str, *args: EncodableT, **kwargs: Union[EncodableT, None] + ) -> ResponseT: + pieces = list(args) + + # FROMMEMBER or FROMLONLAT + if kwargs["member"] is None: + if kwargs["longitude"] is None or kwargs["latitude"] is None: + raise DataError("GEOSEARCH must have member or longitude and latitude") + if kwargs["member"]: + if kwargs["longitude"] or kwargs["latitude"]: + raise DataError( + "GEOSEARCH member and longitude or latitude cant be set together" + ) + pieces.extend([b"FROMMEMBER", kwargs["member"]]) + if kwargs["longitude"] is not None and kwargs["latitude"] is not None: + pieces.extend([b"FROMLONLAT", kwargs["longitude"], kwargs["latitude"]]) + + # BYRADIUS or BYBOX + if kwargs["radius"] is None: + if kwargs["width"] is None or kwargs["height"] is None: + raise DataError("GEOSEARCH must have radius or width and height") + if kwargs["unit"] is None: + raise DataError("GEOSEARCH must have unit") + if kwargs["unit"].lower() not in ("m", "km", "mi", "ft"): + raise DataError("GEOSEARCH invalid unit") + if kwargs["radius"]: + if kwargs["width"] or kwargs["height"]: + raise DataError( + "GEOSEARCH radius and width or height cant be set together" + ) + pieces.extend([b"BYRADIUS", kwargs["radius"], kwargs["unit"]]) + if kwargs["width"] and kwargs["height"]: + pieces.extend([b"BYBOX", kwargs["width"], kwargs["height"], kwargs["unit"]]) + + # sort + if kwargs["sort"]: + if kwargs["sort"].upper() == "ASC": + pieces.append(b"ASC") + elif kwargs["sort"].upper() == "DESC": + pieces.append(b"DESC") + else: + raise DataError("GEOSEARCH invalid sort") + + # count any + if kwargs["count"]: + pieces.extend([b"COUNT", kwargs["count"]]) + if kwargs["any"]: + pieces.append(b"ANY") + elif kwargs["any"]: + raise DataError("GEOSEARCH ``any`` can't be provided without count") + + # other properties + for arg_name, byte_repr in ( + ("withdist", b"WITHDIST"), + ("withcoord", b"WITHCOORD"), + ("withhash", b"WITHHASH"), + ("store_dist", b"STOREDIST"), + ): + if kwargs[arg_name]: + pieces.append(byte_repr) + + return self.execute_command(command, *pieces, **kwargs) + + +AsyncGeoCommands = GeoCommands + + +class ModuleCommands(CommandsProtocol): + """ + Redis Module commands. + see: https://redis.io/topics/modules-intro + """ + + def module_load(self, path, *args) -> ResponseT: + """ + Loads the module from ``path``. + Passes all ``*args`` to the module, during loading. + Raises ``ModuleError`` if a module is not found at ``path``. + + For more information see https://redis.io/commands/module-load + """ + return self.execute_command("MODULE LOAD", path, *args) + + def module_loadex( + self, + path: str, + options: Optional[List[str]] = None, + args: Optional[List[str]] = None, + ) -> ResponseT: + """ + Loads a module from a dynamic library at runtime with configuration directives. 
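The module commands above are thin wrappers over the server's ``MODULE`` subcommands. A sketch of loading and listing modules, not part of this diff (the ``.so`` path is hypothetical, and the server must permit ``MODULE LOAD``):

```python
import redis
from redis.exceptions import ModuleError

r = redis.Redis()

try:
    r.module_load("/opt/redis/modules/example.so")
except ModuleError as exc:
    print(f"module load failed: {exc}")

# Each entry describes one loaded module (name and version).
for module in r.module_list():
    print(module)
```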
+ + For more information see https://redis.io/commands/module-loadex + """ + pieces = [] + if options is not None: + pieces.append("CONFIG") + pieces.extend(options) + if args is not None: + pieces.append("ARGS") + pieces.extend(args) + + return self.execute_command("MODULE LOADEX", path, *pieces) + + def module_unload(self, name) -> ResponseT: + """ + Unloads the module ``name``. + Raises ``ModuleError`` if ``name`` is not in loaded modules. + + For more information see https://redis.io/commands/module-unload + """ + return self.execute_command("MODULE UNLOAD", name) + + def module_list(self) -> ResponseT: + """ + Returns a list of dictionaries containing the name and version of + all loaded modules. + + For more information see https://redis.io/commands/module-list + """ + return self.execute_command("MODULE LIST") + + def command_info(self) -> None: + raise NotImplementedError( + "COMMAND INFO is intentionally not implemented in the client." + ) + + def command_count(self) -> ResponseT: + return self.execute_command("COMMAND COUNT") + + def command_getkeys(self, *args) -> ResponseT: + return self.execute_command("COMMAND GETKEYS", *args) + + def command(self) -> ResponseT: + return self.execute_command("COMMAND") + + +class Script: + """ + An executable Lua script object returned by ``register_script`` + """ + + def __init__(self, registered_client, script): + self.registered_client = registered_client + self.script = script + # Precalculate and store the SHA1 hex digest of the script. + + if isinstance(script, str): + # We need the encoding from the client in order to generate an + # accurate byte representation of the script + encoder = self.get_encoder() + script = encoder.encode(script) + self.sha = hashlib.sha1(script).hexdigest() + + def __call__(self, keys=[], args=[], client=None): + "Execute the script, passing any required ``args``" + if client is None: + client = self.registered_client + args = tuple(keys) + tuple(args) + # make sure the Redis server knows about the script + from redis.client import Pipeline + + if isinstance(client, Pipeline): + # Make sure the pipeline can register the script before executing. + client.scripts.add(self) + try: + return client.evalsha(self.sha, len(keys), *args) + except NoScriptError: + # Maybe the client is pointed to a different server than the client + # that created this instance? + # Overwrite the sha just in case there was a discrepancy. + self.sha = client.script_load(self.script) + return client.evalsha(self.sha, len(keys), *args) + + def get_encoder(self): + """Get the encoder to encode string scripts into bytes.""" + try: + return self.registered_client.get_encoder() + except AttributeError: + # DEPRECATED + # In version <=4.1.2, this was the code we used to get the encoder. + # However, after 4.1.2 we added support for scripting in clustered + # redis. ClusteredRedis doesn't have a `.connection_pool` attribute + # so we changed the Script class to use + # `self.registered_client.get_encoder` (see above). + # However, that is technically a breaking change, as consumers who + # use Scripts directly might inject a `registered_client` that + # doesn't have a `.get_encoder` field. This try/except prevents us + # from breaking backward-compatibility. Ideally, it would be + # removed in the next major release. 
+ return self.registered_client.connection_pool.get_encoder() + + +class AsyncModuleCommands(ModuleCommands): + async def command_info(self) -> None: + return super().command_info() + + +class ClusterCommands(CommandsProtocol): + """ + Class for Redis Cluster commands + """ + + def cluster(self, cluster_arg, *args, **kwargs) -> ResponseT: + return self.execute_command(f"CLUSTER {cluster_arg.upper()}", *args, **kwargs) + + def readwrite(self, **kwargs) -> ResponseT: + """ + Disables read queries for a connection to a Redis Cluster slave node. + + For more information see https://redis.io/commands/readwrite + """ + return self.execute_command("READWRITE", **kwargs) + + def readonly(self, **kwargs) -> ResponseT: + """ + Enables read queries for a connection to a Redis Cluster replica node. + + For more information see https://redis.io/commands/readonly + """ + return self.execute_command("READONLY", **kwargs) + + +AsyncClusterCommands = ClusterCommands + + +class FunctionCommands: + """ + Redis Function commands + """ + + def function_load( + self, code: str, replace: Optional[bool] = False + ) -> Union[Awaitable[str], str]: + """ + Load a library to Redis. + :param code: the source code (must start with + Shebang statement that provides a metadata about the library) + :param replace: changes the behavior to overwrite the existing library + with the new contents. + Return the library name that was loaded. + + For more information see https://redis.io/commands/function-load + """ + pieces = ["REPLACE"] if replace else [] + pieces.append(code) + return self.execute_command("FUNCTION LOAD", *pieces) + + def function_delete(self, library: str) -> Union[Awaitable[str], str]: + """ + Delete the library called ``library`` and all its functions. + + For more information see https://redis.io/commands/function-delete + """ + return self.execute_command("FUNCTION DELETE", library) + + def function_flush(self, mode: str = "SYNC") -> Union[Awaitable[str], str]: + """ + Deletes all the libraries. + + For more information see https://redis.io/commands/function-flush + """ + return self.execute_command("FUNCTION FLUSH", mode) + + def function_list( + self, library: Optional[str] = "*", withcode: Optional[bool] = False + ) -> Union[Awaitable[List], List]: + """ + Return information about the functions and libraries. + :param library: pecify a pattern for matching library names + :param withcode: cause the server to include the libraries source + implementation in the reply + """ + args = ["LIBRARYNAME", library] + if withcode: + args.append("WITHCODE") + return self.execute_command("FUNCTION LIST", *args) + + def _fcall( + self, command: str, function, numkeys: int, *keys_and_args: Optional[List] + ) -> Union[Awaitable[str], str]: + return self.execute_command(command, function, numkeys, *keys_and_args) + + def fcall( + self, function, numkeys: int, *keys_and_args: Optional[List] + ) -> Union[Awaitable[str], str]: + """ + Invoke a function. + + For more information see https://redis.io/commands/fcall + """ + return self._fcall("FCALL", function, numkeys, *keys_and_args) + + def fcall_ro( + self, function, numkeys: int, *keys_and_args: Optional[List] + ) -> Union[Awaitable[str], str]: + """ + This is a read-only variant of the FCALL command that cannot + execute commands that modify data. 
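A sketch of the ``FUNCTION`` workflow implemented above, assuming a Redis 7+ server: load a Lua library once, then invoke its functions with ``fcall``. The library name and function below are illustrative, not part of this diff:

```python
import redis

r = redis.Redis(decode_responses=True)

lib_code = """#!lua name=mylib
redis.register_function('myset', function(keys, args)
    return redis.call('SET', keys[1], args[1])
end)
"""

r.function_load(lib_code, replace=True)   # returns the library name
r.fcall("myset", 1, "greeting", "hello")  # 1 = number of keys
print(r.get("greeting"))                  # "hello"
```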
+ + For more information see https://redis.io/commands/fcal_ro + """ + return self._fcall("FCALL_RO", function, numkeys, *keys_and_args) + + def function_dump(self) -> Union[Awaitable[str], str]: + """ + Return the serialized payload of loaded libraries. + + For more information see https://redis.io/commands/function-dump + """ + from redis.client import NEVER_DECODE + + options = {} + options[NEVER_DECODE] = [] + + return self.execute_command("FUNCTION DUMP", **options) + + def function_restore( + self, payload: str, policy: Optional[str] = "APPEND" + ) -> Union[Awaitable[str], str]: + """ + Restore libraries from the serialized ``payload``. + You can use the optional policy argument to provide a policy + for handling existing libraries. + + For more information see https://redis.io/commands/function-restore + """ + return self.execute_command("FUNCTION RESTORE", payload, policy) + + def function_kill(self) -> Union[Awaitable[str], str]: + """ + Kill a function that is currently executing. + + For more information see https://redis.io/commands/function-kill + """ + return self.execute_command("FUNCTION KILL") + + def function_stats(self) -> Union[Awaitable[List], List]: + """ + Return information about the function that's currently running + and information about the available execution engines. + + For more information see https://redis.io/commands/function-stats + """ + return self.execute_command("FUNCTION STATS") + + +AsyncFunctionCommands = FunctionCommands + + +class GearsCommands: + def tfunction_load( + self, lib_code: str, replace: bool = False, config: Union[str, None] = None + ) -> ResponseT: + """ + Load a new library to RedisGears. + + ``lib_code`` - the library code. + ``config`` - a string representation of a JSON object + that will be provided to the library on load time, + for more information refer to + https://github.com/RedisGears/RedisGears/blob/master/docs/function_advance_topics.md#library-configuration + ``replace`` - an optional argument, instructs RedisGears to replace the + function if its already exists + + For more information see https://redis.io/commands/tfunction-load/ + """ + pieces = [] + if replace: + pieces.append("REPLACE") + if config is not None: + pieces.extend(["CONFIG", config]) + pieces.append(lib_code) + return self.execute_command("TFUNCTION LOAD", *pieces) + + def tfunction_delete(self, lib_name: str) -> ResponseT: + """ + Delete a library from RedisGears. + + ``lib_name`` the library name to delete. + + For more information see https://redis.io/commands/tfunction-delete/ + """ + return self.execute_command("TFUNCTION DELETE", lib_name) + + def tfunction_list( + self, + with_code: bool = False, + verbose: int = 0, + lib_name: Union[str, None] = None, + ) -> ResponseT: + """ + List the functions with additional information about each function. + + ``with_code`` Show libraries code. 
+ ``verbose`` output verbosity level, higher number will increase verbosity level + ``lib_name`` specifying a library name (can be used multiple times to show multiple libraries in a single command) # noqa + + For more information see https://redis.io/commands/tfunction-list/ + """ + pieces = [] + if with_code: + pieces.append("WITHCODE") + if verbose >= 1 and verbose <= 3: + pieces.append("v" * verbose) + else: + raise DataError("verbose can be 1, 2 or 3") + if lib_name is not None: + pieces.append("LIBRARY") + pieces.append(lib_name) + + return self.execute_command("TFUNCTION LIST", *pieces) + + def _tfcall( + self, + lib_name: str, + func_name: str, + keys: KeysT = None, + _async: bool = False, + *args: List, + ) -> ResponseT: + pieces = [f"{lib_name}.{func_name}"] + if keys is not None: + pieces.append(len(keys)) + pieces.extend(keys) + else: + pieces.append(0) + if args is not None: + pieces.extend(args) + if _async: + return self.execute_command("TFCALLASYNC", *pieces) + return self.execute_command("TFCALL", *pieces) + + def tfcall( + self, + lib_name: str, + func_name: str, + keys: KeysT = None, + *args: List, + ) -> ResponseT: + """ + Invoke a function. + + ``lib_name`` - the library name contains the function. + ``func_name`` - the function name to run. + ``keys`` - the keys that will be touched by the function. + ``args`` - Additional argument to pass to the function. + + For more information see https://redis.io/commands/tfcall/ + """ + return self._tfcall(lib_name, func_name, keys, False, *args) + + def tfcall_async( + self, + lib_name: str, + func_name: str, + keys: KeysT = None, + *args: List, + ) -> ResponseT: + """ + Invoke an async function (coroutine). + + ``lib_name`` - the library name contains the function. + ``func_name`` - the function name to run. + ``keys`` - the keys that will be touched by the function. + ``args`` - Additional argument to pass to the function. + + For more information see https://redis.io/commands/tfcall/ + """ + return self._tfcall(lib_name, func_name, keys, True, *args) + + +AsyncGearsCommands = GearsCommands + + +class DataAccessCommands( + BasicKeyCommands, + HyperlogCommands, + HashCommands, + GeoCommands, + ListCommands, + ScanCommands, + SetCommands, + StreamCommands, + SortedSetCommands, +): + """ + A class containing all of the implemented data access redis commands. + This class is to be used as a mixin for synchronous Redis clients. + """ + + +class AsyncDataAccessCommands( + AsyncBasicKeyCommands, + AsyncHyperlogCommands, + AsyncHashCommands, + AsyncGeoCommands, + AsyncListCommands, + AsyncScanCommands, + AsyncSetCommands, + AsyncStreamCommands, + AsyncSortedSetCommands, +): + """ + A class containing all of the implemented data access redis commands. + This class is to be used as a mixin for asynchronous Redis clients. + """ + + +class CoreCommands( + ACLCommands, + ClusterCommands, + DataAccessCommands, + ManagementCommands, + ModuleCommands, + PubSubCommands, + ScriptCommands, + FunctionCommands, + GearsCommands, +): + """ + A class containing all of the implemented redis commands. This class is + to be used as a mixin for synchronous Redis clients. + """ + + +class AsyncCoreCommands( + AsyncACLCommands, + AsyncClusterCommands, + AsyncDataAccessCommands, + AsyncManagementCommands, + AsyncModuleCommands, + AsyncPubSubCommands, + AsyncScriptCommands, + AsyncFunctionCommands, + AsyncGearsCommands, +): + """ + A class containing all of the implemented redis commands. This class is + to be used as a mixin for asynchronous Redis clients. 
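``CoreCommands``/``AsyncCoreCommands`` above are pure mixins: every command method ultimately routes through the single ``execute_command`` supplied by the concrete client. A toy sketch of the same pattern (hypothetical classes, not from this diff):

```python
class PingCommands:
    """Mixin: knows how to spell a command, not how to send it."""

    def ping(self):
        return self.execute_command("PING")


class DummyClient(PingCommands):
    """Concrete client: supplies the transport via execute_command."""

    def execute_command(self, *args):
        print("->", *args)
        return "PONG"


print(DummyClient().ping())  # prints "-> PING" then "PONG"
```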
+ """ diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/__init__.py b/templates/skills/spotify/dependencies/redis/commands/graph/__init__.py new file mode 100644 index 00000000..ffaf1fb4 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/graph/__init__.py @@ -0,0 +1,263 @@ +import warnings + +from ..helpers import quote_string, random_string, stringify_param_value +from .commands import AsyncGraphCommands, GraphCommands +from .edge import Edge # noqa +from .node import Node # noqa +from .path import Path # noqa + +DB_LABELS = "DB.LABELS" +DB_RAELATIONSHIPTYPES = "DB.RELATIONSHIPTYPES" +DB_PROPERTYKEYS = "DB.PROPERTYKEYS" + + +class Graph(GraphCommands): + """ + Graph, collection of nodes and edges. + """ + + def __init__(self, client, name=random_string()): + """ + Create a new graph. + """ + warnings.warn( + DeprecationWarning( + "RedisGraph support is deprecated as of Redis Stack 7.2 \ + (https://redis.com/blog/redisgraph-eol/)" + ) + ) + self.NAME = name # Graph key + self.client = client + self.execute_command = client.execute_command + + self.nodes = {} + self.edges = [] + self._labels = [] # List of node labels. + self._properties = [] # List of properties. + self._relationship_types = [] # List of relation types. + self.version = 0 # Graph version + + @property + def name(self): + return self.NAME + + def _clear_schema(self): + self._labels = [] + self._properties = [] + self._relationship_types = [] + + def _refresh_schema(self): + self._clear_schema() + self._refresh_labels() + self._refresh_relations() + self._refresh_attributes() + + def _refresh_labels(self): + lbls = self.labels() + + # Unpack data. + self._labels = [l[0] for _, l in enumerate(lbls)] + + def _refresh_relations(self): + rels = self.relationship_types() + + # Unpack data. + self._relationship_types = [r[0] for _, r in enumerate(rels)] + + def _refresh_attributes(self): + props = self.property_keys() + + # Unpack data. + self._properties = [p[0] for _, p in enumerate(props)] + + def get_label(self, idx): + """ + Returns a label by it's index + + Args: + + idx: + The index of the label + """ + try: + label = self._labels[idx] + except IndexError: + # Refresh labels. + self._refresh_labels() + label = self._labels[idx] + return label + + def get_relation(self, idx): + """ + Returns a relationship type by it's index + + Args: + + idx: + The index of the relation + """ + try: + relationship_type = self._relationship_types[idx] + except IndexError: + # Refresh relationship types. + self._refresh_relations() + relationship_type = self._relationship_types[idx] + return relationship_type + + def get_property(self, idx): + """ + Returns a property by it's index + + Args: + + idx: + The index of the property + """ + try: + p = self._properties[idx] + except IndexError: + # Refresh properties. + self._refresh_attributes() + p = self._properties[idx] + return p + + def add_node(self, node): + """ + Adds a node to the graph. + """ + if node.alias is None: + node.alias = random_string() + self.nodes[node.alias] = node + + def add_edge(self, edge): + """ + Adds an edge to the graph. 
+ """ + if not (self.nodes[edge.src_node.alias] and self.nodes[edge.dest_node.alias]): + raise AssertionError("Both edge's end must be in the graph") + + self.edges.append(edge) + + def _build_params_header(self, params): + if params is None: + return "" + if not isinstance(params, dict): + raise TypeError("'params' must be a dict") + # Header starts with "CYPHER" + params_header = "CYPHER " + for key, value in params.items(): + params_header += str(key) + "=" + stringify_param_value(value) + " " + return params_header + + # Procedures. + def call_procedure(self, procedure, *args, read_only=False, **kwagrs): + args = [quote_string(arg) for arg in args] + q = f"CALL {procedure}({','.join(args)})" + + y = kwagrs.get("y", None) + if y is not None: + q += f"YIELD {','.join(y)}" + + return self.query(q, read_only=read_only) + + def labels(self): + return self.call_procedure(DB_LABELS, read_only=True).result_set + + def relationship_types(self): + return self.call_procedure(DB_RAELATIONSHIPTYPES, read_only=True).result_set + + def property_keys(self): + return self.call_procedure(DB_PROPERTYKEYS, read_only=True).result_set + + +class AsyncGraph(Graph, AsyncGraphCommands): + """Async version for Graph""" + + async def _refresh_labels(self): + lbls = await self.labels() + + # Unpack data. + self._labels = [l[0] for _, l in enumerate(lbls)] + + async def _refresh_attributes(self): + props = await self.property_keys() + + # Unpack data. + self._properties = [p[0] for _, p in enumerate(props)] + + async def _refresh_relations(self): + rels = await self.relationship_types() + + # Unpack data. + self._relationship_types = [r[0] for _, r in enumerate(rels)] + + async def get_label(self, idx): + """ + Returns a label by it's index + + Args: + + idx: + The index of the label + """ + try: + label = self._labels[idx] + except IndexError: + # Refresh labels. + await self._refresh_labels() + label = self._labels[idx] + return label + + async def get_property(self, idx): + """ + Returns a property by it's index + + Args: + + idx: + The index of the property + """ + try: + p = self._properties[idx] + except IndexError: + # Refresh properties. + await self._refresh_attributes() + p = self._properties[idx] + return p + + async def get_relation(self, idx): + """ + Returns a relationship type by it's index + + Args: + + idx: + The index of the relation + """ + try: + relationship_type = self._relationship_types[idx] + except IndexError: + # Refresh relationship types. 
+            await self._refresh_relations()
+            relationship_type = self._relationship_types[idx]
+        return relationship_type
+
+    async def call_procedure(self, procedure, *args, read_only=False, **kwargs):
+        args = [quote_string(arg) for arg in args]
+        q = f"CALL {procedure}({','.join(args)})"
+
+        y = kwargs.get("y", None)
+        if y is not None:
+            q += f"YIELD {','.join(y)}"
+        return await self.query(q, read_only=read_only)
+
+    async def labels(self):
+        return (await self.call_procedure(DB_LABELS, read_only=True)).result_set
+
+    async def property_keys(self):
+        return (await self.call_procedure(DB_PROPERTYKEYS, read_only=True)).result_set
+
+    async def relationship_types(self):
+        return (
+            await self.call_procedure(DB_RAELATIONSHIPTYPES, read_only=True)
+        ).result_set
diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/commands.py b/templates/skills/spotify/dependencies/redis/commands/graph/commands.py
new file mode 100644
index 00000000..762ab42e
--- /dev/null
+++ b/templates/skills/spotify/dependencies/redis/commands/graph/commands.py
@@ -0,0 +1,313 @@
+from redis import DataError
+from redis.exceptions import ResponseError
+
+from .exceptions import VersionMismatchException
+from .execution_plan import ExecutionPlan
+from .query_result import AsyncQueryResult, QueryResult
+
+PROFILE_CMD = "GRAPH.PROFILE"
+RO_QUERY_CMD = "GRAPH.RO_QUERY"
+QUERY_CMD = "GRAPH.QUERY"
+DELETE_CMD = "GRAPH.DELETE"
+SLOWLOG_CMD = "GRAPH.SLOWLOG"
+CONFIG_CMD = "GRAPH.CONFIG"
+LIST_CMD = "GRAPH.LIST"
+EXPLAIN_CMD = "GRAPH.EXPLAIN"
+
+
+class GraphCommands:
+    """RedisGraph Commands"""
+
+    def commit(self):
+        """
+        Create entire graph.
+        """
+        if len(self.nodes) == 0 and len(self.edges) == 0:
+            return None
+
+        query = "CREATE "
+        for _, node in self.nodes.items():
+            query += str(node) + ","
+
+        query += ",".join([str(edge) for edge in self.edges])
+
+        # Discard leading comma.
+        if query[-1] == ",":
+            query = query[:-1]
+
+        return self.query(query)
+
+    def query(self, q, params=None, timeout=None, read_only=False, profile=False):
+        """
+        Executes a query against the graph.
+        For more information see `GRAPH.QUERY <https://redis.io/commands/graph.query>`_. # noqa
+
+        Args:
+
+        q : str
+            The query.
+        params : dict
+            Query parameters.
+        timeout : int
+            Maximum runtime for read queries in milliseconds.
+        read_only : bool
+            Executes a readonly query if set to True.
+        profile : bool
+            Return details on results produced by and time
+            spent in each operation.
+        """
+
+        # maintain original 'q'
+        query = q
+
+        # handle query parameters
+        query = self._build_params_header(params) + query
+
+        # construct query command
+        # ask for compact result-set format
+        # specify known graph version
+        if profile:
+            cmd = PROFILE_CMD
+        else:
+            cmd = RO_QUERY_CMD if read_only else QUERY_CMD
+        command = [cmd, self.name, query, "--compact"]
+
+        # include timeout if specified
+        if isinstance(timeout, int):
+            command.extend(["timeout", timeout])
+        elif timeout is not None:
+            raise Exception("Timeout argument must be a positive integer")
+
+        # issue query
+        try:
+            response = self.execute_command(*command)
+            return QueryResult(self, response, profile)
+        except ResponseError as e:
+            if "unknown command" in str(e) and read_only:
+                # `GRAPH.RO_QUERY` is unavailable in older versions.
+ return self.query(q, params, timeout, read_only=False) + raise e + except VersionMismatchException as e: + # client view over the graph schema is out of sync + # set client version and refresh local schema + self.version = e.version + self._refresh_schema() + # re-issue query + return self.query(q, params, timeout, read_only) + + def merge(self, pattern): + """ + Merge pattern. + """ + query = "MERGE " + query += str(pattern) + + return self.query(query) + + def delete(self): + """ + Deletes graph. + For more information see `DELETE `_. # noqa + """ + self._clear_schema() + return self.execute_command(DELETE_CMD, self.name) + + # declared here, to override the built in redis.db.flush() + def flush(self): + """ + Commit the graph and reset the edges and the nodes to zero length. + """ + self.commit() + self.nodes = {} + self.edges = [] + + def bulk(self, **kwargs): + """Internal only. Not supported.""" + raise NotImplementedError( + "GRAPH.BULK is internal only. " + "Use https://github.com/redisgraph/redisgraph-bulk-loader." + ) + + def profile(self, query): + """ + Execute a query and produce an execution plan augmented with metrics + for each operation's execution. Return a string representation of a + query execution plan, with details on results produced by and time + spent in each operation. + For more information see `GRAPH.PROFILE `_. # noqa + """ + return self.query(query, profile=True) + + def slowlog(self): + """ + Get a list containing up to 10 of the slowest queries issued + against the given graph ID. + For more information see `GRAPH.SLOWLOG `_. # noqa + + Each item in the list has the following structure: + 1. A unix timestamp at which the log entry was processed. + 2. The issued command. + 3. The issued query. + 4. The amount of time needed for its execution, in milliseconds. + """ + return self.execute_command(SLOWLOG_CMD, self.name) + + def config(self, name, value=None, set=False): + """ + Retrieve or update a RedisGraph configuration. + For more information see `https://redis.io/commands/graph.config-get/>`_. # noqa + + Args: + + name : str + The name of the configuration + value : + The value we want to set (can be used only when `set` is on) + set : bool + Turn on to set a configuration. Default behavior is get. + """ + params = ["SET" if set else "GET", name] + if value is not None: + if set: + params.append(value) + else: + raise DataError( + "``value`` can be provided only when ``set`` is True" + ) # noqa + return self.execute_command(CONFIG_CMD, *params) + + def list_keys(self): + """ + Lists all graph keys in the keyspace. + For more information see `GRAPH.LIST `_. # noqa + """ + return self.execute_command(LIST_CMD) + + def execution_plan(self, query, params=None): + """ + Get the execution plan for given query, + GRAPH.EXPLAIN returns an array of operations. + + Args: + query: the query that will be executed + params: query parameters + """ + query = self._build_params_header(params) + query + + plan = self.execute_command(EXPLAIN_CMD, self.name, query) + if isinstance(plan[0], bytes): + plan = [b.decode() for b in plan] + return "\n".join(plan) + + def explain(self, query, params=None): + """ + Get the execution plan for given query, + GRAPH.EXPLAIN returns ExecutionPlan object. + For more information see `GRAPH.EXPLAIN `_. 
# noqa + + Args: + query: the query that will be executed + params: query parameters + """ + query = self._build_params_header(params) + query + + plan = self.execute_command(EXPLAIN_CMD, self.name, query) + return ExecutionPlan(plan) + + +class AsyncGraphCommands(GraphCommands): + async def query(self, q, params=None, timeout=None, read_only=False, profile=False): + """ + Executes a query against the graph. + For more information see `GRAPH.QUERY `_. # noqa + + Args: + + q : str + The query. + params : dict + Query parameters. + timeout : int + Maximum runtime for read queries in milliseconds. + read_only : bool + Executes a readonly query if set to True. + profile : bool + Return details on results produced by and time + spent in each operation. + """ + + # maintain original 'q' + query = q + + # handle query parameters + query = self._build_params_header(params) + query + + # construct query command + # ask for compact result-set format + # specify known graph version + if profile: + cmd = PROFILE_CMD + else: + cmd = RO_QUERY_CMD if read_only else QUERY_CMD + command = [cmd, self.name, query, "--compact"] + + # include timeout is specified + if isinstance(timeout, int): + command.extend(["timeout", timeout]) + elif timeout is not None: + raise Exception("Timeout argument must be a positive integer") + + # issue query + try: + response = await self.execute_command(*command) + return await AsyncQueryResult().initialize(self, response, profile) + except ResponseError as e: + if "unknown command" in str(e) and read_only: + # `GRAPH.RO_QUERY` is unavailable in older versions. + return await self.query(q, params, timeout, read_only=False) + raise e + except VersionMismatchException as e: + # client view over the graph schema is out of sync + # set client version and refresh local schema + self.version = e.version + self._refresh_schema() + # re-issue query + return await self.query(q, params, timeout, read_only) + + async def execution_plan(self, query, params=None): + """ + Get the execution plan for given query, + GRAPH.EXPLAIN returns an array of operations. + + Args: + query: the query that will be executed + params: query parameters + """ + query = self._build_params_header(params) + query + + plan = await self.execute_command(EXPLAIN_CMD, self.name, query) + if isinstance(plan[0], bytes): + plan = [b.decode() for b in plan] + return "\n".join(plan) + + async def explain(self, query, params=None): + """ + Get the execution plan for given query, + GRAPH.EXPLAIN returns ExecutionPlan object. + + Args: + query: the query that will be executed + params: query parameters + """ + query = self._build_params_header(params) + query + + plan = await self.execute_command(EXPLAIN_CMD, self.name, query) + return ExecutionPlan(plan) + + async def flush(self): + """ + Commit the graph and reset the edges and the nodes to zero length. + """ + await self.commit() + self.nodes = {} + self.edges = [] diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/edge.py b/templates/skills/spotify/dependencies/redis/commands/graph/edge.py new file mode 100644 index 00000000..6ee195f1 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/graph/edge.py @@ -0,0 +1,91 @@ +from ..helpers import quote_string +from .node import Node + + +class Edge: + """ + An edge connecting two nodes. + """ + + def __init__(self, src_node, relation, dest_node, edge_id=None, properties=None): + """ + Create a new edge. 
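The ``query``/``_build_params_header`` pair above passes parameters by prefixing the query with a ``CYPHER k=v ...`` header rather than interpolating values into the query string. A usage sketch (the graph name and data are illustrative):

```python
import redis

r = redis.Redis(decode_responses=True)
g = r.graph("social")

# Sent as: GRAPH.QUERY social 'CYPHER name="Alice" MATCH ...' --compact
result = g.query(
    "MATCH (p:Person {name: $name}) RETURN p.name",
    params={"name": "Alice"},
    timeout=2000,  # milliseconds
)
for row in result.result_set:
    print(row)
```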
+ """ + if src_node is None or dest_node is None: + # NOTE(bors-42): It makes sense to change AssertionError to + # ValueError here + raise AssertionError("Both src_node & dest_node must be provided") + + self.id = edge_id + self.relation = relation or "" + self.properties = properties or {} + self.src_node = src_node + self.dest_node = dest_node + + def to_string(self): + res = "" + if self.properties: + props = ",".join( + key + ":" + str(quote_string(val)) + for key, val in sorted(self.properties.items()) + ) + res += "{" + props + "}" + + return res + + def __str__(self): + # Source node. + if isinstance(self.src_node, Node): + res = str(self.src_node) + else: + res = "()" + + # Edge + res += "-[" + if self.relation: + res += ":" + self.relation + if self.properties: + props = ",".join( + key + ":" + str(quote_string(val)) + for key, val in sorted(self.properties.items()) + ) + res += "{" + props + "}" + res += "]->" + + # Dest node. + if isinstance(self.dest_node, Node): + res += str(self.dest_node) + else: + res += "()" + + return res + + def __eq__(self, rhs): + # Type checking + if not isinstance(rhs, Edge): + return False + + # Quick positive check, if both IDs are set. + if self.id is not None and rhs.id is not None and self.id == rhs.id: + return True + + # Source and destination nodes should match. + if self.src_node != rhs.src_node: + return False + + if self.dest_node != rhs.dest_node: + return False + + # Relation should match. + if self.relation != rhs.relation: + return False + + # Quick check for number of properties. + if len(self.properties) != len(rhs.properties): + return False + + # Compare properties. + if self.properties != rhs.properties: + return False + + return True diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/exceptions.py b/templates/skills/spotify/dependencies/redis/commands/graph/exceptions.py new file mode 100644 index 00000000..4bbac100 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/graph/exceptions.py @@ -0,0 +1,3 @@ +class VersionMismatchException(Exception): + def __init__(self, version): + self.version = version diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/execution_plan.py b/templates/skills/spotify/dependencies/redis/commands/graph/execution_plan.py new file mode 100644 index 00000000..179a80cc --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/graph/execution_plan.py @@ -0,0 +1,211 @@ +import re + + +class ProfileStats: + """ + ProfileStats, runtime execution statistics of operation. + """ + + def __init__(self, records_produced, execution_time): + self.records_produced = records_produced + self.execution_time = execution_time + + +class Operation: + """ + Operation, single operation within execution plan. + """ + + def __init__(self, name, args=None, profile_stats=None): + """ + Create a new operation. 
+ + Args: + name: string that represents the name of the operation + args: operation arguments + profile_stats: profile statistics + """ + self.name = name + self.args = args + self.profile_stats = profile_stats + self.children = [] + + def append_child(self, child): + if not isinstance(child, Operation) or self is child: + raise Exception("child must be Operation") + + self.children.append(child) + return self + + def child_count(self): + return len(self.children) + + def __eq__(self, o: object) -> bool: + if not isinstance(o, Operation): + return False + + return self.name == o.name and self.args == o.args + + def __str__(self) -> str: + args_str = "" if self.args is None else " | " + self.args + return f"{self.name}{args_str}" + + +class ExecutionPlan: + """ + ExecutionPlan, collection of operations. + """ + + def __init__(self, plan): + """ + Create a new execution plan. + + Args: + plan: array of strings that represents the collection operations + the output from GRAPH.EXPLAIN + """ + if not isinstance(plan, list): + raise Exception("plan must be an array") + + if isinstance(plan[0], bytes): + plan = [b.decode() for b in plan] + + self.plan = plan + self.structured_plan = self._operation_tree() + + def _compare_operations(self, root_a, root_b): + """ + Compare execution plan operation tree + + Return: True if operation trees are equal, False otherwise + """ + + # compare current root + if root_a != root_b: + return False + + # make sure root have the same number of children + if root_a.child_count() != root_b.child_count(): + return False + + # recursively compare children + for i in range(root_a.child_count()): + if not self._compare_operations(root_a.children[i], root_b.children[i]): + return False + + return True + + def __str__(self) -> str: + def aggraget_str(str_children): + return "\n".join( + [ + " " + line + for str_child in str_children + for line in str_child.splitlines() + ] + ) + + def combine_str(x, y): + return f"{x}\n{y}" + + return self._operation_traverse( + self.structured_plan, str, aggraget_str, combine_str + ) + + def __eq__(self, o: object) -> bool: + """Compares two execution plans + + Return: True if the two plans are equal False otherwise + """ + # make sure 'o' is an execution-plan + if not isinstance(o, ExecutionPlan): + return False + + # get root for both plans + root_a = self.structured_plan + root_b = o.structured_plan + + # compare execution trees + return self._compare_operations(root_a, root_b) + + def _operation_traverse(self, op, op_f, aggregate_f, combine_f): + """ + Traverse operation tree recursively applying functions + + Args: + op: operation to traverse + op_f: function applied for each operation + aggregate_f: aggregation function applied for all children of a single operation + combine_f: combine function applied for the operation result and the children result + """ # noqa + # apply op_f for each operation + op_res = op_f(op) + if len(op.children) == 0: + return op_res # no children return + else: + # apply _operation_traverse recursively + children = [ + self._operation_traverse(child, op_f, aggregate_f, combine_f) + for child in op.children + ] + # combine the operation result with the children aggregated result + return combine_f(op_res, aggregate_f(children)) + + def _operation_tree(self): + """Build the operation tree from the string representation""" + + # initial state + i = 0 + level = 0 + stack = [] + current = None + + def _create_operation(args): + profile_stats = None + name = args[0].strip() + args.pop(0) + if len(args) > 0 
and "Records produced" in args[-1]: + records_produced = int( + re.search("Records produced: (\\d+)", args[-1]).group(1) + ) + execution_time = float( + re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1) + ) + profile_stats = ProfileStats(records_produced, execution_time) + args.pop(-1) + return Operation( + name, None if len(args) == 0 else args[0].strip(), profile_stats + ) + + # iterate plan operations + while i < len(self.plan): + current_op = self.plan[i] + op_level = current_op.count(" ") + if op_level == level: + # if the operation level equal to the current level + # set the current operation and move next + child = _create_operation(current_op.split("|")) + if current: + current = stack.pop() + current.append_child(child) + current = child + i += 1 + elif op_level == level + 1: + # if the operation is child of the current operation + # add it as child and set as current operation + child = _create_operation(current_op.split("|")) + current.append_child(child) + stack.append(current) + current = child + level += 1 + i += 1 + elif op_level < level: + # if the operation is not child of current operation + # go back to it's parent operation + levels_back = level - op_level + 1 + for _ in range(levels_back): + current = stack.pop() + level -= levels_back + else: + raise Exception("corrupted plan") + return stack[0] diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/node.py b/templates/skills/spotify/dependencies/redis/commands/graph/node.py new file mode 100644 index 00000000..4546a393 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/graph/node.py @@ -0,0 +1,88 @@ +from ..helpers import quote_string + + +class Node: + """ + A node within the graph. + """ + + def __init__(self, node_id=None, alias=None, label=None, properties=None): + """ + Create a new node. + """ + self.id = node_id + self.alias = alias + if isinstance(label, list): + label = [inner_label for inner_label in label if inner_label != ""] + + if ( + label is None + or label == "" + or (isinstance(label, list) and len(label) == 0) + ): + self.label = None + self.labels = None + elif isinstance(label, str): + self.label = label + self.labels = [label] + elif isinstance(label, list) and all( + [isinstance(inner_label, str) for inner_label in label] + ): + self.label = label[0] + self.labels = label + else: + raise AssertionError( + "label should be either None, string or a list of strings" + ) + + self.properties = properties or {} + + def to_string(self): + res = "" + if self.properties: + props = ",".join( + key + ":" + str(quote_string(val)) + for key, val in sorted(self.properties.items()) + ) + res += "{" + props + "}" + + return res + + def __str__(self): + res = "(" + if self.alias: + res += self.alias + if self.labels: + res += ":" + ":".join(self.labels) + if self.properties: + props = ",".join( + key + ":" + str(quote_string(val)) + for key, val in sorted(self.properties.items()) + ) + res += "{" + props + "}" + res += ")" + + return res + + def __eq__(self, rhs): + # Type checking + if not isinstance(rhs, Node): + return False + + # Quick positive check, if both IDs are set. + if self.id is not None and rhs.id is not None and self.id != rhs.id: + return False + + # Label should match. + if self.label != rhs.label: + return False + + # Quick check for number of properties. + if len(self.properties) != len(rhs.properties): + return False + + # Compare properties. 
+ if self.properties != rhs.properties: + return False + + return True diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/path.py b/templates/skills/spotify/dependencies/redis/commands/graph/path.py new file mode 100644 index 00000000..ee22dc8c --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/graph/path.py @@ -0,0 +1,78 @@ +from .edge import Edge +from .node import Node + + +class Path: + def __init__(self, nodes, edges): + if not (isinstance(nodes, list) and isinstance(edges, list)): + raise TypeError("nodes and edges must be list") + + self._nodes = nodes + self._edges = edges + self.append_type = Node + + @classmethod + def new_empty_path(cls): + return cls([], []) + + def nodes(self): + return self._nodes + + def edges(self): + return self._edges + + def get_node(self, index): + return self._nodes[index] + + def get_relationship(self, index): + return self._edges[index] + + def first_node(self): + return self._nodes[0] + + def last_node(self): + return self._nodes[-1] + + def edge_count(self): + return len(self._edges) + + def nodes_count(self): + return len(self._nodes) + + def add_node(self, node): + if not isinstance(node, self.append_type): + raise AssertionError("Add Edge before adding Node") + self._nodes.append(node) + self.append_type = Edge + return self + + def add_edge(self, edge): + if not isinstance(edge, self.append_type): + raise AssertionError("Add Node before adding Edge") + self._edges.append(edge) + self.append_type = Node + return self + + def __eq__(self, other): + # Type checking + if not isinstance(other, Path): + return False + + return self.nodes() == other.nodes() and self.edges() == other.edges() + + def __str__(self): + res = "<" + edge_count = self.edge_count() + for i in range(0, edge_count): + node_id = self.get_node(i).id + res += "(" + str(node_id) + ")" + edge = self.get_relationship(i) + res += ( + "-[" + str(int(edge.id)) + "]->" + if edge.src_node == node_id + else "<-[" + str(int(edge.id)) + "]-" + ) + node_id = self.get_node(edge_count).id + res += "(" + str(node_id) + ")" + res += ">" + return res diff --git a/templates/skills/spotify/dependencies/redis/commands/graph/query_result.py b/templates/skills/spotify/dependencies/redis/commands/graph/query_result.py new file mode 100644 index 00000000..7c7f58b9 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/graph/query_result.py @@ -0,0 +1,573 @@ +import sys +from collections import OrderedDict +from distutils.util import strtobool + +# from prettytable import PrettyTable +from redis import ResponseError + +from .edge import Edge +from .exceptions import VersionMismatchException +from .node import Node +from .path import Path + +LABELS_ADDED = "Labels added" +LABELS_REMOVED = "Labels removed" +NODES_CREATED = "Nodes created" +NODES_DELETED = "Nodes deleted" +RELATIONSHIPS_DELETED = "Relationships deleted" +PROPERTIES_SET = "Properties set" +PROPERTIES_REMOVED = "Properties removed" +RELATIONSHIPS_CREATED = "Relationships created" +INDICES_CREATED = "Indices created" +INDICES_DELETED = "Indices deleted" +CACHED_EXECUTION = "Cached execution" +INTERNAL_EXECUTION_TIME = "internal execution time" + +STATS = [ + LABELS_ADDED, + LABELS_REMOVED, + NODES_CREATED, + PROPERTIES_SET, + PROPERTIES_REMOVED, + RELATIONSHIPS_CREATED, + NODES_DELETED, + RELATIONSHIPS_DELETED, + INDICES_CREATED, + INDICES_DELETED, + CACHED_EXECUTION, + INTERNAL_EXECUTION_TIME, +] + + +class ResultSetColumnTypes: + COLUMN_UNKNOWN = 0 + COLUMN_SCALAR = 1 + COLUMN_NODE 
= 2 # Unused as of RedisGraph v2.1.0, retained for backwards compatibility. # noqa + COLUMN_RELATION = 3 # Unused as of RedisGraph v2.1.0, retained for backwards compatibility. # noqa + + +class ResultSetScalarTypes: + VALUE_UNKNOWN = 0 + VALUE_NULL = 1 + VALUE_STRING = 2 + VALUE_INTEGER = 3 + VALUE_BOOLEAN = 4 + VALUE_DOUBLE = 5 + VALUE_ARRAY = 6 + VALUE_EDGE = 7 + VALUE_NODE = 8 + VALUE_PATH = 9 + VALUE_MAP = 10 + VALUE_POINT = 11 + + +class QueryResult: + def __init__(self, graph, response, profile=False): + """ + A class that represents a result of the query operation. + + Args: + + graph: + The graph on which the query was executed. + response: + The response from the server. + profile: + A boolean indicating if the query command was "GRAPH.PROFILE" + """ + self.graph = graph + self.header = [] + self.result_set = [] + + # in case of an error an exception will be raised + self._check_for_errors(response) + + if len(response) == 1: + self.parse_statistics(response[0]) + elif profile: + self.parse_profile(response) + else: + # start by parsing statistics, matches the one we have + self.parse_statistics(response[-1]) # Last element. + self.parse_results(response) + + def _check_for_errors(self, response): + """ + Check if the response contains an error. + """ + if isinstance(response[0], ResponseError): + error = response[0] + if str(error) == "version mismatch": + version = response[1] + error = VersionMismatchException(version) + raise error + + # If we encountered a run-time error, the last response + # element will be an exception + if isinstance(response[-1], ResponseError): + raise response[-1] + + def parse_results(self, raw_result_set): + """ + Parse the query execution result returned from the server. + """ + self.header = self.parse_header(raw_result_set) + + # Empty header. + if len(self.header) == 0: + return + + self.result_set = self.parse_records(raw_result_set) + + def parse_statistics(self, raw_statistics): + """ + Parse the statistics returned in the response. + """ + self.statistics = {} + + # decode statistics + for idx, stat in enumerate(raw_statistics): + if isinstance(stat, bytes): + raw_statistics[idx] = stat.decode() + + for s in STATS: + v = self._get_value(s, raw_statistics) + if v is not None: + self.statistics[s] = v + + def parse_header(self, raw_result_set): + """ + Parse the header of the result. + """ + # An array of column name/column type pairs. + header = raw_result_set[0] + return header + + def parse_records(self, raw_result_set): + """ + Parses the result set and returns a list of records. + """ + records = [ + [ + self.parse_record_types[self.header[idx][0]](cell) + for idx, cell in enumerate(row) + ] + for row in raw_result_set[1] + ] + + return records + + def parse_entity_properties(self, props): + """ + Parse node / edge properties. + """ + # [[name, value type, value] X N] + properties = {} + for prop in props: + prop_name = self.graph.get_property(prop[0]) + prop_value = self.parse_scalar(prop[1:]) + properties[prop_name] = prop_value + + return properties + + def parse_string(self, cell): + """ + Parse the cell as a string. + """ + if isinstance(cell, bytes): + return cell.decode() + elif not isinstance(cell, str): + return str(cell) + else: + return cell + + def parse_node(self, cell): + """ + Parse the cell to a node. 
+ """ + # Node ID (integer), + # [label string offset (integer)], + # [[name, value type, value] X N] + + node_id = int(cell[0]) + labels = None + if len(cell[1]) > 0: + labels = [] + for inner_label in cell[1]: + labels.append(self.graph.get_label(inner_label)) + properties = self.parse_entity_properties(cell[2]) + return Node(node_id=node_id, label=labels, properties=properties) + + def parse_edge(self, cell): + """ + Parse the cell to an edge. + """ + # Edge ID (integer), + # reltype string offset (integer), + # src node ID offset (integer), + # dest node ID offset (integer), + # [[name, value, value type] X N] + + edge_id = int(cell[0]) + relation = self.graph.get_relation(cell[1]) + src_node_id = int(cell[2]) + dest_node_id = int(cell[3]) + properties = self.parse_entity_properties(cell[4]) + return Edge( + src_node_id, relation, dest_node_id, edge_id=edge_id, properties=properties + ) + + def parse_path(self, cell): + """ + Parse the cell to a path. + """ + nodes = self.parse_scalar(cell[0]) + edges = self.parse_scalar(cell[1]) + return Path(nodes, edges) + + def parse_map(self, cell): + """ + Parse the cell as a map. + """ + m = OrderedDict() + n_entries = len(cell) + + # A map is an array of key value pairs. + # 1. key (string) + # 2. array: (value type, value) + for i in range(0, n_entries, 2): + key = self.parse_string(cell[i]) + m[key] = self.parse_scalar(cell[i + 1]) + + return m + + def parse_point(self, cell): + """ + Parse the cell to point. + """ + p = {} + # A point is received an array of the form: [latitude, longitude] + # It is returned as a map of the form: {"latitude": latitude, "longitude": longitude} # noqa + p["latitude"] = float(cell[0]) + p["longitude"] = float(cell[1]) + return p + + def parse_null(self, cell): + """ + Parse a null value. + """ + return None + + def parse_integer(self, cell): + """ + Parse the integer value from the cell. + """ + return int(cell) + + def parse_boolean(self, value): + """ + Parse the cell value as a boolean. + """ + value = value.decode() if isinstance(value, bytes) else value + try: + scalar = True if strtobool(value) else False + except ValueError: + sys.stderr.write("unknown boolean type\n") + scalar = None + return scalar + + def parse_double(self, cell): + """ + Parse the cell as a double. + """ + return float(cell) + + def parse_array(self, value): + """ + Parse an array of values. + """ + scalar = [self.parse_scalar(value[i]) for i in range(len(value))] + return scalar + + def parse_unknown(self, cell): + """ + Parse a cell of unknown type. + """ + sys.stderr.write("Unknown type\n") + return None + + def parse_scalar(self, cell): + """ + Parse a scalar value from a cell in the result set. 
+ """ + scalar_type = int(cell[0]) + value = cell[1] + scalar = self.parse_scalar_types[scalar_type](value) + + return scalar + + def parse_profile(self, response): + self.result_set = [x[0 : x.index(",")].strip() for x in response] + + def is_empty(self): + return len(self.result_set) == 0 + + @staticmethod + def _get_value(prop, statistics): + for stat in statistics: + if prop in stat: + return float(stat.split(": ")[1].split(" ")[0]) + + return None + + def _get_stat(self, stat): + return self.statistics[stat] if stat in self.statistics else 0 + + @property + def labels_added(self): + """Returns the number of labels added in the query""" + return self._get_stat(LABELS_ADDED) + + @property + def labels_removed(self): + """Returns the number of labels removed in the query""" + return self._get_stat(LABELS_REMOVED) + + @property + def nodes_created(self): + """Returns the number of nodes created in the query""" + return self._get_stat(NODES_CREATED) + + @property + def nodes_deleted(self): + """Returns the number of nodes deleted in the query""" + return self._get_stat(NODES_DELETED) + + @property + def properties_set(self): + """Returns the number of properties set in the query""" + return self._get_stat(PROPERTIES_SET) + + @property + def properties_removed(self): + """Returns the number of properties removed in the query""" + return self._get_stat(PROPERTIES_REMOVED) + + @property + def relationships_created(self): + """Returns the number of relationships created in the query""" + return self._get_stat(RELATIONSHIPS_CREATED) + + @property + def relationships_deleted(self): + """Returns the number of relationships deleted in the query""" + return self._get_stat(RELATIONSHIPS_DELETED) + + @property + def indices_created(self): + """Returns the number of indices created in the query""" + return self._get_stat(INDICES_CREATED) + + @property + def indices_deleted(self): + """Returns the number of indices deleted in the query""" + return self._get_stat(INDICES_DELETED) + + @property + def cached_execution(self): + """Returns whether or not the query execution plan was cached""" + return self._get_stat(CACHED_EXECUTION) == 1 + + @property + def run_time_ms(self): + """Returns the server execution time of the query""" + return self._get_stat(INTERNAL_EXECUTION_TIME) + + @property + def parse_scalar_types(self): + return { + ResultSetScalarTypes.VALUE_NULL: self.parse_null, + ResultSetScalarTypes.VALUE_STRING: self.parse_string, + ResultSetScalarTypes.VALUE_INTEGER: self.parse_integer, + ResultSetScalarTypes.VALUE_BOOLEAN: self.parse_boolean, + ResultSetScalarTypes.VALUE_DOUBLE: self.parse_double, + ResultSetScalarTypes.VALUE_ARRAY: self.parse_array, + ResultSetScalarTypes.VALUE_NODE: self.parse_node, + ResultSetScalarTypes.VALUE_EDGE: self.parse_edge, + ResultSetScalarTypes.VALUE_PATH: self.parse_path, + ResultSetScalarTypes.VALUE_MAP: self.parse_map, + ResultSetScalarTypes.VALUE_POINT: self.parse_point, + ResultSetScalarTypes.VALUE_UNKNOWN: self.parse_unknown, + } + + @property + def parse_record_types(self): + return { + ResultSetColumnTypes.COLUMN_SCALAR: self.parse_scalar, + ResultSetColumnTypes.COLUMN_NODE: self.parse_node, + ResultSetColumnTypes.COLUMN_RELATION: self.parse_edge, + ResultSetColumnTypes.COLUMN_UNKNOWN: self.parse_unknown, + } + + +class AsyncQueryResult(QueryResult): + """ + Async version for the QueryResult class - a class that + represents a result of the query operation. 
+ """ + + def __init__(self): + """ + To init the class you must call self.initialize() + """ + pass + + async def initialize(self, graph, response, profile=False): + """ + Initializes the class. + Args: + + graph: + The graph on which the query was executed. + response: + The response from the server. + profile: + A boolean indicating if the query command was "GRAPH.PROFILE" + """ + self.graph = graph + self.header = [] + self.result_set = [] + + # in case of an error an exception will be raised + self._check_for_errors(response) + + if len(response) == 1: + self.parse_statistics(response[0]) + elif profile: + self.parse_profile(response) + else: + # start by parsing statistics, matches the one we have + self.parse_statistics(response[-1]) # Last element. + await self.parse_results(response) + + return self + + async def parse_node(self, cell): + """ + Parses a node from the cell. + """ + # Node ID (integer), + # [label string offset (integer)], + # [[name, value type, value] X N] + + labels = None + if len(cell[1]) > 0: + labels = [] + for inner_label in cell[1]: + labels.append(await self.graph.get_label(inner_label)) + properties = await self.parse_entity_properties(cell[2]) + node_id = int(cell[0]) + return Node(node_id=node_id, label=labels, properties=properties) + + async def parse_scalar(self, cell): + """ + Parses a scalar value from the server response. + """ + scalar_type = int(cell[0]) + value = cell[1] + try: + scalar = await self.parse_scalar_types[scalar_type](value) + except TypeError: + # Not all of the functions are async + scalar = self.parse_scalar_types[scalar_type](value) + + return scalar + + async def parse_records(self, raw_result_set): + """ + Parses the result set and returns a list of records. + """ + records = [] + for row in raw_result_set[1]: + record = [ + await self.parse_record_types[self.header[idx][0]](cell) + for idx, cell in enumerate(row) + ] + records.append(record) + + return records + + async def parse_results(self, raw_result_set): + """ + Parse the query execution result returned from the server. + """ + self.header = self.parse_header(raw_result_set) + + # Empty header. + if len(self.header) == 0: + return + + self.result_set = await self.parse_records(raw_result_set) + + async def parse_entity_properties(self, props): + """ + Parse node / edge properties. + """ + # [[name, value type, value] X N] + properties = {} + for prop in props: + prop_name = await self.graph.get_property(prop[0]) + prop_value = await self.parse_scalar(prop[1:]) + properties[prop_name] = prop_value + + return properties + + async def parse_edge(self, cell): + """ + Parse the cell to an edge. + """ + # Edge ID (integer), + # reltype string offset (integer), + # src node ID offset (integer), + # dest node ID offset (integer), + # [[name, value, value type] X N] + + edge_id = int(cell[0]) + relation = await self.graph.get_relation(cell[1]) + src_node_id = int(cell[2]) + dest_node_id = int(cell[3]) + properties = await self.parse_entity_properties(cell[4]) + return Edge( + src_node_id, relation, dest_node_id, edge_id=edge_id, properties=properties + ) + + async def parse_path(self, cell): + """ + Parse the cell to a path. + """ + nodes = await self.parse_scalar(cell[0]) + edges = await self.parse_scalar(cell[1]) + return Path(nodes, edges) + + async def parse_map(self, cell): + """ + Parse the cell to a map. + """ + m = OrderedDict() + n_entries = len(cell) + + # A map is an array of key value pairs. + # 1. key (string) + # 2. 
array: (value type, value) + for i in range(0, n_entries, 2): + key = self.parse_string(cell[i]) + m[key] = await self.parse_scalar(cell[i + 1]) + + return m + + async def parse_array(self, value): + """ + Parse array value. + """ + scalar = [await self.parse_scalar(value[i]) for i in range(len(value))] + return scalar diff --git a/templates/skills/spotify/dependencies/redis/commands/helpers.py b/templates/skills/spotify/dependencies/redis/commands/helpers.py new file mode 100644 index 00000000..127141f6 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/helpers.py @@ -0,0 +1,171 @@ +import copy +import random +import string +from typing import List, Tuple + +import redis +from redis.typing import KeysT, KeyT + + +def list_or_args(keys: KeysT, args: Tuple[KeyT, ...]) -> List[KeyT]: + # returns a single new list combining keys and args + try: + iter(keys) + # a string or bytes instance can be iterated, but indicates + # keys wasn't passed as a list + if isinstance(keys, (bytes, str)): + keys = [keys] + else: + keys = list(keys) + except TypeError: + keys = [keys] + if args: + keys.extend(args) + return keys + + +def nativestr(x): + """Return the decoded binary string, or a string, depending on type.""" + r = x.decode("utf-8", "replace") if isinstance(x, bytes) else x + if r == "null": + return + return r + + +def delist(x): + """Given a list of binaries, return the stringified version.""" + if x is None: + return x + return [nativestr(obj) for obj in x] + + +def parse_to_list(response): + """Optimistically parse the response to a list.""" + res = [] + + if response is None: + return res + + for item in response: + try: + res.append(int(item)) + except ValueError: + try: + res.append(float(item)) + except ValueError: + res.append(nativestr(item)) + except TypeError: + res.append(None) + return res + + +def parse_list_to_dict(response): + res = {} + for i in range(0, len(response), 2): + if isinstance(response[i], list): + res["Child iterators"].append(parse_list_to_dict(response[i])) + try: + if isinstance(response[i + 1], list): + res["Child iterators"].append(parse_list_to_dict(response[i + 1])) + except IndexError: + pass + elif isinstance(response[i + 1], list): + res["Child iterators"] = [parse_list_to_dict(response[i + 1])] + else: + try: + res[response[i]] = float(response[i + 1]) + except (TypeError, ValueError): + res[response[i]] = response[i + 1] + return res + + +def parse_to_dict(response): + if response is None: + return {} + + res = {} + for det in response: + if isinstance(det[1], list): + res[det[0]] = parse_list_to_dict(det[1]) + else: + try: # try to set the attribute. may be provided without value + try: # try to convert the value to float + res[det[0]] = float(det[1]) + except (TypeError, ValueError): + res[det[0]] = det[1] + except IndexError: + pass + return res + + +def random_string(length=10): + """ + Returns a random N character long string. + """ + return "".join( # nosec + random.choice(string.ascii_lowercase) for x in range(length) + ) + + +def quote_string(v): + """ + RedisGraph strings must be quoted, + quote_string wraps given v with quotes incase + v is a string. 
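+
+ For example, quote_string("hello") returns '"hello"', an empty
+ string returns '""', and a non-string value such as 3.14 is
+ returned unchanged.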
+ """ + + if isinstance(v, bytes): + v = v.decode() + elif not isinstance(v, str): + return v + if len(v) == 0: + return '""' + + v = v.replace("\\", "\\\\") + v = v.replace('"', '\\"') + + return f'"{v}"' + + +def decode_dict_keys(obj): + """Decode the keys of the given dictionary with utf-8.""" + newobj = copy.copy(obj) + for k in obj.keys(): + if isinstance(k, bytes): + newobj[k.decode("utf-8")] = newobj[k] + newobj.pop(k) + return newobj + + +def stringify_param_value(value): + """ + Turn a parameter value into a string suitable for the params header of + a Cypher command. + You may pass any value that would be accepted by `json.dumps()`. + + Ways in which output differs from that of `str()`: + * Strings are quoted. + * None --> "null". + * In dictionaries, keys are _not_ quoted. + + :param value: The parameter value to be turned into a string. + :return: string + """ + + if isinstance(value, str): + return quote_string(value) + elif value is None: + return "null" + elif isinstance(value, (list, tuple)): + return f'[{",".join(map(stringify_param_value, value))}]' + elif isinstance(value, dict): + return f'{{{",".join(f"{k}:{stringify_param_value(v)}" for k, v in value.items())}}}' # noqa + else: + return str(value) + + +def get_protocol_version(client): + if isinstance(client, redis.Redis) or isinstance(client, redis.asyncio.Redis): + return client.connection_pool.connection_kwargs.get("protocol") + elif isinstance(client, redis.cluster.AbstractRedisCluster): + return client.nodes_manager.connection_kwargs.get("protocol") diff --git a/templates/skills/spotify/dependencies/redis/commands/json/__init__.py b/templates/skills/spotify/dependencies/redis/commands/json/__init__.py new file mode 100644 index 00000000..01077e6b --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/json/__init__.py @@ -0,0 +1,147 @@ +from json import JSONDecodeError, JSONDecoder, JSONEncoder + +import redis + +from ..helpers import get_protocol_version, nativestr +from .commands import JSONCommands +from .decoders import bulk_of_jsons, decode_list + + +class JSON(JSONCommands): + """ + Create a client for talking to json. + + :param decoder: + :type json.JSONDecoder: An instance of json.JSONDecoder + + :param encoder: + :type json.JSONEncoder: An instance of json.JSONEncoder + """ + + def __init__( + self, client, version=None, decoder=JSONDecoder(), encoder=JSONEncoder() + ): + """ + Create a client for talking to json. 
+ + :param decoder: + :type json.JSONDecoder: An instance of json.JSONDecoder + + :param encoder: + :type json.JSONEncoder: An instance of json.JSONEncoder + """ + # Set the module commands' callbacks + self._MODULE_CALLBACKS = { + "JSON.ARRPOP": self._decode, + "JSON.DEBUG": self._decode, + "JSON.GET": self._decode, + "JSON.MERGE": lambda r: r and nativestr(r) == "OK", + "JSON.MGET": bulk_of_jsons(self._decode), + "JSON.MSET": lambda r: r and nativestr(r) == "OK", + "JSON.RESP": self._decode, + "JSON.SET": lambda r: r and nativestr(r) == "OK", + "JSON.TOGGLE": self._decode, + } + + _RESP2_MODULE_CALLBACKS = { + "JSON.ARRAPPEND": self._decode, + "JSON.ARRINDEX": self._decode, + "JSON.ARRINSERT": self._decode, + "JSON.ARRLEN": self._decode, + "JSON.ARRTRIM": self._decode, + "JSON.CLEAR": int, + "JSON.DEL": int, + "JSON.FORGET": int, + "JSON.GET": self._decode, + "JSON.NUMINCRBY": self._decode, + "JSON.NUMMULTBY": self._decode, + "JSON.OBJKEYS": self._decode, + "JSON.STRAPPEND": self._decode, + "JSON.OBJLEN": self._decode, + "JSON.STRLEN": self._decode, + "JSON.TOGGLE": self._decode, + } + + _RESP3_MODULE_CALLBACKS = {} + + self.client = client + self.execute_command = client.execute_command + self.MODULE_VERSION = version + + if get_protocol_version(self.client) in ["3", 3]: + self._MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS) + else: + self._MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS) + + for key, value in self._MODULE_CALLBACKS.items(): + self.client.set_response_callback(key, value) + + self.__encoder__ = encoder + self.__decoder__ = decoder + + def _decode(self, obj): + """Get the decoder.""" + if obj is None: + return obj + + try: + x = self.__decoder__.decode(obj) + if x is None: + raise TypeError + return x + except TypeError: + try: + return self.__decoder__.decode(obj.decode()) + except AttributeError: + return decode_list(obj) + except (AttributeError, JSONDecodeError): + return decode_list(obj) + + def _encode(self, obj): + """Get the encoder.""" + return self.__encoder__.encode(obj) + + def pipeline(self, transaction=True, shard_hint=None): + """Creates a pipeline for the JSON module, that can be used for executing + JSON commands, as well as classic core commands. 
+ + Usage example: + + r = redis.Redis() + pipe = r.json().pipeline() + pipe.jsonset('foo', '.', {'hello!': 'world'}) + pipe.jsonget('foo') + pipe.jsonget('notakey') + """ + if isinstance(self.client, redis.RedisCluster): + p = ClusterPipeline( + nodes_manager=self.client.nodes_manager, + commands_parser=self.client.commands_parser, + startup_nodes=self.client.nodes_manager.startup_nodes, + result_callbacks=self.client.result_callbacks, + cluster_response_callbacks=self.client.cluster_response_callbacks, + cluster_error_retry_attempts=self.client.cluster_error_retry_attempts, + read_from_replicas=self.client.read_from_replicas, + reinitialize_steps=self.client.reinitialize_steps, + lock=self.client._lock, + ) + + else: + p = Pipeline( + connection_pool=self.client.connection_pool, + response_callbacks=self._MODULE_CALLBACKS, + transaction=transaction, + shard_hint=shard_hint, + ) + + p._encode = self._encode + p._decode = self._decode + return p + + +class ClusterPipeline(JSONCommands, redis.cluster.ClusterPipeline): + """Cluster pipeline for the module.""" + + +class Pipeline(JSONCommands, redis.client.Pipeline): + """Pipeline for the module.""" diff --git a/templates/skills/spotify/dependencies/redis/commands/json/_util.py b/templates/skills/spotify/dependencies/redis/commands/json/_util.py new file mode 100644 index 00000000..3400bcd6 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/json/_util.py @@ -0,0 +1,3 @@ +from typing import Any, Dict, List, Union + +JsonType = Union[str, int, float, bool, None, Dict[str, Any], List[Any]] diff --git a/templates/skills/spotify/dependencies/redis/commands/json/commands.py b/templates/skills/spotify/dependencies/redis/commands/json/commands.py new file mode 100644 index 00000000..0e46e40f --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/json/commands.py @@ -0,0 +1,431 @@ +import os +from json import JSONDecodeError, loads +from typing import Dict, List, Optional, Tuple, Union + +from redis.exceptions import DataError +from redis.utils import deprecated_function + +from ._util import JsonType +from .decoders import decode_dict_keys +from .path import Path + + +class JSONCommands: + """json commands.""" + + def arrappend( + self, name: str, path: Optional[str] = Path.root_path(), *args: List[JsonType] + ) -> List[Union[int, None]]: + """Append the objects ``args`` to the array under the + ``path` in key ``name``. + + For more information see `JSON.ARRAPPEND `_.. + """ # noqa + pieces = [name, str(path)] + for o in args: + pieces.append(self._encode(o)) + return self.execute_command("JSON.ARRAPPEND", *pieces) + + def arrindex( + self, + name: str, + path: str, + scalar: int, + start: Optional[int] = None, + stop: Optional[int] = None, + ) -> List[Union[int, None]]: + """ + Return the index of ``scalar`` in the JSON array under ``path`` at key + ``name``. + + The search can be limited using the optional inclusive ``start`` + and exclusive ``stop`` indices. + + For more information see `JSON.ARRINDEX `_. + """ # noqa + pieces = [name, str(path), self._encode(scalar)] + if start is not None: + pieces.append(start) + if stop is not None: + pieces.append(stop) + + return self.execute_command("JSON.ARRINDEX", *pieces) + + def arrinsert( + self, name: str, path: str, index: int, *args: List[JsonType] + ) -> List[Union[int, None]]: + """Insert the objects ``args`` to the array at index ``index`` + under the ``path` in key ``name``. + + For more information see `JSON.ARRINSERT `_. 
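+
+ Illustrative example, assuming key "arr" holds ["b", "c"]:
+
+ r.json().arrinsert("arr", Path.root_path(), 0, "a")
+ # "arr" becomes ["a", "b", "c"]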
+ """ # noqa + pieces = [name, str(path), index] + for o in args: + pieces.append(self._encode(o)) + return self.execute_command("JSON.ARRINSERT", *pieces) + + def arrlen( + self, name: str, path: Optional[str] = Path.root_path() + ) -> List[Union[int, None]]: + """Return the length of the array JSON value under ``path`` + at key``name``. + + For more information see `JSON.ARRLEN `_. + """ # noqa + return self.execute_command("JSON.ARRLEN", name, str(path)) + + def arrpop( + self, + name: str, + path: Optional[str] = Path.root_path(), + index: Optional[int] = -1, + ) -> List[Union[str, None]]: + """Pop the element at ``index`` in the array JSON value under + ``path`` at key ``name``. + + For more information see `JSON.ARRPOP `_. + """ # noqa + return self.execute_command("JSON.ARRPOP", name, str(path), index) + + def arrtrim( + self, name: str, path: str, start: int, stop: int + ) -> List[Union[int, None]]: + """Trim the array JSON value under ``path`` at key ``name`` to the + inclusive range given by ``start`` and ``stop``. + + For more information see `JSON.ARRTRIM `_. + """ # noqa + return self.execute_command("JSON.ARRTRIM", name, str(path), start, stop) + + def type(self, name: str, path: Optional[str] = Path.root_path()) -> List[str]: + """Get the type of the JSON value under ``path`` from key ``name``. + + For more information see `JSON.TYPE `_. + """ # noqa + return self.execute_command("JSON.TYPE", name, str(path)) + + def resp(self, name: str, path: Optional[str] = Path.root_path()) -> List: + """Return the JSON value under ``path`` at key ``name``. + + For more information see `JSON.RESP `_. + """ # noqa + return self.execute_command("JSON.RESP", name, str(path)) + + def objkeys( + self, name: str, path: Optional[str] = Path.root_path() + ) -> List[Union[List[str], None]]: + """Return the key names in the dictionary JSON value under ``path`` at + key ``name``. + + For more information see `JSON.OBJKEYS `_. + """ # noqa + return self.execute_command("JSON.OBJKEYS", name, str(path)) + + def objlen( + self, name: str, path: Optional[str] = Path.root_path() + ) -> List[Optional[int]]: + """Return the length of the dictionary JSON value under ``path`` at key + ``name``. + + For more information see `JSON.OBJLEN `_. + """ # noqa + return self.execute_command("JSON.OBJLEN", name, str(path)) + + def numincrby(self, name: str, path: str, number: int) -> str: + """Increment the numeric (integer or floating point) JSON value under + ``path`` at key ``name`` by the provided ``number``. + + For more information see `JSON.NUMINCRBY `_. + """ # noqa + return self.execute_command( + "JSON.NUMINCRBY", name, str(path), self._encode(number) + ) + + @deprecated_function(version="4.0.0", reason="deprecated since redisjson 1.0.0") + def nummultby(self, name: str, path: str, number: int) -> str: + """Multiply the numeric (integer or floating point) JSON value under + ``path`` at key ``name`` with the provided ``number``. + + For more information see `JSON.NUMMULTBY `_. + """ # noqa + return self.execute_command( + "JSON.NUMMULTBY", name, str(path), self._encode(number) + ) + + def clear(self, name: str, path: Optional[str] = Path.root_path()) -> int: + """Empty arrays and objects (to have zero slots/keys without deleting the + array/object). + + Return the count of cleared paths (ignoring non-array and non-objects + paths). + + For more information see `JSON.CLEAR `_. 
+ """ # noqa + return self.execute_command("JSON.CLEAR", name, str(path)) + + def delete(self, key: str, path: Optional[str] = Path.root_path()) -> int: + """Delete the JSON value stored at key ``key`` under ``path``. + + For more information see `JSON.DEL `_. + """ + return self.execute_command("JSON.DEL", key, str(path)) + + # forget is an alias for delete + forget = delete + + def get( + self, name: str, *args, no_escape: Optional[bool] = False + ) -> Optional[List[JsonType]]: + """ + Get the object stored as a JSON value at key ``name``. + + ``args`` is zero or more paths, and defaults to root path + ```no_escape`` is a boolean flag to add no_escape option to get + non-ascii characters + + For more information see `JSON.GET `_. + """ # noqa + pieces = [name] + if no_escape: + pieces.append("noescape") + + if len(args) == 0: + pieces.append(Path.root_path()) + + else: + for p in args: + pieces.append(str(p)) + + # Handle case where key doesn't exist. The JSONDecoder would raise a + # TypeError exception since it can't decode None + try: + return self.execute_command("JSON.GET", *pieces) + except TypeError: + return None + + def mget(self, keys: List[str], path: str) -> List[JsonType]: + """ + Get the objects stored as a JSON values under ``path``. ``keys`` + is a list of one or more keys. + + For more information see `JSON.MGET `_. + """ # noqa + pieces = [] + pieces += keys + pieces.append(str(path)) + return self.execute_command("JSON.MGET", *pieces) + + def set( + self, + name: str, + path: str, + obj: JsonType, + nx: Optional[bool] = False, + xx: Optional[bool] = False, + decode_keys: Optional[bool] = False, + ) -> Optional[str]: + """ + Set the JSON value at key ``name`` under the ``path`` to ``obj``. + + ``nx`` if set to True, set ``value`` only if it does not exist. + ``xx`` if set to True, set ``value`` only if it exists. + ``decode_keys`` If set to True, the keys of ``obj`` will be decoded + with utf-8. + + For the purpose of using this within a pipeline, this command is also + aliased to JSON.SET. + + For more information see `JSON.SET `_. + """ + if decode_keys: + obj = decode_dict_keys(obj) + + pieces = [name, str(path), self._encode(obj)] + + # Handle existential modifiers + if nx and xx: + raise Exception( + "nx and xx are mutually exclusive: use one, the " + "other or neither - but not both" + ) + elif nx: + pieces.append("NX") + elif xx: + pieces.append("XX") + return self.execute_command("JSON.SET", *pieces) + + def mset(self, triplets: List[Tuple[str, str, JsonType]]) -> Optional[str]: + """ + Set the JSON value at key ``name`` under the ``path`` to ``obj`` + for one or more keys. + + ``triplets`` is a list of one or more triplets of key, path, value. + + For the purpose of using this within a pipeline, this command is also + aliased to JSON.MSET. + + For more information see `JSON.MSET `_. + """ + pieces = [] + for triplet in triplets: + pieces.extend([triplet[0], str(triplet[1]), self._encode(triplet[2])]) + return self.execute_command("JSON.MSET", *pieces) + + def merge( + self, + name: str, + path: str, + obj: JsonType, + decode_keys: Optional[bool] = False, + ) -> Optional[str]: + """ + Merges a given JSON value into matching paths. Consequently, JSON values + at matching paths are updated, deleted, or expanded with new children + + ``decode_keys`` If set to True, the keys of ``obj`` will be decoded + with utf-8. + + For more information see `JSON.MERGE `_. 
+ """ + if decode_keys: + obj = decode_dict_keys(obj) + + pieces = [name, str(path), self._encode(obj)] + + return self.execute_command("JSON.MERGE", *pieces) + + def set_file( + self, + name: str, + path: str, + file_name: str, + nx: Optional[bool] = False, + xx: Optional[bool] = False, + decode_keys: Optional[bool] = False, + ) -> Optional[str]: + """ + Set the JSON value at key ``name`` under the ``path`` to the content + of the json file ``file_name``. + + ``nx`` if set to True, set ``value`` only if it does not exist. + ``xx`` if set to True, set ``value`` only if it exists. + ``decode_keys`` If set to True, the keys of ``obj`` will be decoded + with utf-8. + + """ + + with open(file_name, "r") as fp: + file_content = loads(fp.read()) + + return self.set(name, path, file_content, nx=nx, xx=xx, decode_keys=decode_keys) + + def set_path( + self, + json_path: str, + root_folder: str, + nx: Optional[bool] = False, + xx: Optional[bool] = False, + decode_keys: Optional[bool] = False, + ) -> Dict[str, bool]: + """ + Iterate over ``root_folder`` and set each JSON file to a value + under ``json_path`` with the file name as the key. + + ``nx`` if set to True, set ``value`` only if it does not exist. + ``xx`` if set to True, set ``value`` only if it exists. + ``decode_keys`` If set to True, the keys of ``obj`` will be decoded + with utf-8. + + """ + set_files_result = {} + for root, dirs, files in os.walk(root_folder): + for file in files: + file_path = os.path.join(root, file) + try: + file_name = file_path.rsplit(".")[0] + self.set_file( + file_name, + json_path, + file_path, + nx=nx, + xx=xx, + decode_keys=decode_keys, + ) + set_files_result[file_path] = True + except JSONDecodeError: + set_files_result[file_path] = False + + return set_files_result + + def strlen(self, name: str, path: Optional[str] = None) -> List[Union[int, None]]: + """Return the length of the string JSON value under ``path`` at key + ``name``. + + For more information see `JSON.STRLEN `_. + """ # noqa + pieces = [name] + if path is not None: + pieces.append(str(path)) + return self.execute_command("JSON.STRLEN", *pieces) + + def toggle( + self, name: str, path: Optional[str] = Path.root_path() + ) -> Union[bool, List[Optional[int]]]: + """Toggle boolean value under ``path`` at key ``name``. + returning the new value. + + For more information see `JSON.TOGGLE `_. + """ # noqa + return self.execute_command("JSON.TOGGLE", name, str(path)) + + def strappend( + self, name: str, value: str, path: Optional[str] = Path.root_path() + ) -> Union[int, List[Optional[int]]]: + """Append to the string JSON value. If two options are specified after + the key name, the path is determined to be the first. If a single + option is passed, then the root_path (i.e Path.root_path()) is used. + + For more information see `JSON.STRAPPEND `_. + """ # noqa + pieces = [name, str(path), self._encode(value)] + return self.execute_command("JSON.STRAPPEND", *pieces) + + def debug( + self, + subcommand: str, + key: Optional[str] = None, + path: Optional[str] = Path.root_path(), + ) -> Union[int, List[str]]: + """Return the memory usage in bytes of a value under ``path`` from + key ``name``. + + For more information see `JSON.DEBUG `_. 
+ """ # noqa + valid_subcommands = ["MEMORY", "HELP"] + if subcommand not in valid_subcommands: + raise DataError("The only valid subcommands are ", str(valid_subcommands)) + pieces = [subcommand] + if subcommand == "MEMORY": + if key is None: + raise DataError("No key specified") + pieces.append(key) + pieces.append(str(path)) + return self.execute_command("JSON.DEBUG", *pieces) + + @deprecated_function( + version="4.0.0", reason="redisjson-py supported this, call get directly." + ) + def jsonget(self, *args, **kwargs): + return self.get(*args, **kwargs) + + @deprecated_function( + version="4.0.0", reason="redisjson-py supported this, call get directly." + ) + def jsonmget(self, *args, **kwargs): + return self.mget(*args, **kwargs) + + @deprecated_function( + version="4.0.0", reason="redisjson-py supported this, call get directly." + ) + def jsonset(self, *args, **kwargs): + return self.set(*args, **kwargs) diff --git a/templates/skills/spotify/dependencies/redis/commands/json/decoders.py b/templates/skills/spotify/dependencies/redis/commands/json/decoders.py new file mode 100644 index 00000000..b9384711 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/json/decoders.py @@ -0,0 +1,60 @@ +import copy +import re + +from ..helpers import nativestr + + +def bulk_of_jsons(d): + """Replace serialized JSON values with objects in a + bulk array response (list). + """ + + def _f(b): + for index, item in enumerate(b): + if item is not None: + b[index] = d(item) + return b + + return _f + + +def decode_dict_keys(obj): + """Decode the keys of the given dictionary with utf-8.""" + newobj = copy.copy(obj) + for k in obj.keys(): + if isinstance(k, bytes): + newobj[k.decode("utf-8")] = newobj[k] + newobj.pop(k) + return newobj + + +def unstring(obj): + """ + Attempt to parse string to native integer formats. + One can't simply call int/float in a try/catch because there is a + semantic difference between (for example) 15.0 and 15. + """ + floatreg = "^\\d+.\\d+$" + match = re.findall(floatreg, obj) + if match != []: + return float(match[0]) + + intreg = "^\\d+$" + match = re.findall(intreg, obj) + if match != []: + return int(match[0]) + return obj + + +def decode_list(b): + """ + Given a non-deserializable object, make a best effort to + return a useful set of results. + """ + if isinstance(b, list): + return [nativestr(obj) for obj in b] + elif isinstance(b, bytes): + return unstring(nativestr(b)) + elif isinstance(b, str): + return unstring(b) + return b diff --git a/templates/skills/spotify/dependencies/redis/commands/json/path.py b/templates/skills/spotify/dependencies/redis/commands/json/path.py new file mode 100644 index 00000000..bfb0ab2d --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/json/path.py @@ -0,0 +1,16 @@ +class Path: + """This class represents a path in a JSON value.""" + + strPath = "" + + @staticmethod + def root_path(): + """Return the root path's string representation.""" + return "." 
+ + def __init__(self, path): + """Make a new path based on the string representation in `path`.""" + self.strPath = path + + def __repr__(self): + return self.strPath diff --git a/templates/skills/spotify/dependencies/redis/commands/redismodules.py b/templates/skills/spotify/dependencies/redis/commands/redismodules.py new file mode 100644 index 00000000..7e2045a7 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/redismodules.py @@ -0,0 +1,103 @@ +from json import JSONDecoder, JSONEncoder + + +class RedisModuleCommands: + """This class contains the wrapper functions to bring supported redis + modules into the command namespace. + """ + + def json(self, encoder=JSONEncoder(), decoder=JSONDecoder()): + """Access the json namespace, providing support for redis json.""" + + from .json import JSON + + jj = JSON(client=self, encoder=encoder, decoder=decoder) + return jj + + def ft(self, index_name="idx"): + """Access the search namespace, providing support for redis search.""" + + from .search import Search + + s = Search(client=self, index_name=index_name) + return s + + def ts(self): + """Access the timeseries namespace, providing support for + redis timeseries data. + """ + + from .timeseries import TimeSeries + + s = TimeSeries(client=self) + return s + + def bf(self): + """Access the bloom namespace.""" + + from .bf import BFBloom + + bf = BFBloom(client=self) + return bf + + def cf(self): + """Access the bloom namespace.""" + + from .bf import CFBloom + + cf = CFBloom(client=self) + return cf + + def cms(self): + """Access the bloom namespace.""" + + from .bf import CMSBloom + + cms = CMSBloom(client=self) + return cms + + def topk(self): + """Access the bloom namespace.""" + + from .bf import TOPKBloom + + topk = TOPKBloom(client=self) + return topk + + def tdigest(self): + """Access the bloom namespace.""" + + from .bf import TDigestBloom + + tdigest = TDigestBloom(client=self) + return tdigest + + def graph(self, index_name="idx"): + """Access the graph namespace, providing support for + redis graph data. + """ + + from .graph import Graph + + g = Graph(client=self, name=index_name) + return g + + +class AsyncRedisModuleCommands(RedisModuleCommands): + def ft(self, index_name="idx"): + """Access the search namespace, providing support for redis search.""" + + from .search import AsyncSearch + + s = AsyncSearch(client=self, index_name=index_name) + return s + + def graph(self, index_name="idx"): + """Access the graph namespace, providing support for + redis graph data. + """ + + from .graph import AsyncGraph + + g = AsyncGraph(client=self, name=index_name) + return g diff --git a/templates/skills/spotify/dependencies/redis/commands/search/__init__.py b/templates/skills/spotify/dependencies/redis/commands/search/__init__.py new file mode 100644 index 00000000..a2bb23b7 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/__init__.py @@ -0,0 +1,189 @@ +import redis + +from ...asyncio.client import Pipeline as AsyncioPipeline +from .commands import ( + AGGREGATE_CMD, + CONFIG_CMD, + INFO_CMD, + PROFILE_CMD, + SEARCH_CMD, + SPELLCHECK_CMD, + SYNDUMP_CMD, + AsyncSearchCommands, + SearchCommands, +) + + +class Search(SearchCommands): + """ + Create a client for talking to search. + It abstracts the API of the module and lets you just use the engine. + """ + + class BatchIndexer: + """ + A batch indexer allows you to automatically batch + document indexing in pipelines, flushing it every N documents. 
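+
+ Usage sketch (hypothetical document id and field names):
+
+ indexer = client.batch_indexer(chunk_size=500)
+ indexer.add_document("doc1", title="hello world")
+ indexer.commit()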
+ """ + + def __init__(self, client, chunk_size=1000): + self.client = client + self.execute_command = client.execute_command + self._pipeline = client.pipeline(transaction=False, shard_hint=None) + self.total = 0 + self.chunk_size = chunk_size + self.current_chunk = 0 + + def __del__(self): + if self.current_chunk: + self.commit() + + def add_document( + self, + doc_id, + nosave=False, + score=1.0, + payload=None, + replace=False, + partial=False, + no_create=False, + **fields, + ): + """ + Add a document to the batch query + """ + self.client._add_document( + doc_id, + conn=self._pipeline, + nosave=nosave, + score=score, + payload=payload, + replace=replace, + partial=partial, + no_create=no_create, + **fields, + ) + self.current_chunk += 1 + self.total += 1 + if self.current_chunk >= self.chunk_size: + self.commit() + + def add_document_hash(self, doc_id, score=1.0, replace=False): + """ + Add a hash to the batch query + """ + self.client._add_document_hash( + doc_id, conn=self._pipeline, score=score, replace=replace + ) + self.current_chunk += 1 + self.total += 1 + if self.current_chunk >= self.chunk_size: + self.commit() + + def commit(self): + """ + Manually commit and flush the batch indexing query + """ + self._pipeline.execute() + self.current_chunk = 0 + + def __init__(self, client, index_name="idx"): + """ + Create a new Client for the given index_name. + The default name is `idx` + + If conn is not None, we employ an already existing redis connection + """ + self._MODULE_CALLBACKS = {} + self.client = client + self.index_name = index_name + self.execute_command = client.execute_command + self._pipeline = client.pipeline + self._RESP2_MODULE_CALLBACKS = { + INFO_CMD: self._parse_info, + SEARCH_CMD: self._parse_search, + AGGREGATE_CMD: self._parse_aggregate, + PROFILE_CMD: self._parse_profile, + SPELLCHECK_CMD: self._parse_spellcheck, + CONFIG_CMD: self._parse_config_get, + SYNDUMP_CMD: self._parse_syndump, + } + + def pipeline(self, transaction=True, shard_hint=None): + """Creates a pipeline for the SEARCH module, that can be used for executing + SEARCH commands, as well as classic core commands. + """ + p = Pipeline( + connection_pool=self.client.connection_pool, + response_callbacks=self._MODULE_CALLBACKS, + transaction=transaction, + shard_hint=shard_hint, + ) + p.index_name = self.index_name + return p + + +class AsyncSearch(Search, AsyncSearchCommands): + class BatchIndexer(Search.BatchIndexer): + """ + A batch indexer allows you to automatically batch + document indexing in pipelines, flushing it every N documents. + """ + + async def add_document( + self, + doc_id, + nosave=False, + score=1.0, + payload=None, + replace=False, + partial=False, + no_create=False, + **fields, + ): + """ + Add a document to the batch query + """ + self.client._add_document( + doc_id, + conn=self._pipeline, + nosave=nosave, + score=score, + payload=payload, + replace=replace, + partial=partial, + no_create=no_create, + **fields, + ) + self.current_chunk += 1 + self.total += 1 + if self.current_chunk >= self.chunk_size: + await self.commit() + + async def commit(self): + """ + Manually commit and flush the batch indexing query + """ + await self._pipeline.execute() + self.current_chunk = 0 + + def pipeline(self, transaction=True, shard_hint=None): + """Creates a pipeline for the SEARCH module, that can be used for executing + SEARCH commands, as well as classic core commands. 
+ """ + p = AsyncPipeline( + connection_pool=self.client.connection_pool, + response_callbacks=self._MODULE_CALLBACKS, + transaction=transaction, + shard_hint=shard_hint, + ) + p.index_name = self.index_name + return p + + +class Pipeline(SearchCommands, redis.client.Pipeline): + """Pipeline for the module.""" + + +class AsyncPipeline(AsyncSearchCommands, AsyncioPipeline, Pipeline): + """AsyncPipeline for the module.""" diff --git a/templates/skills/spotify/dependencies/redis/commands/search/_util.py b/templates/skills/spotify/dependencies/redis/commands/search/_util.py new file mode 100644 index 00000000..dd1dff33 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/_util.py @@ -0,0 +1,7 @@ +def to_string(s): + if isinstance(s, str): + return s + elif isinstance(s, bytes): + return s.decode("utf-8", "ignore") + else: + return s # Not a string we care about diff --git a/templates/skills/spotify/dependencies/redis/commands/search/aggregation.py b/templates/skills/spotify/dependencies/redis/commands/search/aggregation.py new file mode 100644 index 00000000..50d18f47 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/aggregation.py @@ -0,0 +1,372 @@ +from typing import List, Union + +FIELDNAME = object() + + +class Limit: + def __init__(self, offset: int = 0, count: int = 0) -> None: + self.offset = offset + self.count = count + + def build_args(self): + if self.count: + return ["LIMIT", str(self.offset), str(self.count)] + else: + return [] + + +class Reducer: + """ + Base reducer object for all reducers. + + See the `redisearch.reducers` module for the actual reducers. + """ + + NAME = None + + def __init__(self, *args: List[str]) -> None: + self._args = args + self._field = None + self._alias = None + + def alias(self, alias: str) -> "Reducer": + """ + Set the alias for this reducer. + + ### Parameters + + - **alias**: The value of the alias for this reducer. If this is the + special value `aggregation.FIELDNAME` then this reducer will be + aliased using the same name as the field upon which it operates. + Note that using `FIELDNAME` is only possible on reducers which + operate on a single field value. + + This method returns the `Reducer` object making it suitable for + chaining. + """ + if alias is FIELDNAME: + if not self._field: + raise ValueError("Cannot use FIELDNAME alias with no field") + # Chop off initial '@' + alias = self._field[1:] + self._alias = alias + return self + + @property + def args(self) -> List[str]: + return self._args + + +class SortDirection: + """ + This special class is used to indicate sort direction. + """ + + DIRSTRING = None + + def __init__(self, field: str) -> None: + self.field = field + + +class Asc(SortDirection): + """ + Indicate that the given field should be sorted in ascending order + """ + + DIRSTRING = "ASC" + + +class Desc(SortDirection): + """ + Indicate that the given field should be sorted in descending order + """ + + DIRSTRING = "DESC" + + +class AggregateRequest: + """ + Aggregation request which can be passed to `Client.aggregate`. + """ + + def __init__(self, query: str = "*") -> None: + """ + Create an aggregation request. This request may then be passed to + `client.aggregate()`. + + In order for the request to be usable, it must contain at least one + group. + + - **query** Query string for filtering records. + + All member methods (except `build_args()`) + return the object itself, making them useful for chaining. 
+ """ + self._query = query + self._aggregateplan = [] + self._loadfields = [] + self._loadall = False + self._max = 0 + self._with_schema = False + self._verbatim = False + self._cursor = [] + self._dialect = None + + def load(self, *fields: List[str]) -> "AggregateRequest": + """ + Indicate the fields to be returned in the response. These fields are + returned in addition to any others implicitly specified. + + ### Parameters + + - **fields**: If fields not specified, all the fields will be loaded. + Otherwise, fields should be given in the format of `@field`. + """ + if fields: + self._loadfields.extend(fields) + else: + self._loadall = True + return self + + def group_by( + self, fields: List[str], *reducers: Union[Reducer, List[Reducer]] + ) -> "AggregateRequest": + """ + Specify by which fields to group the aggregation. + + ### Parameters + + - **fields**: Fields to group by. This can either be a single string, + or a list of strings. both cases, the field should be specified as + `@field`. + - **reducers**: One or more reducers. Reducers may be found in the + `aggregation` module. + """ + fields = [fields] if isinstance(fields, str) else fields + reducers = [reducers] if isinstance(reducers, Reducer) else reducers + + ret = ["GROUPBY", str(len(fields)), *fields] + for reducer in reducers: + ret += ["REDUCE", reducer.NAME, str(len(reducer.args))] + ret.extend(reducer.args) + if reducer._alias is not None: + ret += ["AS", reducer._alias] + + self._aggregateplan.extend(ret) + return self + + def apply(self, **kwexpr) -> "AggregateRequest": + """ + Specify one or more projection expressions to add to each result + + ### Parameters + + - **kwexpr**: One or more key-value pairs for a projection. The key is + the alias for the projection, and the value is the projection + expression itself, for example `apply(square_root="sqrt(@foo)")` + """ + for alias, expr in kwexpr.items(): + ret = ["APPLY", expr] + if alias is not None: + ret += ["AS", alias] + self._aggregateplan.extend(ret) + + return self + + def limit(self, offset: int, num: int) -> "AggregateRequest": + """ + Sets the limit for the most recent group or query. + + If no group has been defined yet (via `group_by()`) then this sets + the limit for the initial pool of results from the query. Otherwise, + this limits the number of items operated on from the previous group. + + Setting a limit on the initial search results may be useful when + attempting to execute an aggregation on a sample of a large data set. + + ### Parameters + + - **offset**: Result offset from which to begin paging + - **num**: Number of results to return + + + Example of sorting the initial results: + + ``` + AggregateRequest("@sale_amount:[10000, inf]")\ + .limit(0, 10)\ + .group_by("@state", r.count()) + ``` + + Will only group by the states found in the first 10 results of the + query `@sale_amount:[10000, inf]`. On the other hand, + + ``` + AggregateRequest("@sale_amount:[10000, inf]")\ + .limit(0, 1000)\ + .group_by("@state", r.count()\ + .limit(0, 10) + ``` + + Will group all the results matching the query, but only return the + first 10 groups. + + If you only wish to return a *top-N* style query, consider using + `sort_by()` instead. + + """ + _limit = Limit(offset, num) + self._aggregateplan.extend(_limit.build_args()) + return self + + def sort_by(self, *fields: List[str], **kwargs) -> "AggregateRequest": + """ + Indicate how the results should be sorted. 
This can also be used for + *top-N* style queries + + ### Parameters + + - **fields**: The fields by which to sort. This can be either a single + field or a list of fields. If you wish to specify order, you can + use the `Asc` or `Desc` wrapper classes. + - **max**: Maximum number of results to return. This can be + used instead of `LIMIT` and is also faster. + + + Example of sorting by `foo` ascending and `bar` descending: + + ``` + sort_by(Asc("@foo"), Desc("@bar")) + ``` + + Return the top 10 customers: + + ``` + AggregateRequest()\ + .group_by("@customer", r.sum("@paid").alias(FIELDNAME))\ + .sort_by(Desc("@paid"), max=10) + ``` + """ + if isinstance(fields, (str, SortDirection)): + fields = [fields] + + fields_args = [] + for f in fields: + if isinstance(f, SortDirection): + fields_args += [f.field, f.DIRSTRING] + else: + fields_args += [f] + + ret = ["SORTBY", str(len(fields_args))] + ret.extend(fields_args) + max = kwargs.get("max", 0) + if max > 0: + ret += ["MAX", str(max)] + + self._aggregateplan.extend(ret) + return self + + def filter(self, expressions: Union[str, List[str]]) -> "AggregateRequest": + """ + Specify filter for post-query results using predicates relating to + values in the result set. + + ### Parameters + + - **fields**: Fields to group by. This can either be a single string, + or a list of strings. + """ + if isinstance(expressions, str): + expressions = [expressions] + + for expression in expressions: + self._aggregateplan.extend(["FILTER", expression]) + + return self + + def with_schema(self) -> "AggregateRequest": + """ + If set, the `schema` property will contain a list of `[field, type]` + entries in the result object. + """ + self._with_schema = True + return self + + def verbatim(self) -> "AggregateRequest": + self._verbatim = True + return self + + def cursor(self, count: int = 0, max_idle: float = 0.0) -> "AggregateRequest": + args = ["WITHCURSOR"] + if count: + args += ["COUNT", str(count)] + if max_idle: + args += ["MAXIDLE", str(max_idle * 1000)] + self._cursor = args + return self + + def build_args(self) -> List[str]: + # @foo:bar ... + ret = [self._query] + + if self._with_schema: + ret.append("WITHSCHEMA") + + if self._verbatim: + ret.append("VERBATIM") + + if self._cursor: + ret += self._cursor + + if self._loadall: + ret.append("LOAD") + ret.append("*") + elif self._loadfields: + ret.append("LOAD") + ret.append(str(len(self._loadfields))) + ret.extend(self._loadfields) + + if self._dialect: + ret.extend(["DIALECT", self._dialect]) + + ret.extend(self._aggregateplan) + + return ret + + def dialect(self, dialect: int) -> "AggregateRequest": + """ + Add a dialect field to the aggregate command. 
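+
+ For example, `.dialect(2)` runs the aggregation under query dialect 2.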
+ + - **dialect** - dialect version to execute the query under + """ + self._dialect = dialect + return self + + +class Cursor: + def __init__(self, cid: int) -> None: + self.cid = cid + self.max_idle = 0 + self.count = 0 + + def build_args(self): + args = [str(self.cid)] + if self.max_idle: + args += ["MAXIDLE", str(self.max_idle)] + if self.count: + args += ["COUNT", str(self.count)] + return args + + +class AggregateResult: + def __init__(self, rows, cursor: Cursor, schema) -> None: + self.rows = rows + self.cursor = cursor + self.schema = schema + + def __repr__(self) -> (str, str): + cid = self.cursor.cid if self.cursor else -1 + return ( + f"<{self.__class__.__name__} at 0x{id(self):x} " + f"Rows={len(self.rows)}, Cursor={cid}>" + ) diff --git a/templates/skills/spotify/dependencies/redis/commands/search/commands.py b/templates/skills/spotify/dependencies/redis/commands/search/commands.py new file mode 100644 index 00000000..2df2b5a7 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/commands.py @@ -0,0 +1,1117 @@ +import itertools +import time +from typing import Dict, List, Optional, Union + +from redis.client import Pipeline +from redis.utils import deprecated_function + +from ..helpers import get_protocol_version, parse_to_dict +from ._util import to_string +from .aggregation import AggregateRequest, AggregateResult, Cursor +from .document import Document +from .query import Query +from .result import Result +from .suggestion import SuggestionParser + +NUMERIC = "NUMERIC" + +CREATE_CMD = "FT.CREATE" +ALTER_CMD = "FT.ALTER" +SEARCH_CMD = "FT.SEARCH" +ADD_CMD = "FT.ADD" +ADDHASH_CMD = "FT.ADDHASH" +DROP_CMD = "FT.DROP" +DROPINDEX_CMD = "FT.DROPINDEX" +EXPLAIN_CMD = "FT.EXPLAIN" +EXPLAINCLI_CMD = "FT.EXPLAINCLI" +DEL_CMD = "FT.DEL" +AGGREGATE_CMD = "FT.AGGREGATE" +PROFILE_CMD = "FT.PROFILE" +CURSOR_CMD = "FT.CURSOR" +SPELLCHECK_CMD = "FT.SPELLCHECK" +DICT_ADD_CMD = "FT.DICTADD" +DICT_DEL_CMD = "FT.DICTDEL" +DICT_DUMP_CMD = "FT.DICTDUMP" +GET_CMD = "FT.GET" +MGET_CMD = "FT.MGET" +CONFIG_CMD = "FT.CONFIG" +TAGVALS_CMD = "FT.TAGVALS" +ALIAS_ADD_CMD = "FT.ALIASADD" +ALIAS_UPDATE_CMD = "FT.ALIASUPDATE" +ALIAS_DEL_CMD = "FT.ALIASDEL" +INFO_CMD = "FT.INFO" +SUGADD_COMMAND = "FT.SUGADD" +SUGDEL_COMMAND = "FT.SUGDEL" +SUGLEN_COMMAND = "FT.SUGLEN" +SUGGET_COMMAND = "FT.SUGGET" +SYNUPDATE_CMD = "FT.SYNUPDATE" +SYNDUMP_CMD = "FT.SYNDUMP" + +NOOFFSETS = "NOOFFSETS" +NOFIELDS = "NOFIELDS" +NOHL = "NOHL" +NOFREQS = "NOFREQS" +MAXTEXTFIELDS = "MAXTEXTFIELDS" +TEMPORARY = "TEMPORARY" +STOPWORDS = "STOPWORDS" +SKIPINITIALSCAN = "SKIPINITIALSCAN" +WITHSCORES = "WITHSCORES" +FUZZY = "FUZZY" +WITHPAYLOADS = "WITHPAYLOADS" + + +class SearchCommands: + """Search commands.""" + + def _parse_results(self, cmd, res, **kwargs): + if get_protocol_version(self.client) in ["3", 3]: + return res + else: + return self._RESP2_MODULE_CALLBACKS[cmd](res, **kwargs) + + def _parse_info(self, res, **kwargs): + it = map(to_string, res) + return dict(zip(it, it)) + + def _parse_search(self, res, **kwargs): + return Result( + res, + not kwargs["query"]._no_content, + duration=kwargs["duration"], + has_payload=kwargs["query"]._with_payloads, + with_scores=kwargs["query"]._with_scores, + ) + + def _parse_aggregate(self, res, **kwargs): + return self._get_aggregate_result(res, kwargs["query"], kwargs["has_cursor"]) + + def _parse_profile(self, res, **kwargs): + query = kwargs["query"] + if isinstance(query, AggregateRequest): + result = self._get_aggregate_result(res[0], query, query._cursor) + 
else: + result = Result( + res[0], + not query._no_content, + duration=kwargs["duration"], + has_payload=query._with_payloads, + with_scores=query._with_scores, + ) + + return result, parse_to_dict(res[1]) + + def _parse_spellcheck(self, res, **kwargs): + corrections = {} + if res == 0: + return corrections + + for _correction in res: + if isinstance(_correction, int) and _correction == 0: + continue + + if len(_correction) != 3: + continue + if not _correction[2]: + continue + if not _correction[2][0]: + continue + + # For spellcheck output + # 1) 1) "TERM" + # 2) "{term1}" + # 3) 1) 1) "{score1}" + # 2) "{suggestion1}" + # 2) 1) "{score2}" + # 2) "{suggestion2}" + # + # Following dictionary will be made + # corrections = { + # '{term1}': [ + # {'score': '{score1}', 'suggestion': '{suggestion1}'}, + # {'score': '{score2}', 'suggestion': '{suggestion2}'} + # ] + # } + corrections[_correction[1]] = [ + {"score": _item[0], "suggestion": _item[1]} for _item in _correction[2] + ] + + return corrections + + def _parse_config_get(self, res, **kwargs): + return {kvs[0]: kvs[1] for kvs in res} if res else {} + + def _parse_syndump(self, res, **kwargs): + return {res[i]: res[i + 1] for i in range(0, len(res), 2)} + + def batch_indexer(self, chunk_size=100): + """ + Create a new batch indexer from the client with a given chunk size + """ + return self.BatchIndexer(self, chunk_size=chunk_size) + + def create_index( + self, + fields, + no_term_offsets=False, + no_field_flags=False, + stopwords=None, + definition=None, + max_text_fields=False, + temporary=None, + no_highlight=False, + no_term_frequencies=False, + skip_initial_scan=False, + ): + """ + Create the search index. The index must not already exist. + + ### Parameters: + + - **fields**: a list of TextField or NumericField objects + - **no_term_offsets**: If true, we will not save term offsets in + the index + - **no_field_flags**: If true, we will not save field flags that + allow searching in specific fields + - **stopwords**: If not None, we create the index with this custom + stopword list. The list can be empty + - **max_text_fields**: If true, we will encode indexes as if there + were more than 32 text fields which allows you to add additional + fields (beyond 32). + - **temporary**: Create a lightweight temporary index which will + expire after the specified period of inactivity (in seconds). The + internal idle timer is reset whenever the index is searched or added to. + - **no_highlight**: If true, disabling highlighting support. + Also implied by no_term_offsets. + - **no_term_frequencies**: If true, we avoid saving the term frequencies + in the index. + - **skip_initial_scan**: If true, we do not scan and index. + + For more information see `FT.CREATE `_. 
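+
+ Illustrative sketch (assumes TextField/NumericField imported from
+ redis.commands.search.field):
+
+ client.create_index([TextField("title"), NumericField("price")])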
+ """ # noqa + + args = [CREATE_CMD, self.index_name] + if definition is not None: + args += definition.args + if max_text_fields: + args.append(MAXTEXTFIELDS) + if temporary is not None and isinstance(temporary, int): + args.append(TEMPORARY) + args.append(temporary) + if no_term_offsets: + args.append(NOOFFSETS) + if no_highlight: + args.append(NOHL) + if no_field_flags: + args.append(NOFIELDS) + if no_term_frequencies: + args.append(NOFREQS) + if skip_initial_scan: + args.append(SKIPINITIALSCAN) + if stopwords is not None and isinstance(stopwords, (list, tuple, set)): + args += [STOPWORDS, len(stopwords)] + if len(stopwords) > 0: + args += list(stopwords) + + args.append("SCHEMA") + try: + args += list(itertools.chain(*(f.redis_args() for f in fields))) + except TypeError: + args += fields.redis_args() + + return self.execute_command(*args) + + def alter_schema_add(self, fields: List[str]): + """ + Alter the existing search index by adding new fields. The index + must already exist. + + ### Parameters: + + - **fields**: a list of Field objects to add for the index + + For more information see `FT.ALTER `_. + """ # noqa + + args = [ALTER_CMD, self.index_name, "SCHEMA", "ADD"] + try: + args += list(itertools.chain(*(f.redis_args() for f in fields))) + except TypeError: + args += fields.redis_args() + + return self.execute_command(*args) + + def dropindex(self, delete_documents: bool = False): + """ + Drop the index if it exists. + Replaced `drop_index` in RediSearch 2.0. + Default behavior was changed to not delete the indexed documents. + + ### Parameters: + + - **delete_documents**: If `True`, all documents will be deleted. + + For more information see `FT.DROPINDEX `_. + """ # noqa + delete_str = "DD" if delete_documents else "" + return self.execute_command(DROPINDEX_CMD, self.index_name, delete_str) + + def _add_document( + self, + doc_id, + conn=None, + nosave=False, + score=1.0, + payload=None, + replace=False, + partial=False, + language=None, + no_create=False, + **fields, + ): + """ + Internal add_document used for both batch and single doc indexing + """ + + if partial or no_create: + replace = True + + args = [ADD_CMD, self.index_name, doc_id, score] + if nosave: + args.append("NOSAVE") + if payload is not None: + args.append("PAYLOAD") + args.append(payload) + if replace: + args.append("REPLACE") + if partial: + args.append("PARTIAL") + if no_create: + args.append("NOCREATE") + if language: + args += ["LANGUAGE", language] + args.append("FIELDS") + args += list(itertools.chain(*fields.items())) + + if conn is not None: + return conn.execute_command(*args) + + return self.execute_command(*args) + + def _add_document_hash( + self, doc_id, conn=None, score=1.0, language=None, replace=False + ): + """ + Internal add_document_hash used for both batch and single doc indexing + """ + + args = [ADDHASH_CMD, self.index_name, doc_id, score] + + if replace: + args.append("REPLACE") + + if language: + args += ["LANGUAGE", language] + + if conn is not None: + return conn.execute_command(*args) + + return self.execute_command(*args) + + @deprecated_function( + version="2.0.0", reason="deprecated since redisearch 2.0, call hset instead" + ) + def add_document( + self, + doc_id: str, + nosave: bool = False, + score: float = 1.0, + payload: bool = None, + replace: bool = False, + partial: bool = False, + language: Optional[str] = None, + no_create: str = False, + **fields: List[str], + ): + """ + Add a single document to the index. 
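+
+        Deprecated since RediSearch 2.0; prefer writing the document as a
+        plain Redis hash (``hset``) and letting a matching index pick it
+        up automatically.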
+
+        ### Parameters
+
+        - **doc_id**: the id of the saved document.
+        - **nosave**: if set to true, we just index the document, and don't
+          save a copy of it. This means that searches will just
+          return ids.
+        - **score**: the document ranking, between 0.0 and 1.0
+        - **payload**: optional inner-index payload we can save for fast
+          access in scoring functions
+        - **replace**: if True, and the document already is in the index,
+          we perform an update and reindex the document
+        - **partial**: if True, the fields specified will be added to the
+          existing document.
+          This has the added benefit that any fields specified
+          with `no_index`
+          will not be reindexed again. Implies `replace`
+        - **language**: Specify the language used for document tokenization.
+        - **no_create**: if True, the document is only updated and reindexed
+          if it already exists.
+          If the document does not exist, an error will be
+          returned. Implies `replace`
+        - **fields** kwargs dictionary of the document fields to be saved
+          and/or indexed.
+          NOTE: Geo points should be encoded as strings of "lon,lat"
+        """  # noqa
+        return self._add_document(
+            doc_id,
+            conn=None,
+            nosave=nosave,
+            score=score,
+            payload=payload,
+            replace=replace,
+            partial=partial,
+            language=language,
+            no_create=no_create,
+            **fields,
+        )
+
+    @deprecated_function(
+        version="2.0.0", reason="deprecated since redisearch 2.0, call hset instead"
+    )
+    def add_document_hash(self, doc_id, score=1.0, language=None, replace=False):
+        """
+        Add a hash document to the index.
+
+        ### Parameters
+
+        - **doc_id**: the document's id. This has to be an existing HASH key
+          in Redis that will hold the fields the index needs.
+        - **score**: the document ranking, between 0.0 and 1.0
+        - **replace**: if True, and the document already is in the index, we
+          perform an update and reindex the document
+        - **language**: Specify the language used for document tokenization.
+        """  # noqa
+        return self._add_document_hash(
+            doc_id, conn=None, score=score, language=language, replace=replace
+        )
+
+    def delete_document(self, doc_id, conn=None, delete_actual_document=False):
+        """
+        Delete a document from the index.
+        Returns 1 if the document was deleted, 0 if not.
+
+        ### Parameters
+
+        - **delete_actual_document**: if set to True, RediSearch also deletes
+          the actual document if it is in the index
+        """  # noqa
+        args = [DEL_CMD, self.index_name, doc_id]
+        if delete_actual_document:
+            args.append("DD")
+
+        if conn is not None:
+            return conn.execute_command(*args)
+
+        return self.execute_command(*args)
+
+    def load_document(self, id):
+        """
+        Load a single document by id
+        """
+        fields = self.client.hgetall(id)
+        f2 = {to_string(k): to_string(v) for k, v in fields.items()}
+        fields = f2
+
+        try:
+            del fields["id"]
+        except KeyError:
+            pass
+
+        return Document(id=id, **fields)
+
+    def get(self, *ids):
+        """
+        Returns the full contents of multiple documents.
+
+        ### Parameters
+
+        - **ids**: the ids of the saved documents.
+
+        """
+
+        return self.execute_command(MGET_CMD, self.index_name, *ids)
+
+    def info(self):
+        """
+        Get info and stats about the current index, including the number of
+        documents, memory consumption, etc.
+
+        For more information see `FT.INFO `_.
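+
+        ### Example
+
+        A minimal sketch; the reply keys shown are common ``FT.INFO``
+        fields but vary with server version:
+
+        ```
+        info = ft.info()
+        print(info["index_name"], info["num_docs"])
+        ```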
+ """ + + res = self.execute_command(INFO_CMD, self.index_name) + return self._parse_results(INFO_CMD, res) + + def get_params_args( + self, query_params: Union[Dict[str, Union[str, int, float, bytes]], None] + ): + if query_params is None: + return [] + args = [] + if len(query_params) > 0: + args.append("params") + args.append(len(query_params) * 2) + for key, value in query_params.items(): + args.append(key) + args.append(value) + return args + + def _mk_query_args( + self, query, query_params: Union[Dict[str, Union[str, int, float, bytes]], None] + ): + args = [self.index_name] + + if isinstance(query, str): + # convert the query from a text to a query object + query = Query(query) + if not isinstance(query, Query): + raise ValueError(f"Bad query type {type(query)}") + + args += query.get_args() + args += self.get_params_args(query_params) + + return args, query + + def search( + self, + query: Union[str, Query], + query_params: Union[Dict[str, Union[str, int, float, bytes]], None] = None, + ): + """ + Search the index for a given query, and return a result of documents + + ### Parameters + + - **query**: the search query. Either a text for simple queries with + default parameters, or a Query object for complex queries. + See RediSearch's documentation on query format + + For more information see `FT.SEARCH `_. + """ # noqa + args, query = self._mk_query_args(query, query_params=query_params) + st = time.time() + res = self.execute_command(SEARCH_CMD, *args) + + if isinstance(res, Pipeline): + return res + + return self._parse_results( + SEARCH_CMD, res, query=query, duration=(time.time() - st) * 1000.0 + ) + + def explain( + self, + query: Union[str, Query], + query_params: Dict[str, Union[str, int, float]] = None, + ): + """Returns the execution plan for a complex query. + + For more information see `FT.EXPLAIN `_. + """ # noqa + args, query_text = self._mk_query_args(query, query_params=query_params) + return self.execute_command(EXPLAIN_CMD, *args) + + def explain_cli(self, query: Union[str, Query]): # noqa + raise NotImplementedError("EXPLAINCLI will not be implemented.") + + def aggregate( + self, + query: Union[str, Query], + query_params: Dict[str, Union[str, int, float]] = None, + ): + """ + Issue an aggregation query. + + ### Parameters + + **query**: This can be either an `AggregateRequest`, or a `Cursor` + + An `AggregateResult` object is returned. You can access the rows from + its `rows` property, which will always yield the rows of the result. + + For more information see `FT.AGGREGATE `_. 
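+
+        ### Example
+
+        A minimal sketch, assuming an index with ``category`` and ``price``
+        fields; all names are illustrative:
+
+        ```
+        from redis.commands.search import reducers
+        from redis.commands.search.aggregation import AggregateRequest
+
+        req = AggregateRequest("*").group_by(
+            "@category", reducers.avg("@price").alias("avg_price")
+        )
+        for row in ft.aggregate(req).rows:
+            print(row)
+        ```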
+ """ # noqa + if isinstance(query, AggregateRequest): + has_cursor = bool(query._cursor) + cmd = [AGGREGATE_CMD, self.index_name] + query.build_args() + elif isinstance(query, Cursor): + has_cursor = True + cmd = [CURSOR_CMD, "READ", self.index_name] + query.build_args() + else: + raise ValueError("Bad query", query) + cmd += self.get_params_args(query_params) + + raw = self.execute_command(*cmd) + return self._parse_results( + AGGREGATE_CMD, raw, query=query, has_cursor=has_cursor + ) + + def _get_aggregate_result( + self, raw: List, query: Union[str, Query, AggregateRequest], has_cursor: bool + ): + if has_cursor: + if isinstance(query, Cursor): + query.cid = raw[1] + cursor = query + else: + cursor = Cursor(raw[1]) + raw = raw[0] + else: + cursor = None + + if isinstance(query, AggregateRequest) and query._with_schema: + schema = raw[0] + rows = raw[2:] + else: + schema = None + rows = raw[1:] + + return AggregateResult(rows, cursor, schema) + + def profile( + self, + query: Union[str, Query, AggregateRequest], + limited: bool = False, + query_params: Optional[Dict[str, Union[str, int, float]]] = None, + ): + """ + Performs a search or aggregate command and collects performance + information. + + ### Parameters + + **query**: This can be either an `AggregateRequest`, `Query` or string. + **limited**: If set to True, removes details of reader iterator. + **query_params**: Define one or more value parameters. + Each parameter has a name and a value. + + """ + st = time.time() + cmd = [PROFILE_CMD, self.index_name, ""] + if limited: + cmd.append("LIMITED") + cmd.append("QUERY") + + if isinstance(query, AggregateRequest): + cmd[2] = "AGGREGATE" + cmd += query.build_args() + elif isinstance(query, Query): + cmd[2] = "SEARCH" + cmd += query.get_args() + cmd += self.get_params_args(query_params) + else: + raise ValueError("Must provide AggregateRequest object or Query object.") + + res = self.execute_command(*cmd) + + return self._parse_results( + PROFILE_CMD, res, query=query, duration=(time.time() - st) * 1000.0 + ) + + def spellcheck(self, query, distance=None, include=None, exclude=None): + """ + Issue a spellcheck query + + ### Parameters + + **query**: search query. + **distance***: the maximal Levenshtein distance for spelling + suggestions (default: 1, max: 4). + **include**: specifies an inclusion custom dictionary. + **exclude**: specifies an exclusion custom dictionary. + + For more information see `FT.SPELLCHECK `_. + """ # noqa + cmd = [SPELLCHECK_CMD, self.index_name, query] + if distance: + cmd.extend(["DISTANCE", distance]) + + if include: + cmd.extend(["TERMS", "INCLUDE", include]) + + if exclude: + cmd.extend(["TERMS", "EXCLUDE", exclude]) + + res = self.execute_command(*cmd) + + return self._parse_results(SPELLCHECK_CMD, res) + + def dict_add(self, name: str, *terms: List[str]): + """Adds terms to a dictionary. + + ### Parameters + + - **name**: Dictionary name. + - **terms**: List of items for adding to the dictionary. + + For more information see `FT.DICTADD `_. + """ # noqa + cmd = [DICT_ADD_CMD, name] + cmd.extend(terms) + return self.execute_command(*cmd) + + def dict_del(self, name: str, *terms: List[str]): + """Deletes terms from a dictionary. + + ### Parameters + + - **name**: Dictionary name. + - **terms**: List of items for removing from the dictionary. + + For more information see `FT.DICTDEL `_. 
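+
+        ### Example
+
+        A minimal sketch pairing the three dictionary commands; the
+        dictionary name and terms are illustrative:
+
+        ```
+        ft.dict_add("custom_dict", "spotify", "playlist")
+        ft.dict_dump("custom_dict")
+        ft.dict_del("custom_dict", "playlist")
+        ```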
+ """ # noqa + cmd = [DICT_DEL_CMD, name] + cmd.extend(terms) + return self.execute_command(*cmd) + + def dict_dump(self, name: str): + """Dumps all terms in the given dictionary. + + ### Parameters + + - **name**: Dictionary name. + + For more information see `FT.DICTDUMP `_. + """ # noqa + cmd = [DICT_DUMP_CMD, name] + return self.execute_command(*cmd) + + def config_set(self, option: str, value: str) -> bool: + """Set runtime configuration option. + + ### Parameters + + - **option**: the name of the configuration option. + - **value**: a value for the configuration option. + + For more information see `FT.CONFIG SET `_. + """ # noqa + cmd = [CONFIG_CMD, "SET", option, value] + raw = self.execute_command(*cmd) + return raw == "OK" + + def config_get(self, option: str) -> str: + """Get runtime configuration option value. + + ### Parameters + + - **option**: the name of the configuration option. + + For more information see `FT.CONFIG GET `_. + """ # noqa + cmd = [CONFIG_CMD, "GET", option] + res = self.execute_command(*cmd) + return self._parse_results(CONFIG_CMD, res) + + def tagvals(self, tagfield: str): + """ + Return a list of all possible tag values + + ### Parameters + + - **tagfield**: Tag field name + + For more information see `FT.TAGVALS `_. + """ # noqa + + return self.execute_command(TAGVALS_CMD, self.index_name, tagfield) + + def aliasadd(self, alias: str): + """ + Alias a search index - will fail if alias already exists + + ### Parameters + + - **alias**: Name of the alias to create + + For more information see `FT.ALIASADD `_. + """ # noqa + + return self.execute_command(ALIAS_ADD_CMD, alias, self.index_name) + + def aliasupdate(self, alias: str): + """ + Updates an alias - will fail if alias does not already exist + + ### Parameters + + - **alias**: Name of the alias to create + + For more information see `FT.ALIASUPDATE `_. + """ # noqa + + return self.execute_command(ALIAS_UPDATE_CMD, alias, self.index_name) + + def aliasdel(self, alias: str): + """ + Removes an alias to a search index + + ### Parameters + + - **alias**: Name of the alias to delete + + For more information see `FT.ALIASDEL `_. + """ # noqa + return self.execute_command(ALIAS_DEL_CMD, alias) + + def sugadd(self, key, *suggestions, **kwargs): + """ + Add suggestion terms to the AutoCompleter engine. Each suggestion has + a score and string. + If kwargs["increment"] is true and the terms are already in the + server's dictionary, we increment their scores. + + For more information see `FT.SUGADD `_. + """ # noqa + # If Transaction is not False it will MULTI/EXEC which will error + pipe = self.pipeline(transaction=False) + for sug in suggestions: + args = [SUGADD_COMMAND, key, sug.string, sug.score] + if kwargs.get("increment"): + args.append("INCR") + if sug.payload: + args.append("PAYLOAD") + args.append(sug.payload) + + pipe.execute_command(*args) + + return pipe.execute()[-1] + + def suglen(self, key: str) -> int: + """ + Return the number of entries in the AutoCompleter index. + + For more information see `FT.SUGLEN `_. + """ # noqa + return self.execute_command(SUGLEN_COMMAND, key) + + def sugdel(self, key: str, string: str) -> int: + """ + Delete a string from the AutoCompleter index. + Returns 1 if the string was found and deleted, 0 otherwise. + + For more information see `FT.SUGDEL `_. 
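+
+        ### Example
+
+        A minimal sketch of the autocomplete round trip; the key and the
+        suggestion text are illustrative:
+
+        ```
+        from redis.commands.search.suggestion import Suggestion
+
+        ft.sugadd("artists", Suggestion("Daft Punk", score=1.0))
+        ft.sugget("artists", "Daf", fuzzy=True)
+        ft.sugdel("artists", "Daft Punk")
+        ```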
+ """ # noqa + return self.execute_command(SUGDEL_COMMAND, key, string) + + def sugget( + self, + key: str, + prefix: str, + fuzzy: bool = False, + num: int = 10, + with_scores: bool = False, + with_payloads: bool = False, + ) -> List[SuggestionParser]: + """ + Get a list of suggestions from the AutoCompleter, for a given prefix. + + Parameters: + + prefix : str + The prefix we are searching. **Must be valid ascii or utf-8** + fuzzy : bool + If set to true, the prefix search is done in fuzzy mode. + **NOTE**: Running fuzzy searches on short (<3 letters) prefixes + can be very + slow, and even scan the entire index. + with_scores : bool + If set to true, we also return the (refactored) score of + each suggestion. + This is normally not needed, and is NOT the original score + inserted into the index. + with_payloads : bool + Return suggestion payloads + num : int + The maximum number of results we return. Note that we might + return less. The algorithm trims irrelevant suggestions. + + Returns: + + list: + A list of Suggestion objects. If with_scores was False, the + score of all suggestions is 1. + + For more information see `FT.SUGGET `_. + """ # noqa + args = [SUGGET_COMMAND, key, prefix, "MAX", num] + if fuzzy: + args.append(FUZZY) + if with_scores: + args.append(WITHSCORES) + if with_payloads: + args.append(WITHPAYLOADS) + + res = self.execute_command(*args) + results = [] + if not res: + return results + + parser = SuggestionParser(with_scores, with_payloads, res) + return [s for s in parser] + + def synupdate(self, groupid: str, skipinitial: bool = False, *terms: List[str]): + """ + Updates a synonym group. + The command is used to create or update a synonym group with + additional terms. + Only documents which were indexed after the update will be affected. + + Parameters: + + groupid : + Synonym group id. + skipinitial : bool + If set to true, we do not scan and index. + terms : + The terms. + + For more information see `FT.SYNUPDATE `_. + """ # noqa + cmd = [SYNUPDATE_CMD, self.index_name, groupid] + if skipinitial: + cmd.extend(["SKIPINITIALSCAN"]) + cmd.extend(terms) + return self.execute_command(*cmd) + + def syndump(self): + """ + Dumps the contents of a synonym group. + + The command is used to dump the synonyms data structure. + Returns a list of synonym terms and their synonym group ids. + + For more information see `FT.SYNDUMP `_. + """ # noqa + res = self.execute_command(SYNDUMP_CMD, self.index_name) + return self._parse_results(SYNDUMP_CMD, res) + + +class AsyncSearchCommands(SearchCommands): + async def info(self): + """ + Get info an stats about the the current index, including the number of + documents, memory consumption, etc + + For more information see `FT.INFO `_. + """ + + res = await self.execute_command(INFO_CMD, self.index_name) + return self._parse_results(INFO_CMD, res) + + async def search( + self, + query: Union[str, Query], + query_params: Dict[str, Union[str, int, float]] = None, + ): + """ + Search the index for a given query, and return a result of documents + + ### Parameters + + - **query**: the search query. Either a text for simple queries with + default parameters, or a Query object for complex queries. + See RediSearch's documentation on query format + + For more information see `FT.SEARCH `_. 
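+
+        ### Example
+
+        A minimal sketch, assuming an async handle obtained via
+        ``redis.asyncio.Redis().ft("idx")``:
+
+        ```
+        from redis.commands.search.query import Query
+
+        res = await ft.search(Query("hello world").paging(0, 5))
+        print(res.total, [doc.id for doc in res.docs])
+        ```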
+ """ # noqa + args, query = self._mk_query_args(query, query_params=query_params) + st = time.time() + res = await self.execute_command(SEARCH_CMD, *args) + + if isinstance(res, Pipeline): + return res + + return self._parse_results( + SEARCH_CMD, res, query=query, duration=(time.time() - st) * 1000.0 + ) + + async def aggregate( + self, + query: Union[str, Query], + query_params: Dict[str, Union[str, int, float]] = None, + ): + """ + Issue an aggregation query. + + ### Parameters + + **query**: This can be either an `AggregateRequest`, or a `Cursor` + + An `AggregateResult` object is returned. You can access the rows from + its `rows` property, which will always yield the rows of the result. + + For more information see `FT.AGGREGATE `_. + """ # noqa + if isinstance(query, AggregateRequest): + has_cursor = bool(query._cursor) + cmd = [AGGREGATE_CMD, self.index_name] + query.build_args() + elif isinstance(query, Cursor): + has_cursor = True + cmd = [CURSOR_CMD, "READ", self.index_name] + query.build_args() + else: + raise ValueError("Bad query", query) + cmd += self.get_params_args(query_params) + + raw = await self.execute_command(*cmd) + return self._parse_results( + AGGREGATE_CMD, raw, query=query, has_cursor=has_cursor + ) + + async def spellcheck(self, query, distance=None, include=None, exclude=None): + """ + Issue a spellcheck query + + ### Parameters + + **query**: search query. + **distance***: the maximal Levenshtein distance for spelling + suggestions (default: 1, max: 4). + **include**: specifies an inclusion custom dictionary. + **exclude**: specifies an exclusion custom dictionary. + + For more information see `FT.SPELLCHECK `_. + """ # noqa + cmd = [SPELLCHECK_CMD, self.index_name, query] + if distance: + cmd.extend(["DISTANCE", distance]) + + if include: + cmd.extend(["TERMS", "INCLUDE", include]) + + if exclude: + cmd.extend(["TERMS", "EXCLUDE", exclude]) + + res = await self.execute_command(*cmd) + + return self._parse_results(SPELLCHECK_CMD, res) + + async def config_set(self, option: str, value: str) -> bool: + """Set runtime configuration option. + + ### Parameters + + - **option**: the name of the configuration option. + - **value**: a value for the configuration option. + + For more information see `FT.CONFIG SET `_. + """ # noqa + cmd = [CONFIG_CMD, "SET", option, value] + raw = await self.execute_command(*cmd) + return raw == "OK" + + async def config_get(self, option: str) -> str: + """Get runtime configuration option value. + + ### Parameters + + - **option**: the name of the configuration option. + + For more information see `FT.CONFIG GET `_. + """ # noqa + cmd = [CONFIG_CMD, "GET", option] + res = {} + res = await self.execute_command(*cmd) + return self._parse_results(CONFIG_CMD, res) + + async def load_document(self, id): + """ + Load a single document by id + """ + fields = await self.client.hgetall(id) + f2 = {to_string(k): to_string(v) for k, v in fields.items()} + fields = f2 + + try: + del fields["id"] + except KeyError: + pass + + return Document(id=id, **fields) + + async def sugadd(self, key, *suggestions, **kwargs): + """ + Add suggestion terms to the AutoCompleter engine. Each suggestion has + a score and string. + If kwargs["increment"] is true and the terms are already in the + server's dictionary, we increment their scores. + + For more information see `FT.SUGADD `_. 
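+
+        Behaves like the synchronous ``sugadd``; the only difference is
+        that the internal pipeline execution is awaited.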
+ """ # noqa + # If Transaction is not False it will MULTI/EXEC which will error + pipe = self.pipeline(transaction=False) + for sug in suggestions: + args = [SUGADD_COMMAND, key, sug.string, sug.score] + if kwargs.get("increment"): + args.append("INCR") + if sug.payload: + args.append("PAYLOAD") + args.append(sug.payload) + + pipe.execute_command(*args) + + return (await pipe.execute())[-1] + + async def sugget( + self, + key: str, + prefix: str, + fuzzy: bool = False, + num: int = 10, + with_scores: bool = False, + with_payloads: bool = False, + ) -> List[SuggestionParser]: + """ + Get a list of suggestions from the AutoCompleter, for a given prefix. + + Parameters: + + prefix : str + The prefix we are searching. **Must be valid ascii or utf-8** + fuzzy : bool + If set to true, the prefix search is done in fuzzy mode. + **NOTE**: Running fuzzy searches on short (<3 letters) prefixes + can be very + slow, and even scan the entire index. + with_scores : bool + If set to true, we also return the (refactored) score of + each suggestion. + This is normally not needed, and is NOT the original score + inserted into the index. + with_payloads : bool + Return suggestion payloads + num : int + The maximum number of results we return. Note that we might + return less. The algorithm trims irrelevant suggestions. + + Returns: + + list: + A list of Suggestion objects. If with_scores was False, the + score of all suggestions is 1. + + For more information see `FT.SUGGET `_. + """ # noqa + args = [SUGGET_COMMAND, key, prefix, "MAX", num] + if fuzzy: + args.append(FUZZY) + if with_scores: + args.append(WITHSCORES) + if with_payloads: + args.append(WITHPAYLOADS) + + ret = await self.execute_command(*args) + results = [] + if not ret: + return results + + parser = SuggestionParser(with_scores, with_payloads, ret) + return [s for s in parser] diff --git a/templates/skills/spotify/dependencies/redis/commands/search/document.py b/templates/skills/spotify/dependencies/redis/commands/search/document.py new file mode 100644 index 00000000..47534ec2 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/document.py @@ -0,0 +1,17 @@ +class Document: + """ + Represents a single document in a result set + """ + + def __init__(self, id, payload=None, **fields): + self.id = id + self.payload = payload + for k, v in fields.items(): + setattr(self, k, v) + + def __repr__(self): + return f"Document {self.__dict__}" + + def __getitem__(self, item): + value = getattr(self, item) + return value diff --git a/templates/skills/spotify/dependencies/redis/commands/search/field.py b/templates/skills/spotify/dependencies/redis/commands/search/field.py new file mode 100644 index 00000000..f316ed9f --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/field.py @@ -0,0 +1,184 @@ +from typing import List + +from redis import DataError + + +class Field: + NUMERIC = "NUMERIC" + TEXT = "TEXT" + WEIGHT = "WEIGHT" + GEO = "GEO" + TAG = "TAG" + VECTOR = "VECTOR" + SORTABLE = "SORTABLE" + NOINDEX = "NOINDEX" + AS = "AS" + GEOSHAPE = "GEOSHAPE" + + def __init__( + self, + name: str, + args: List[str] = None, + sortable: bool = False, + no_index: bool = False, + as_name: str = None, + ): + if args is None: + args = [] + self.name = name + self.args = args + self.args_suffix = list() + self.as_name = as_name + + if sortable: + self.args_suffix.append(Field.SORTABLE) + if no_index: + self.args_suffix.append(Field.NOINDEX) + + if no_index and not sortable: + raise ValueError("Non-Sortable 
non-Indexable fields are ignored") + + def append_arg(self, value): + self.args.append(value) + + def redis_args(self): + args = [self.name] + if self.as_name: + args += [self.AS, self.as_name] + args += self.args + args += self.args_suffix + return args + + +class TextField(Field): + """ + TextField is used to define a text field in a schema definition + """ + + NOSTEM = "NOSTEM" + PHONETIC = "PHONETIC" + + def __init__( + self, + name: str, + weight: float = 1.0, + no_stem: bool = False, + phonetic_matcher: str = None, + withsuffixtrie: bool = False, + **kwargs, + ): + Field.__init__(self, name, args=[Field.TEXT, Field.WEIGHT, weight], **kwargs) + + if no_stem: + Field.append_arg(self, self.NOSTEM) + if phonetic_matcher and phonetic_matcher in [ + "dm:en", + "dm:fr", + "dm:pt", + "dm:es", + ]: + Field.append_arg(self, self.PHONETIC) + Field.append_arg(self, phonetic_matcher) + if withsuffixtrie: + Field.append_arg(self, "WITHSUFFIXTRIE") + + +class NumericField(Field): + """ + NumericField is used to define a numeric field in a schema definition + """ + + def __init__(self, name: str, **kwargs): + Field.__init__(self, name, args=[Field.NUMERIC], **kwargs) + + +class GeoShapeField(Field): + """ + GeoShapeField is used to enable within/contain indexing/searching + """ + + SPHERICAL = "SPHERICAL" + FLAT = "FLAT" + + def __init__(self, name: str, coord_system=None, **kwargs): + args = [Field.GEOSHAPE] + if coord_system: + args.append(coord_system) + Field.__init__(self, name, args=args, **kwargs) + + +class GeoField(Field): + """ + GeoField is used to define a geo-indexing field in a schema definition + """ + + def __init__(self, name: str, **kwargs): + Field.__init__(self, name, args=[Field.GEO], **kwargs) + + +class TagField(Field): + """ + TagField is a tag-indexing field with simpler compression and tokenization. + See http://redisearch.io/Tags/ + """ + + SEPARATOR = "SEPARATOR" + CASESENSITIVE = "CASESENSITIVE" + + def __init__( + self, + name: str, + separator: str = ",", + case_sensitive: bool = False, + withsuffixtrie: bool = False, + **kwargs, + ): + args = [Field.TAG, self.SEPARATOR, separator] + if case_sensitive: + args.append(self.CASESENSITIVE) + if withsuffixtrie: + args.append("WITHSUFFIXTRIE") + + Field.__init__(self, name, args=args, **kwargs) + + +class VectorField(Field): + """ + Allows vector similarity queries against the value in this attribute. + See https://oss.redis.com/redisearch/Vectors/#vector_fields. + """ + + def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs): + """ + Create Vector Field. Notice that Vector cannot have sortable or no_index tag, + although it's also a Field. + + ``name`` is the name of the field. + + ``algorithm`` can be "FLAT" or "HNSW". + + ``attributes`` each algorithm can have specific attributes. Some of them + are mandatory and some of them are optional. See + https://oss.redis.com/redisearch/master/Vectors/#specific_creation_attributes_per_algorithm + for more information. + """ + sort = kwargs.get("sortable", False) + noindex = kwargs.get("no_index", False) + + if sort or noindex: + raise DataError("Cannot set 'sortable' or 'no_index' in Vector fields.") + + if algorithm.upper() not in ["FLAT", "HNSW"]: + raise DataError( + "Realtime vector indexing supporting 2 Indexing Methods:" + "'FLAT' and 'HNSW'." 
+ ) + + attr_li = [] + + for key, value in attributes.items(): + attr_li.extend([key, value]) + + Field.__init__( + self, name, args=[Field.VECTOR, algorithm, len(attr_li), *attr_li], **kwargs + ) diff --git a/templates/skills/spotify/dependencies/redis/commands/search/indexDefinition.py b/templates/skills/spotify/dependencies/redis/commands/search/indexDefinition.py new file mode 100644 index 00000000..a668e85b --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/indexDefinition.py @@ -0,0 +1,79 @@ +from enum import Enum + + +class IndexType(Enum): + """Enum of the currently supported index types.""" + + HASH = 1 + JSON = 2 + + +class IndexDefinition: + """IndexDefinition is used to define a index definition for automatic + indexing on Hash or Json update.""" + + def __init__( + self, + prefix=[], + filter=None, + language_field=None, + language=None, + score_field=None, + score=1.0, + payload_field=None, + index_type=None, + ): + self.args = [] + self._append_index_type(index_type) + self._append_prefix(prefix) + self._append_filter(filter) + self._append_language(language_field, language) + self._append_score(score_field, score) + self._append_payload(payload_field) + + def _append_index_type(self, index_type): + """Append `ON HASH` or `ON JSON` according to the enum.""" + if index_type is IndexType.HASH: + self.args.extend(["ON", "HASH"]) + elif index_type is IndexType.JSON: + self.args.extend(["ON", "JSON"]) + elif index_type is not None: + raise RuntimeError(f"index_type must be one of {list(IndexType)}") + + def _append_prefix(self, prefix): + """Append PREFIX.""" + if len(prefix) > 0: + self.args.append("PREFIX") + self.args.append(len(prefix)) + for p in prefix: + self.args.append(p) + + def _append_filter(self, filter): + """Append FILTER.""" + if filter is not None: + self.args.append("FILTER") + self.args.append(filter) + + def _append_language(self, language_field, language): + """Append LANGUAGE_FIELD and LANGUAGE.""" + if language_field is not None: + self.args.append("LANGUAGE_FIELD") + self.args.append(language_field) + if language is not None: + self.args.append("LANGUAGE") + self.args.append(language) + + def _append_score(self, score_field, score): + """Append SCORE_FIELD and SCORE.""" + if score_field is not None: + self.args.append("SCORE_FIELD") + self.args.append(score_field) + if score is not None: + self.args.append("SCORE") + self.args.append(score) + + def _append_payload(self, payload_field): + """Append PAYLOAD_FIELD.""" + if payload_field is not None: + self.args.append("PAYLOAD_FIELD") + self.args.append(payload_field) diff --git a/templates/skills/spotify/dependencies/redis/commands/search/query.py b/templates/skills/spotify/dependencies/redis/commands/search/query.py new file mode 100644 index 00000000..113ddf9d --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/query.py @@ -0,0 +1,362 @@ +from typing import List, Optional, Union + + +class Query: + """ + Query is used to build complex queries that have more parameters than just + the query string. The query string is set in the constructor, and other + options have setter functions. + + The setter functions return the query object, so they can be chained, + i.e. `Query("foo").verbatim().filter(...)` etc. + """ + + def __init__(self, query_string: str) -> None: + """ + Create a new query object. + The query string is set in the constructor, and other options have + setter functions. 
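+
+        Example (a minimal sketch; the query text is illustrative):
+
+        ```
+        q = Query("hello world").verbatim().paging(0, 5).with_scores()
+        ```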
+ """ + + self._query_string: str = query_string + self._offset: int = 0 + self._num: int = 10 + self._no_content: bool = False + self._no_stopwords: bool = False + self._fields: Optional[List[str]] = None + self._verbatim: bool = False + self._with_payloads: bool = False + self._with_scores: bool = False + self._scorer: Optional[str] = None + self._filters: List = list() + self._ids: Optional[List[str]] = None + self._slop: int = -1 + self._timeout: Optional[float] = None + self._in_order: bool = False + self._sortby: Optional[SortbyField] = None + self._return_fields: List = [] + self._summarize_fields: List = [] + self._highlight_fields: List = [] + self._language: Optional[str] = None + self._expander: Optional[str] = None + self._dialect: Optional[int] = None + + def query_string(self) -> str: + """Return the query string of this query only.""" + return self._query_string + + def limit_ids(self, *ids) -> "Query": + """Limit the results to a specific set of pre-known document + ids of any length.""" + self._ids = ids + return self + + def return_fields(self, *fields) -> "Query": + """Add fields to return fields.""" + self._return_fields += fields + return self + + def return_field(self, field: str, as_field: Optional[str] = None) -> "Query": + """Add field to return fields (Optional: add 'AS' name + to the field).""" + self._return_fields.append(field) + if as_field is not None: + self._return_fields += ("AS", as_field) + return self + + def _mk_field_list(self, fields: List[str]) -> List: + if not fields: + return [] + return [fields] if isinstance(fields, str) else list(fields) + + def summarize( + self, + fields: Optional[List] = None, + context_len: Optional[int] = None, + num_frags: Optional[int] = None, + sep: Optional[str] = None, + ) -> "Query": + """ + Return an abridged format of the field, containing only the segments of + the field which contain the matching term(s). + + If `fields` is specified, then only the mentioned fields are + summarized; otherwise all results are summarized. + + Server side defaults are used for each option (except `fields`) + if not specified + + - **fields** List of fields to summarize. All fields are summarized + if not specified + - **context_len** Amount of context to include with each fragment + - **num_frags** Number of fragments per document + - **sep** Separator string to separate fragments + """ + args = ["SUMMARIZE"] + fields = self._mk_field_list(fields) + if fields: + args += ["FIELDS", str(len(fields))] + fields + + if context_len is not None: + args += ["LEN", str(context_len)] + if num_frags is not None: + args += ["FRAGS", str(num_frags)] + if sep is not None: + args += ["SEPARATOR", sep] + + self._summarize_fields = args + return self + + def highlight( + self, fields: Optional[List[str]] = None, tags: Optional[List[str]] = None + ) -> None: + """ + Apply specified markup to matched term(s) within the returned field(s). + + - **fields** If specified then only those mentioned fields are + highlighted, otherwise all fields are highlighted + - **tags** A list of two strings to surround the match. + """ + args = ["HIGHLIGHT"] + fields = self._mk_field_list(fields) + if fields: + args += ["FIELDS", str(len(fields))] + fields + if tags: + args += ["TAGS"] + list(tags) + + self._highlight_fields = args + return self + + def language(self, language: str) -> "Query": + """ + Analyze the query as being in the specified language. + + :param language: The language (e.g. 
`chinese` or `english`) + """ + self._language = language + return self + + def slop(self, slop: int) -> "Query": + """Allow a maximum of N intervening non matched terms between + phrase terms (0 means exact phrase). + """ + self._slop = slop + return self + + def timeout(self, timeout: float) -> "Query": + """overrides the timeout parameter of the module""" + self._timeout = timeout + return self + + def in_order(self) -> "Query": + """ + Match only documents where the query terms appear in + the same order in the document. + i.e. for the query "hello world", we do not match "world hello" + """ + self._in_order = True + return self + + def scorer(self, scorer: str) -> "Query": + """ + Use a different scoring function to evaluate document relevance. + Default is `TFIDF`. + + :param scorer: The scoring function to use + (e.g. `TFIDF.DOCNORM` or `BM25`) + """ + self._scorer = scorer + return self + + def get_args(self) -> List[str]: + """Format the redis arguments for this query and return them.""" + args = [self._query_string] + args += self._get_args_tags() + args += self._summarize_fields + self._highlight_fields + args += ["LIMIT", self._offset, self._num] + return args + + def _get_args_tags(self) -> List[str]: + args = [] + if self._no_content: + args.append("NOCONTENT") + if self._fields: + args.append("INFIELDS") + args.append(len(self._fields)) + args += self._fields + if self._verbatim: + args.append("VERBATIM") + if self._no_stopwords: + args.append("NOSTOPWORDS") + if self._filters: + for flt in self._filters: + if not isinstance(flt, Filter): + raise AttributeError("Did not receive a Filter object.") + args += flt.args + if self._with_payloads: + args.append("WITHPAYLOADS") + if self._scorer: + args += ["SCORER", self._scorer] + if self._with_scores: + args.append("WITHSCORES") + if self._ids: + args.append("INKEYS") + args.append(len(self._ids)) + args += self._ids + if self._slop >= 0: + args += ["SLOP", self._slop] + if self._timeout is not None: + args += ["TIMEOUT", self._timeout] + if self._in_order: + args.append("INORDER") + if self._return_fields: + args.append("RETURN") + args.append(len(self._return_fields)) + args += self._return_fields + if self._sortby: + if not isinstance(self._sortby, SortbyField): + raise AttributeError("Did not receive a SortByField.") + args.append("SORTBY") + args += self._sortby.args + if self._language: + args += ["LANGUAGE", self._language] + if self._expander: + args += ["EXPANDER", self._expander] + if self._dialect: + args += ["DIALECT", self._dialect] + + return args + + def paging(self, offset: int, num: int) -> "Query": + """ + Set the paging for the query (defaults to 0..10). + + - **offset**: Paging offset for the results. Defaults to 0 + - **num**: How many results do we want + """ + self._offset = offset + self._num = num + return self + + def verbatim(self) -> "Query": + """Set the query to be verbatim, i.e. use no query expansion + or stemming. + """ + self._verbatim = True + return self + + def no_content(self) -> "Query": + """Set the query to only return ids and not the document content.""" + self._no_content = True + return self + + def no_stopwords(self) -> "Query": + """ + Prevent the query from being filtered for stopwords. + Only useful in very big queries that you are certain contain + no stopwords. 
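+
+        Stopword filtering itself is configured per index through the
+        ``STOPWORDS`` option of ``FT.CREATE``; this flag merely skips it
+        for a single query.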
+ """ + self._no_stopwords = True + return self + + def with_payloads(self) -> "Query": + """Ask the engine to return document payloads.""" + self._with_payloads = True + return self + + def with_scores(self) -> "Query": + """Ask the engine to return document search scores.""" + self._with_scores = True + return self + + def limit_fields(self, *fields: List[str]) -> "Query": + """ + Limit the search to specific TEXT fields only. + + - **fields**: A list of strings, case sensitive field names + from the defined schema. + """ + self._fields = fields + return self + + def add_filter(self, flt: "Filter") -> "Query": + """ + Add a numeric or geo filter to the query. + **Currently only one of each filter is supported by the engine** + + - **flt**: A NumericFilter or GeoFilter object, used on a + corresponding field + """ + + self._filters.append(flt) + return self + + def sort_by(self, field: str, asc: bool = True) -> "Query": + """ + Add a sortby field to the query. + + - **field** - the name of the field to sort by + - **asc** - when `True`, sorting will be done in asceding order + """ + self._sortby = SortbyField(field, asc) + return self + + def expander(self, expander: str) -> "Query": + """ + Add a expander field to the query. + + - **expander** - the name of the expander + """ + self._expander = expander + return self + + def dialect(self, dialect: int) -> "Query": + """ + Add a dialect field to the query. + + - **dialect** - dialect version to execute the query under + """ + self._dialect = dialect + return self + + +class Filter: + def __init__(self, keyword: str, field: str, *args: List[str]) -> None: + self.args = [keyword, field] + list(args) + + +class NumericFilter(Filter): + INF = "+inf" + NEG_INF = "-inf" + + def __init__( + self, + field: str, + minval: Union[int, str], + maxval: Union[int, str], + minExclusive: bool = False, + maxExclusive: bool = False, + ) -> None: + args = [ + minval if not minExclusive else f"({minval}", + maxval if not maxExclusive else f"({maxval}", + ] + + Filter.__init__(self, "FILTER", field, *args) + + +class GeoFilter(Filter): + METERS = "m" + KILOMETERS = "km" + FEET = "ft" + MILES = "mi" + + def __init__( + self, field: str, lon: float, lat: float, radius: float, unit: str = KILOMETERS + ) -> None: + Filter.__init__(self, "GEOFILTER", field, lon, lat, radius, unit) + + +class SortbyField: + def __init__(self, field: str, asc=True) -> None: + self.args = [field, "ASC" if asc else "DESC"] diff --git a/templates/skills/spotify/dependencies/redis/commands/search/querystring.py b/templates/skills/spotify/dependencies/redis/commands/search/querystring.py new file mode 100644 index 00000000..3ff13209 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/querystring.py @@ -0,0 +1,317 @@ +def tags(*t): + """ + Indicate that the values should be matched to a tag field + + ### Parameters + + - **t**: Tags to search for + """ + if not t: + raise ValueError("At least one tag must be specified") + return TagValue(*t) + + +def between(a, b, inclusive_min=True, inclusive_max=True): + """ + Indicate that value is a numeric range + """ + return RangeValue(a, b, inclusive_min=inclusive_min, inclusive_max=inclusive_max) + + +def equal(n): + """ + Match a numeric value + """ + return between(n, n) + + +def lt(n): + """ + Match any value less than n + """ + return between(None, n, inclusive_max=False) + + +def le(n): + """ + Match any value less or equal to n + """ + return between(None, n, inclusive_max=True) + + +def gt(n): + """ + Match any 
value greater than n + """ + return between(n, None, inclusive_min=False) + + +def ge(n): + """ + Match any value greater or equal to n + """ + return between(n, None, inclusive_min=True) + + +def geo(lat, lon, radius, unit="km"): + """ + Indicate that value is a geo region + """ + return GeoValue(lat, lon, radius, unit) + + +class Value: + @property + def combinable(self): + """ + Whether this type of value may be combined with other values + for the same field. This makes the filter potentially more efficient + """ + return False + + @staticmethod + def make_value(v): + """ + Convert an object to a value, if it is not a value already + """ + if isinstance(v, Value): + return v + return ScalarValue(v) + + def to_string(self): + raise NotImplementedError() + + def __str__(self): + return self.to_string() + + +class RangeValue(Value): + combinable = False + + def __init__(self, a, b, inclusive_min=False, inclusive_max=False): + if a is None: + a = "-inf" + if b is None: + b = "inf" + self.range = [str(a), str(b)] + self.inclusive_min = inclusive_min + self.inclusive_max = inclusive_max + + def to_string(self): + return "[{1}{0[0]} {2}{0[1]}]".format( + self.range, + "(" if not self.inclusive_min else "", + "(" if not self.inclusive_max else "", + ) + + +class ScalarValue(Value): + combinable = True + + def __init__(self, v): + self.v = str(v) + + def to_string(self): + return self.v + + +class TagValue(Value): + combinable = False + + def __init__(self, *tags): + self.tags = tags + + def to_string(self): + return "{" + " | ".join(str(t) for t in self.tags) + "}" + + +class GeoValue(Value): + def __init__(self, lon, lat, radius, unit="km"): + self.lon = lon + self.lat = lat + self.radius = radius + self.unit = unit + + def to_string(self): + return f"[{self.lon} {self.lat} {self.radius} {self.unit}]" + + +class Node: + def __init__(self, *children, **kwparams): + """ + Create a node + + ### Parameters + + - **children**: One or more sub-conditions. These can be additional + `intersect`, `disjunct`, `union`, `optional`, or any other `Node` + type. + + The semantics of multiple conditions are dependent on the type of + query. For an `intersection` node, this amounts to a logical AND, + for a `union` node, this amounts to a logical `OR`. + + - **kwparams**: key-value parameters. Each key is the name of a field, + and the value should be a field value. 
This can be one of the + following: + + - Simple string (for text field matches) + - value returned by one of the helper functions + - list of either a string or a value + + + ### Examples + + Field `num` should be between 1 and 10 + ``` + intersect(num=between(1, 10) + ``` + + Name can either be `bob` or `john` + + ``` + union(name=("bob", "john")) + ``` + + Don't select countries in Israel, Japan, or US + + ``` + disjunct_union(country=("il", "jp", "us")) + ``` + """ + + self.params = [] + + kvparams = {} + for k, v in kwparams.items(): + curvals = kvparams.setdefault(k, []) + if isinstance(v, (str, int, float)): + curvals.append(Value.make_value(v)) + elif isinstance(v, Value): + curvals.append(v) + else: + curvals.extend(Value.make_value(subv) for subv in v) + + self.params += [Node.to_node(p) for p in children] + + for k, v in kvparams.items(): + self.params.extend(self.join_fields(k, v)) + + def join_fields(self, key, vals): + if len(vals) == 1: + return [BaseNode(f"@{key}:{vals[0].to_string()}")] + if not vals[0].combinable: + return [BaseNode(f"@{key}:{v.to_string()}") for v in vals] + s = BaseNode(f"@{key}:({self.JOINSTR.join(v.to_string() for v in vals)})") + return [s] + + @classmethod + def to_node(cls, obj): # noqa + if isinstance(obj, Node): + return obj + return BaseNode(obj) + + @property + def JOINSTR(self): + raise NotImplementedError() + + def to_string(self, with_parens=None): + with_parens = self._should_use_paren(with_parens) + pre, post = ("(", ")") if with_parens else ("", "") + return f"{pre}{self.JOINSTR.join(n.to_string() for n in self.params)}{post}" + + def _should_use_paren(self, optval): + if optval is not None: + return optval + return len(self.params) > 1 + + def __str__(self): + return self.to_string() + + +class BaseNode(Node): + def __init__(self, s): + super().__init__() + self.s = str(s) + + def to_string(self, with_parens=None): + return self.s + + +class IntersectNode(Node): + """ + Create an intersection node. All children need to be satisfied in order for + this node to evaluate as true + """ + + JOINSTR = " " + + +class UnionNode(Node): + """ + Create a union node. Any of the children need to be satisfied in order for + this node to evaluate as true + """ + + JOINSTR = "|" + + +class DisjunctNode(IntersectNode): + """ + Create a disjunct node. In order for this node to be true, all of its + children must evaluate to false + """ + + def to_string(self, with_parens=None): + with_parens = self._should_use_paren(with_parens) + ret = super().to_string(with_parens=False) + if with_parens: + return "(-" + ret + ")" + else: + return "-" + ret + + +class DistjunctUnion(DisjunctNode): + """ + This node is true if *all* of its children are false. This is equivalent to + ``` + disjunct(union(...)) + ``` + """ + + JOINSTR = "|" + + +class OptionalNode(IntersectNode): + """ + Create an optional node. If this nodes evaluates to true, then the document + will be rated higher in score/rank. 
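+
+    Example (a minimal sketch):
+
+    ```
+    from redis.commands.search.querystring import OptionalNode, querystring
+
+    querystring("hello", OptionalNode("world"))  # -> "(hello ~world)"
+    ```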
+ """ + + def to_string(self, with_parens=None): + with_parens = self._should_use_paren(with_parens) + ret = super().to_string(with_parens=False) + if with_parens: + return "(~" + ret + ")" + else: + return "~" + ret + + +def intersect(*args, **kwargs): + return IntersectNode(*args, **kwargs) + + +def union(*args, **kwargs): + return UnionNode(*args, **kwargs) + + +def disjunct(*args, **kwargs): + return DisjunctNode(*args, **kwargs) + + +def disjunct_union(*args, **kwargs): + return DistjunctUnion(*args, **kwargs) + + +def querystring(*args, **kwargs): + return intersect(*args, **kwargs).to_string() diff --git a/templates/skills/spotify/dependencies/redis/commands/search/reducers.py b/templates/skills/spotify/dependencies/redis/commands/search/reducers.py new file mode 100644 index 00000000..8b60f232 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/reducers.py @@ -0,0 +1,182 @@ +from typing import Union + +from .aggregation import Asc, Desc, Reducer, SortDirection + + +class FieldOnlyReducer(Reducer): + """See https://redis.io/docs/interact/search-and-query/search/aggregations/""" + + def __init__(self, field: str) -> None: + super().__init__(field) + self._field = field + + +class count(Reducer): + """ + Counts the number of results in the group + """ + + NAME = "COUNT" + + def __init__(self) -> None: + super().__init__() + + +class sum(FieldOnlyReducer): + """ + Calculates the sum of all the values in the given fields within the group + """ + + NAME = "SUM" + + def __init__(self, field: str) -> None: + super().__init__(field) + + +class min(FieldOnlyReducer): + """ + Calculates the smallest value in the given field within the group + """ + + NAME = "MIN" + + def __init__(self, field: str) -> None: + super().__init__(field) + + +class max(FieldOnlyReducer): + """ + Calculates the largest value in the given field within the group + """ + + NAME = "MAX" + + def __init__(self, field: str) -> None: + super().__init__(field) + + +class avg(FieldOnlyReducer): + """ + Calculates the mean value in the given field within the group + """ + + NAME = "AVG" + + def __init__(self, field: str) -> None: + super().__init__(field) + + +class tolist(FieldOnlyReducer): + """ + Returns all the matched properties in a list + """ + + NAME = "TOLIST" + + def __init__(self, field: str) -> None: + super().__init__(field) + + +class count_distinct(FieldOnlyReducer): + """ + Calculate the number of distinct values contained in all the results in + the group for the given field + """ + + NAME = "COUNT_DISTINCT" + + def __init__(self, field: str) -> None: + super().__init__(field) + + +class count_distinctish(FieldOnlyReducer): + """ + Calculate the number of distinct values contained in all the results in the + group for the given field. This uses a faster algorithm than + `count_distinct` but is less accurate + """ + + NAME = "COUNT_DISTINCTISH" + + +class quantile(Reducer): + """ + Return the value for the nth percentile within the range of values for the + field within the group. 
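+
+    Example (a minimal sketch): ``reducers.quantile("@price", 0.5)``
+    requests the median ``price`` within each group.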
+ """ + + NAME = "QUANTILE" + + def __init__(self, field: str, pct: float) -> None: + super().__init__(field, str(pct)) + self._field = field + + +class stddev(FieldOnlyReducer): + """ + Return the standard deviation for the values within the group + """ + + NAME = "STDDEV" + + def __init__(self, field: str) -> None: + super().__init__(field) + + +class first_value(Reducer): + """ + Selects the first value within the group according to sorting parameters + """ + + NAME = "FIRST_VALUE" + + def __init__(self, field: str, *byfields: Union[Asc, Desc]) -> None: + """ + Selects the first value of the given field within the group. + + ### Parameter + + - **field**: Source field used for the value + - **byfields**: How to sort the results. This can be either the + *class* of `aggregation.Asc` or `aggregation.Desc` in which + case the field `field` is also used as the sort input. + + `byfields` can also be one or more *instances* of `Asc` or `Desc` + indicating the sort order for these fields + """ + + fieldstrs = [] + if ( + len(byfields) == 1 + and isinstance(byfields[0], type) + and issubclass(byfields[0], SortDirection) + ): + byfields = [byfields[0](field)] + + for f in byfields: + fieldstrs += [f.field, f.DIRSTRING] + + args = [field] + if fieldstrs: + args += ["BY"] + fieldstrs + super().__init__(*args) + self._field = field + + +class random_sample(Reducer): + """ + Returns a random sample of items from the dataset, from the given property + """ + + NAME = "RANDOM_SAMPLE" + + def __init__(self, field: str, size: int) -> None: + """ + ### Parameter + + **field**: Field to sample from + **size**: Return this many items (can be less) + """ + args = [field, str(size)] + super().__init__(*args) + self._field = field diff --git a/templates/skills/spotify/dependencies/redis/commands/search/result.py b/templates/skills/spotify/dependencies/redis/commands/search/result.py new file mode 100644 index 00000000..5b19e6fa --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/result.py @@ -0,0 +1,73 @@ +from ._util import to_string +from .document import Document + + +class Result: + """ + Represents the result of a search query, and has an array of Document + objects + """ + + def __init__( + self, res, hascontent, duration=0, has_payload=False, with_scores=False + ): + """ + - **snippets**: An optional dictionary of the form + {field: snippet_size} for snippet formatting + """ + + self.total = res[0] + self.duration = duration + self.docs = [] + + step = 1 + if hascontent: + step = step + 1 + if has_payload: + step = step + 1 + if with_scores: + step = step + 1 + + offset = 2 if with_scores else 1 + + for i in range(1, len(res), step): + id = to_string(res[i]) + payload = to_string(res[i + offset]) if has_payload else None + # fields_offset = 2 if has_payload else 1 + fields_offset = offset + 1 if has_payload else offset + score = float(res[i + 1]) if with_scores else None + + fields = {} + if hascontent and res[i + fields_offset] is not None: + fields = ( + dict( + dict( + zip( + map(to_string, res[i + fields_offset][::2]), + map(to_string, res[i + fields_offset][1::2]), + ) + ) + ) + if hascontent + else {} + ) + try: + del fields["id"] + except KeyError: + pass + + try: + fields["json"] = fields["$"] + del fields["$"] + except KeyError: + pass + + doc = ( + Document(id, score=score, payload=payload, **fields) + if with_scores + else Document(id, payload=payload, **fields) + ) + self.docs.append(doc) + + def __repr__(self) -> str: + return f"Result{{{self.total} total, docs: 
{self.docs}}}" diff --git a/templates/skills/spotify/dependencies/redis/commands/search/suggestion.py b/templates/skills/spotify/dependencies/redis/commands/search/suggestion.py new file mode 100644 index 00000000..499c8d91 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/search/suggestion.py @@ -0,0 +1,55 @@ +from typing import Optional + +from ._util import to_string + + +class Suggestion: + """ + Represents a single suggestion being sent or returned from the + autocomplete server + """ + + def __init__( + self, string: str, score: float = 1.0, payload: Optional[str] = None + ) -> None: + self.string = to_string(string) + self.payload = to_string(payload) + self.score = score + + def __repr__(self) -> str: + return self.string + + +class SuggestionParser: + """ + Internal class used to parse results from the `SUGGET` command. + This needs to consume either 1, 2, or 3 values at a time from + the return value depending on what objects were requested + """ + + def __init__(self, with_scores: bool, with_payloads: bool, ret) -> None: + self.with_scores = with_scores + self.with_payloads = with_payloads + + if with_scores and with_payloads: + self.sugsize = 3 + self._scoreidx = 1 + self._payloadidx = 2 + elif with_scores: + self.sugsize = 2 + self._scoreidx = 1 + elif with_payloads: + self.sugsize = 2 + self._payloadidx = 1 + else: + self.sugsize = 1 + self._scoreidx = -1 + + self._sugs = ret + + def __iter__(self): + for i in range(0, len(self._sugs), self.sugsize): + ss = self._sugs[i] + score = float(self._sugs[i + self._scoreidx]) if self.with_scores else 1.0 + payload = self._sugs[i + self._payloadidx] if self.with_payloads else None + yield Suggestion(ss, score, payload) diff --git a/templates/skills/spotify/dependencies/redis/commands/sentinel.py b/templates/skills/spotify/dependencies/redis/commands/sentinel.py new file mode 100644 index 00000000..f7457579 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/sentinel.py @@ -0,0 +1,99 @@ +import warnings + + +class SentinelCommands: + """ + A class containing the commands specific to redis sentinel. This class is + to be used as a mixin. 
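+
+    Example (a minimal sketch, assuming a Sentinel listening on
+    ``localhost:26379`` that monitors a service named ``mymaster``):
+
+    ```
+    import redis
+
+    r = redis.Redis(host="localhost", port=26379)
+    r.sentinel_get_master_addr_by_name("mymaster")
+    ```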
+ """ + + def sentinel(self, *args): + """Redis Sentinel's SENTINEL command.""" + warnings.warn(DeprecationWarning("Use the individual sentinel_* methods")) + + def sentinel_get_master_addr_by_name(self, service_name): + """Returns a (host, port) pair for the given ``service_name``""" + return self.execute_command("SENTINEL GET-MASTER-ADDR-BY-NAME", service_name) + + def sentinel_master(self, service_name): + """Returns a dictionary containing the specified masters state.""" + return self.execute_command("SENTINEL MASTER", service_name) + + def sentinel_masters(self): + """Returns a list of dictionaries containing each master's state.""" + return self.execute_command("SENTINEL MASTERS") + + def sentinel_monitor(self, name, ip, port, quorum): + """Add a new master to Sentinel to be monitored""" + return self.execute_command("SENTINEL MONITOR", name, ip, port, quorum) + + def sentinel_remove(self, name): + """Remove a master from Sentinel's monitoring""" + return self.execute_command("SENTINEL REMOVE", name) + + def sentinel_sentinels(self, service_name): + """Returns a list of sentinels for ``service_name``""" + return self.execute_command("SENTINEL SENTINELS", service_name) + + def sentinel_set(self, name, option, value): + """Set Sentinel monitoring parameters for a given master""" + return self.execute_command("SENTINEL SET", name, option, value) + + def sentinel_slaves(self, service_name): + """Returns a list of slaves for ``service_name``""" + return self.execute_command("SENTINEL SLAVES", service_name) + + def sentinel_reset(self, pattern): + """ + This command will reset all the masters with matching name. + The pattern argument is a glob-style pattern. + + The reset process clears any previous state in a master (including a + failover in progress), and removes every slave and sentinel already + discovered and associated with the master. + """ + return self.execute_command("SENTINEL RESET", pattern, once=True) + + def sentinel_failover(self, new_master_name): + """ + Force a failover as if the master was not reachable, and without + asking for agreement to other Sentinels (however a new version of the + configuration will be published so that the other Sentinels will + update their configurations). + """ + return self.execute_command("SENTINEL FAILOVER", new_master_name) + + def sentinel_ckquorum(self, new_master_name): + """ + Check if the current Sentinel configuration is able to reach the + quorum needed to failover a master, and the majority needed to + authorize the failover. + + This command should be used in monitoring systems to check if a + Sentinel deployment is ok. + """ + return self.execute_command("SENTINEL CKQUORUM", new_master_name, once=True) + + def sentinel_flushconfig(self): + """ + Force Sentinel to rewrite its configuration on disk, including the + current Sentinel state. + + Normally Sentinel rewrites the configuration every time something + changes in its state (in the context of the subset of the state which + is persisted on disk across restart). + However sometimes it is possible that the configuration file is lost + because of operation errors, disk failures, package upgrade scripts or + configuration managers. In those cases a way to to force Sentinel to + rewrite the configuration file is handy. + + This command works even if the previous configuration file is + completely missing. 
+ """ + return self.execute_command("SENTINEL FLUSHCONFIG") + + +class AsyncSentinelCommands(SentinelCommands): + async def sentinel(self, *args) -> None: + """Redis Sentinel's SENTINEL command.""" + super().sentinel(*args) diff --git a/templates/skills/spotify/dependencies/redis/commands/timeseries/__init__.py b/templates/skills/spotify/dependencies/redis/commands/timeseries/__init__.py new file mode 100644 index 00000000..4188b93d --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/timeseries/__init__.py @@ -0,0 +1,108 @@ +import redis +from redis._parsers.helpers import bool_ok + +from ..helpers import get_protocol_version, parse_to_list +from .commands import ( + ALTER_CMD, + CREATE_CMD, + CREATERULE_CMD, + DEL_CMD, + DELETERULE_CMD, + GET_CMD, + INFO_CMD, + MGET_CMD, + MRANGE_CMD, + MREVRANGE_CMD, + QUERYINDEX_CMD, + RANGE_CMD, + REVRANGE_CMD, + TimeSeriesCommands, +) +from .info import TSInfo +from .utils import parse_get, parse_m_get, parse_m_range, parse_range + + +class TimeSeries(TimeSeriesCommands): + """ + This class subclasses redis-py's `Redis` and implements RedisTimeSeries's + commands (prefixed with "ts"). + The client allows to interact with RedisTimeSeries and use all of it's + functionality. + """ + + def __init__(self, client=None, **kwargs): + """Create a new RedisTimeSeries client.""" + # Set the module commands' callbacks + self._MODULE_CALLBACKS = { + ALTER_CMD: bool_ok, + CREATE_CMD: bool_ok, + CREATERULE_CMD: bool_ok, + DELETERULE_CMD: bool_ok, + } + + _RESP2_MODULE_CALLBACKS = { + DEL_CMD: int, + GET_CMD: parse_get, + INFO_CMD: TSInfo, + MGET_CMD: parse_m_get, + MRANGE_CMD: parse_m_range, + MREVRANGE_CMD: parse_m_range, + RANGE_CMD: parse_range, + REVRANGE_CMD: parse_range, + QUERYINDEX_CMD: parse_to_list, + } + _RESP3_MODULE_CALLBACKS = {} + + self.client = client + self.execute_command = client.execute_command + + if get_protocol_version(self.client) in ["3", 3]: + self._MODULE_CALLBACKS.update(_RESP3_MODULE_CALLBACKS) + else: + self._MODULE_CALLBACKS.update(_RESP2_MODULE_CALLBACKS) + + for k, v in self._MODULE_CALLBACKS.items(): + self.client.set_response_callback(k, v) + + def pipeline(self, transaction=True, shard_hint=None): + """Creates a pipeline for the TimeSeries module, that can be used + for executing only TimeSeries commands and core commands. 
+
+        Usage example:
+
+        r = redis.Redis()
+        pipe = r.ts().pipeline()
+        for i in range(100):
+            pipe.add("with_pipeline", i, 1.1 * i)
+        pipe.execute()
+
+        """
+        if isinstance(self.client, redis.RedisCluster):
+            p = ClusterPipeline(
+                nodes_manager=self.client.nodes_manager,
+                commands_parser=self.client.commands_parser,
+                startup_nodes=self.client.nodes_manager.startup_nodes,
+                result_callbacks=self.client.result_callbacks,
+                cluster_response_callbacks=self.client.cluster_response_callbacks,
+                cluster_error_retry_attempts=self.client.cluster_error_retry_attempts,
+                read_from_replicas=self.client.read_from_replicas,
+                reinitialize_steps=self.client.reinitialize_steps,
+                lock=self.client._lock,
+            )
+
+        else:
+            p = Pipeline(
+                connection_pool=self.client.connection_pool,
+                response_callbacks=self._MODULE_CALLBACKS,
+                transaction=transaction,
+                shard_hint=shard_hint,
+            )
+        return p
+
+
+class ClusterPipeline(TimeSeriesCommands, redis.cluster.ClusterPipeline):
+    """Cluster pipeline for the module."""
+
+
+class Pipeline(TimeSeriesCommands, redis.client.Pipeline):
+    """Pipeline for the module."""
diff --git a/templates/skills/spotify/dependencies/redis/commands/timeseries/commands.py b/templates/skills/spotify/dependencies/redis/commands/timeseries/commands.py
new file mode 100644
index 00000000..ad137f2d
--- /dev/null
+++ b/templates/skills/spotify/dependencies/redis/commands/timeseries/commands.py
@@ -0,0 +1,905 @@
+from typing import Dict, List, Optional, Tuple, Union
+
+from redis.exceptions import DataError
+from redis.typing import KeyT, Number
+
+ADD_CMD = "TS.ADD"
+ALTER_CMD = "TS.ALTER"
+CREATERULE_CMD = "TS.CREATERULE"
+CREATE_CMD = "TS.CREATE"
+DECRBY_CMD = "TS.DECRBY"
+DELETERULE_CMD = "TS.DELETERULE"
+DEL_CMD = "TS.DEL"
+GET_CMD = "TS.GET"
+INCRBY_CMD = "TS.INCRBY"
+INFO_CMD = "TS.INFO"
+MADD_CMD = "TS.MADD"
+MGET_CMD = "TS.MGET"
+MRANGE_CMD = "TS.MRANGE"
+MREVRANGE_CMD = "TS.MREVRANGE"
+QUERYINDEX_CMD = "TS.QUERYINDEX"
+RANGE_CMD = "TS.RANGE"
+REVRANGE_CMD = "TS.REVRANGE"
+
+
+class TimeSeriesCommands:
+    """RedisTimeSeries Commands."""
+
+    def create(
+        self,
+        key: KeyT,
+        retention_msecs: Optional[int] = None,
+        uncompressed: Optional[bool] = False,
+        labels: Optional[Dict[str, str]] = None,
+        chunk_size: Optional[int] = None,
+        duplicate_policy: Optional[str] = None,
+    ):
+        """
+        Create a new time-series.
+
+        Args:
+
+        key:
+            time-series key
+        retention_msecs:
+            Maximum age for samples compared to highest reported timestamp (in milliseconds).
+            If None or 0 is passed then the series is not trimmed at all.
+        uncompressed:
+            Changes data storage from compressed (by default) to uncompressed
+        labels:
+            Set of label-value pairs that represent metadata labels of the key.
+        chunk_size:
+            Memory size, in bytes, allocated for each data chunk.
+            Must be a multiple of 8 in the range [128 .. 1048576].
+        duplicate_policy:
+            Policy for handling multiple samples with identical timestamps.
+            Can be one of:
+            - 'block': an error will occur for any out of order sample.
+            - 'first': ignore the new value.
+            - 'last': override with latest value.
+            - 'min': only override if the value is lower than the existing value.
+            - 'max': only override if the value is higher than the existing value.
+            - 'sum': If a previous sample exists, add the new sample to it so that \
+                the updated value is equal to (previous + new). If no previous sample \
+                exists, set the updated value equal to the new value.
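Putting the pieces above together, a hedged end-to-end sketch: create a series with a duplicate policy, then batch writes through the module pipeline (the key name "with_pipeline" follows the docstring example).

```python
import redis

r = redis.Redis()
ts = r.ts()
ts.create("with_pipeline", duplicate_policy="last")

pipe = ts.pipeline()
for i in range(100):
    pipe.add("with_pipeline", i, 1.1 * i)   # explicit timestamps 0..99
pipe.execute()
```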
+ + For more information: https://redis.io/commands/ts.create/ + """ # noqa + params = [key] + self._append_retention(params, retention_msecs) + self._append_uncompressed(params, uncompressed) + self._append_chunk_size(params, chunk_size) + self._append_duplicate_policy(params, CREATE_CMD, duplicate_policy) + self._append_labels(params, labels) + + return self.execute_command(CREATE_CMD, *params) + + def alter( + self, + key: KeyT, + retention_msecs: Optional[int] = None, + labels: Optional[Dict[str, str]] = None, + chunk_size: Optional[int] = None, + duplicate_policy: Optional[str] = None, + ): + """ + Update the retention, chunk size, duplicate policy, and labels of an existing + time series. + + Args: + + key: + time-series key + retention_msecs: + Maximum retention period, compared to maximal existing timestamp (in milliseconds). + If None or 0 is passed then the series is not trimmed at all. + labels: + Set of label-value pairs that represent metadata labels of the key. + chunk_size: + Memory size, in bytes, allocated for each data chunk. + Must be a multiple of 8 in the range [128 .. 1048576]. + duplicate_policy: + Policy for handling multiple samples with identical timestamps. + Can be one of: + - 'block': an error will occur for any out of order sample. + - 'first': ignore the new value. + - 'last': override with latest value. + - 'min': only override if the value is lower than the existing value. + - 'max': only override if the value is higher than the existing value. + - 'sum': If a previous sample exists, add the new sample to it so that \ + the updated value is equal to (previous + new). If no previous sample \ + exists, set the updated value equal to the new value. + + For more information: https://redis.io/commands/ts.alter/ + """ # noqa + params = [key] + self._append_retention(params, retention_msecs) + self._append_chunk_size(params, chunk_size) + self._append_duplicate_policy(params, ALTER_CMD, duplicate_policy) + self._append_labels(params, labels) + + return self.execute_command(ALTER_CMD, *params) + + def add( + self, + key: KeyT, + timestamp: Union[int, str], + value: Number, + retention_msecs: Optional[int] = None, + uncompressed: Optional[bool] = False, + labels: Optional[Dict[str, str]] = None, + chunk_size: Optional[int] = None, + duplicate_policy: Optional[str] = None, + ): + """ + Append (or create and append) a new sample to a time series. + + Args: + + key: + time-series key + timestamp: + Timestamp of the sample. * can be used for automatic timestamp (using the system clock). + value: + Numeric data value of the sample + retention_msecs: + Maximum retention period, compared to maximal existing timestamp (in milliseconds). + If None or 0 is passed then the series is not trimmed at all. + uncompressed: + Changes data storage from compressed (by default) to uncompressed + labels: + Set of label-value pairs that represent metadata labels of the key. + chunk_size: + Memory size, in bytes, allocated for each data chunk. + Must be a multiple of 8 in the range [128 .. 1048576]. + duplicate_policy: + Policy for handling multiple samples with identical timestamps. + Can be one of: + - 'block': an error will occur for any out of order sample. + - 'first': ignore the new value. + - 'last': override with latest value. + - 'min': only override if the value is lower than the existing value. + - 'max': only override if the value is higher than the existing value. 
+ - 'sum': If a previous sample exists, add the new sample to it so that \ + the updated value is equal to (previous + new). If no previous sample \ + exists, set the updated value equal to the new value. + + For more information: https://redis.io/commands/ts.add/ + """ # noqa + params = [key, timestamp, value] + self._append_retention(params, retention_msecs) + self._append_uncompressed(params, uncompressed) + self._append_chunk_size(params, chunk_size) + self._append_duplicate_policy(params, ADD_CMD, duplicate_policy) + self._append_labels(params, labels) + + return self.execute_command(ADD_CMD, *params) + + def madd(self, ktv_tuples: List[Tuple[KeyT, Union[int, str], Number]]): + """ + Append (or create and append) a new `value` to series + `key` with `timestamp`. + Expects a list of `tuples` as (`key`,`timestamp`, `value`). + Return value is an array with timestamps of insertions. + + For more information: https://redis.io/commands/ts.madd/ + """ # noqa + params = [] + for ktv in ktv_tuples: + params.extend(ktv) + + return self.execute_command(MADD_CMD, *params) + + def incrby( + self, + key: KeyT, + value: Number, + timestamp: Optional[Union[int, str]] = None, + retention_msecs: Optional[int] = None, + uncompressed: Optional[bool] = False, + labels: Optional[Dict[str, str]] = None, + chunk_size: Optional[int] = None, + ): + """ + Increment (or create an time-series and increment) the latest sample's of a series. + This command can be used as a counter or gauge that automatically gets history as a time series. + + Args: + + key: + time-series key + value: + Numeric data value of the sample + timestamp: + Timestamp of the sample. * can be used for automatic timestamp (using the system clock). + retention_msecs: + Maximum age for samples compared to last event time (in milliseconds). + If None or 0 is passed then the series is not trimmed at all. + uncompressed: + Changes data storage from compressed (by default) to uncompressed + labels: + Set of label-value pairs that represent metadata labels of the key. + chunk_size: + Memory size, in bytes, allocated for each data chunk. + + For more information: https://redis.io/commands/ts.incrby/ + """ # noqa + params = [key, value] + self._append_timestamp(params, timestamp) + self._append_retention(params, retention_msecs) + self._append_uncompressed(params, uncompressed) + self._append_chunk_size(params, chunk_size) + self._append_labels(params, labels) + + return self.execute_command(INCRBY_CMD, *params) + + def decrby( + self, + key: KeyT, + value: Number, + timestamp: Optional[Union[int, str]] = None, + retention_msecs: Optional[int] = None, + uncompressed: Optional[bool] = False, + labels: Optional[Dict[str, str]] = None, + chunk_size: Optional[int] = None, + ): + """ + Decrement (or create an time-series and decrement) the latest sample's of a series. + This command can be used as a counter or gauge that automatically gets history as a time series. + + Args: + + key: + time-series key + value: + Numeric data value of the sample + timestamp: + Timestamp of the sample. * can be used for automatic timestamp (using the system clock). + retention_msecs: + Maximum age for samples compared to last event time (in milliseconds). + If None or 0 is passed then the series is not trimmed at all. + uncompressed: + Changes data storage from compressed (by default) to uncompressed + labels: + Set of label-value pairs that represent metadata labels of the key. + chunk_size: + Memory size, in bytes, allocated for each data chunk. 
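For the write commands above, a brief sketch; the key names are made up, and `*` asks the server to timestamp the sample itself.

```python
import redis

ts = redis.Redis(decode_responses=True).ts()

ts.add("temp:room1", "*", 21.5)                  # server-side timestamp
ts.madd([("temp:room1", 1005, 22.0), ("temp:room2", 1005, 19.5)])
ts.incrby("hits:home", 1)                        # counter-style series
ts.decrby("stock:widget", 2)
```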
+
+        For more information: https://redis.io/commands/ts.decrby/
+        """  # noqa
+        params = [key, value]
+        self._append_timestamp(params, timestamp)
+        self._append_retention(params, retention_msecs)
+        self._append_uncompressed(params, uncompressed)
+        self._append_chunk_size(params, chunk_size)
+        self._append_labels(params, labels)
+
+        return self.execute_command(DECRBY_CMD, *params)
+
+    def delete(self, key: KeyT, from_time: int, to_time: int):
+        """
+        Delete all samples between two timestamps for a given time series.
+
+        Args:
+
+        key:
+            time-series key.
+        from_time:
+            Start timestamp for the range deletion.
+        to_time:
+            End timestamp for the range deletion.
+
+        For more information: https://redis.io/commands/ts.del/
+        """  # noqa
+        return self.execute_command(DEL_CMD, key, from_time, to_time)
+
+    def createrule(
+        self,
+        source_key: KeyT,
+        dest_key: KeyT,
+        aggregation_type: str,
+        bucket_size_msec: int,
+        align_timestamp: Optional[int] = None,
+    ):
+        """
+        Create a compaction rule from values added to `source_key` into `dest_key`.
+
+        Args:
+
+        source_key:
+            Key name for source time series
+        dest_key:
+            Key name for destination (compacted) time series
+        aggregation_type:
+            Aggregation type: One of the following:
+            [`avg`, `sum`, `min`, `max`, `range`, `count`, `first`, `last`, `std.p`,
+            `std.s`, `var.p`, `var.s`, `twa`]
+        bucket_size_msec:
+            Duration of each bucket, in milliseconds
+        align_timestamp:
+            Ensure that there is a bucket that starts exactly at align_timestamp and
+            align all other buckets accordingly.
+
+        For more information: https://redis.io/commands/ts.createrule/
+        """  # noqa
+        params = [source_key, dest_key]
+        self._append_aggregation(params, aggregation_type, bucket_size_msec)
+        if align_timestamp is not None:
+            params.append(align_timestamp)
+
+        return self.execute_command(CREATERULE_CMD, *params)
+
+    def deleterule(self, source_key: KeyT, dest_key: KeyT):
+        """
+        Delete a compaction rule from `source_key` to `dest_key`.
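A small compaction sketch for `createrule`/`deleterule`; note the destination series must already exist when the rule is created. Key names are hypothetical.

```python
import redis

ts = redis.Redis().ts()
ts.create("sensor:raw")
ts.create("sensor:avg1m")
ts.createrule("sensor:raw", "sensor:avg1m", "avg", 60000)  # 60-second buckets
# ... later, stop compacting:
ts.deleterule("sensor:raw", "sensor:avg1m")
```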
+ + For more information: https://redis.io/commands/ts.deleterule/ + """ # noqa + return self.execute_command(DELETERULE_CMD, source_key, dest_key) + + def __range_params( + self, + key: KeyT, + from_time: Union[int, str], + to_time: Union[int, str], + count: Optional[int], + aggregation_type: Optional[str], + bucket_size_msec: Optional[int], + filter_by_ts: Optional[List[int]], + filter_by_min_value: Optional[int], + filter_by_max_value: Optional[int], + align: Optional[Union[int, str]], + latest: Optional[bool], + bucket_timestamp: Optional[str], + empty: Optional[bool], + ): + """Create TS.RANGE and TS.REVRANGE arguments.""" + params = [key, from_time, to_time] + self._append_latest(params, latest) + self._append_filer_by_ts(params, filter_by_ts) + self._append_filer_by_value(params, filter_by_min_value, filter_by_max_value) + self._append_count(params, count) + self._append_align(params, align) + self._append_aggregation(params, aggregation_type, bucket_size_msec) + self._append_bucket_timestamp(params, bucket_timestamp) + self._append_empty(params, empty) + + return params + + def range( + self, + key: KeyT, + from_time: Union[int, str], + to_time: Union[int, str], + count: Optional[int] = None, + aggregation_type: Optional[str] = None, + bucket_size_msec: Optional[int] = 0, + filter_by_ts: Optional[List[int]] = None, + filter_by_min_value: Optional[int] = None, + filter_by_max_value: Optional[int] = None, + align: Optional[Union[int, str]] = None, + latest: Optional[bool] = False, + bucket_timestamp: Optional[str] = None, + empty: Optional[bool] = False, + ): + """ + Query a range in forward direction for a specific time-serie. + + Args: + + key: + Key name for timeseries. + from_time: + Start timestamp for the range query. - can be used to express the minimum possible timestamp (0). + to_time: + End timestamp for range query, + can be used to express the maximum possible timestamp. + count: + Limits the number of returned samples. + aggregation_type: + Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`, + `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`] + bucket_size_msec: + Time bucket for aggregation in milliseconds. + filter_by_ts: + List of timestamps to filter the result by specific timestamps. + filter_by_min_value: + Filter result by minimum value (must mention also filter by_max_value). + filter_by_max_value: + Filter result by maximum value (must mention also filter by_min_value). + align: + Timestamp for alignment control for aggregation. + latest: + Used when a time series is a compaction, reports the compacted value of the + latest possibly partial bucket + bucket_timestamp: + Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`, + `high`, `~`, `mid`]. + empty: + Reports aggregations for empty buckets. 
+ + For more information: https://redis.io/commands/ts.range/ + """ # noqa + params = self.__range_params( + key, + from_time, + to_time, + count, + aggregation_type, + bucket_size_msec, + filter_by_ts, + filter_by_min_value, + filter_by_max_value, + align, + latest, + bucket_timestamp, + empty, + ) + return self.execute_command(RANGE_CMD, *params) + + def revrange( + self, + key: KeyT, + from_time: Union[int, str], + to_time: Union[int, str], + count: Optional[int] = None, + aggregation_type: Optional[str] = None, + bucket_size_msec: Optional[int] = 0, + filter_by_ts: Optional[List[int]] = None, + filter_by_min_value: Optional[int] = None, + filter_by_max_value: Optional[int] = None, + align: Optional[Union[int, str]] = None, + latest: Optional[bool] = False, + bucket_timestamp: Optional[str] = None, + empty: Optional[bool] = False, + ): + """ + Query a range in reverse direction for a specific time-series. + + **Note**: This command is only available since RedisTimeSeries >= v1.4 + + Args: + + key: + Key name for timeseries. + from_time: + Start timestamp for the range query. - can be used to express the minimum possible timestamp (0). + to_time: + End timestamp for range query, + can be used to express the maximum possible timestamp. + count: + Limits the number of returned samples. + aggregation_type: + Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`, + `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`] + bucket_size_msec: + Time bucket for aggregation in milliseconds. + filter_by_ts: + List of timestamps to filter the result by specific timestamps. + filter_by_min_value: + Filter result by minimum value (must mention also filter_by_max_value). + filter_by_max_value: + Filter result by maximum value (must mention also filter_by_min_value). + align: + Timestamp for alignment control for aggregation. + latest: + Used when a time series is a compaction, reports the compacted value of the + latest possibly partial bucket + bucket_timestamp: + Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`, + `high`, `~`, `mid`]. + empty: + Reports aggregations for empty buckets. 
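The query-side counterpart, continuing the earlier sketch (`ts = redis.Redis().ts()`): `-` and `+` denote the series' minimum and maximum timestamps, and the return shape matches `parse_range` below, a list of `(timestamp, value)` tuples.

```python
samples = ts.range(
    "sensor:raw", "-", "+",
    aggregation_type="avg",
    bucket_size_msec=60000,
)
newest_ten = ts.revrange("sensor:raw", "-", "+", count=10)
```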
+ + For more information: https://redis.io/commands/ts.revrange/ + """ # noqa + params = self.__range_params( + key, + from_time, + to_time, + count, + aggregation_type, + bucket_size_msec, + filter_by_ts, + filter_by_min_value, + filter_by_max_value, + align, + latest, + bucket_timestamp, + empty, + ) + return self.execute_command(REVRANGE_CMD, *params) + + def __mrange_params( + self, + aggregation_type: Optional[str], + bucket_size_msec: Optional[int], + count: Optional[int], + filters: List[str], + from_time: Union[int, str], + to_time: Union[int, str], + with_labels: Optional[bool], + filter_by_ts: Optional[List[int]], + filter_by_min_value: Optional[int], + filter_by_max_value: Optional[int], + groupby: Optional[str], + reduce: Optional[str], + select_labels: Optional[List[str]], + align: Optional[Union[int, str]], + latest: Optional[bool], + bucket_timestamp: Optional[str], + empty: Optional[bool], + ): + """Create TS.MRANGE and TS.MREVRANGE arguments.""" + params = [from_time, to_time] + self._append_latest(params, latest) + self._append_filer_by_ts(params, filter_by_ts) + self._append_filer_by_value(params, filter_by_min_value, filter_by_max_value) + self._append_with_labels(params, with_labels, select_labels) + self._append_count(params, count) + self._append_align(params, align) + self._append_aggregation(params, aggregation_type, bucket_size_msec) + self._append_bucket_timestamp(params, bucket_timestamp) + self._append_empty(params, empty) + params.extend(["FILTER"]) + params += filters + self._append_groupby_reduce(params, groupby, reduce) + return params + + def mrange( + self, + from_time: Union[int, str], + to_time: Union[int, str], + filters: List[str], + count: Optional[int] = None, + aggregation_type: Optional[str] = None, + bucket_size_msec: Optional[int] = 0, + with_labels: Optional[bool] = False, + filter_by_ts: Optional[List[int]] = None, + filter_by_min_value: Optional[int] = None, + filter_by_max_value: Optional[int] = None, + groupby: Optional[str] = None, + reduce: Optional[str] = None, + select_labels: Optional[List[str]] = None, + align: Optional[Union[int, str]] = None, + latest: Optional[bool] = False, + bucket_timestamp: Optional[str] = None, + empty: Optional[bool] = False, + ): + """ + Query a range across multiple time-series by filters in forward direction. + + Args: + + from_time: + Start timestamp for the range query. `-` can be used to express the minimum possible timestamp (0). + to_time: + End timestamp for range query, `+` can be used to express the maximum possible timestamp. + filters: + filter to match the time-series labels. + count: + Limits the number of returned samples. + aggregation_type: + Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`, + `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`] + bucket_size_msec: + Time bucket for aggregation in milliseconds. + with_labels: + Include in the reply all label-value pairs representing metadata labels of the time series. + filter_by_ts: + List of timestamps to filter the result by specific timestamps. + filter_by_min_value: + Filter result by minimum value (must mention also filter_by_max_value). + filter_by_max_value: + Filter result by maximum value (must mention also filter_by_min_value). + groupby: + Grouping by fields the results (must mention also reduce). + reduce: + Applying reducer functions on each group. Can be one of [`avg` `sum`, `min`, + `max`, `range`, `count`, `std.p`, `std.s`, `var.p`, `var.s`]. 
+ select_labels: + Include in the reply only a subset of the key-value pair labels of a series. + align: + Timestamp for alignment control for aggregation. + latest: + Used when a time series is a compaction, reports the compacted + value of the latest possibly partial bucket + bucket_timestamp: + Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`, + `high`, `~`, `mid`]. + empty: + Reports aggregations for empty buckets. + + For more information: https://redis.io/commands/ts.mrange/ + """ # noqa + params = self.__mrange_params( + aggregation_type, + bucket_size_msec, + count, + filters, + from_time, + to_time, + with_labels, + filter_by_ts, + filter_by_min_value, + filter_by_max_value, + groupby, + reduce, + select_labels, + align, + latest, + bucket_timestamp, + empty, + ) + + return self.execute_command(MRANGE_CMD, *params) + + def mrevrange( + self, + from_time: Union[int, str], + to_time: Union[int, str], + filters: List[str], + count: Optional[int] = None, + aggregation_type: Optional[str] = None, + bucket_size_msec: Optional[int] = 0, + with_labels: Optional[bool] = False, + filter_by_ts: Optional[List[int]] = None, + filter_by_min_value: Optional[int] = None, + filter_by_max_value: Optional[int] = None, + groupby: Optional[str] = None, + reduce: Optional[str] = None, + select_labels: Optional[List[str]] = None, + align: Optional[Union[int, str]] = None, + latest: Optional[bool] = False, + bucket_timestamp: Optional[str] = None, + empty: Optional[bool] = False, + ): + """ + Query a range across multiple time-series by filters in reverse direction. + + Args: + + from_time: + Start timestamp for the range query. - can be used to express the minimum possible timestamp (0). + to_time: + End timestamp for range query, + can be used to express the maximum possible timestamp. + filters: + Filter to match the time-series labels. + count: + Limits the number of returned samples. + aggregation_type: + Optional aggregation type. Can be one of [`avg`, `sum`, `min`, `max`, + `range`, `count`, `first`, `last`, `std.p`, `std.s`, `var.p`, `var.s`, `twa`] + bucket_size_msec: + Time bucket for aggregation in milliseconds. + with_labels: + Include in the reply all label-value pairs representing metadata labels of the time series. + filter_by_ts: + List of timestamps to filter the result by specific timestamps. + filter_by_min_value: + Filter result by minimum value (must mention also filter_by_max_value). + filter_by_max_value: + Filter result by maximum value (must mention also filter_by_min_value). + groupby: + Grouping by fields the results (must mention also reduce). + reduce: + Applying reducer functions on each group. Can be one of [`avg` `sum`, `min`, + `max`, `range`, `count`, `std.p`, `std.s`, `var.p`, `var.s`]. + select_labels: + Include in the reply only a subset of the key-value pair labels of a series. + align: + Timestamp for alignment control for aggregation. + latest: + Used when a time series is a compaction, reports the compacted + value of the latest possibly partial bucket + bucket_timestamp: + Controls how bucket timestamps are reported. Can be one of [`-`, `low`, `+`, + `high`, `~`, `mid`]. + empty: + Reports aggregations for empty buckets. 
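A hedged `mrange` sketch over label filters, continuing the same setup; the `unit` and `room` labels are hypothetical and would have been set via `labels={...}` at creation time.

```python
res = ts.mrange(
    "-", "+",
    filters=["unit=celsius"],
    groupby="room",
    reduce="max",
)
```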
+ + For more information: https://redis.io/commands/ts.mrevrange/ + """ # noqa + params = self.__mrange_params( + aggregation_type, + bucket_size_msec, + count, + filters, + from_time, + to_time, + with_labels, + filter_by_ts, + filter_by_min_value, + filter_by_max_value, + groupby, + reduce, + select_labels, + align, + latest, + bucket_timestamp, + empty, + ) + + return self.execute_command(MREVRANGE_CMD, *params) + + def get(self, key: KeyT, latest: Optional[bool] = False): + """# noqa + Get the last sample of `key`. + `latest` used when a time series is a compaction, reports the compacted + value of the latest (possibly partial) bucket + + For more information: https://redis.io/commands/ts.get/ + """ # noqa + params = [key] + self._append_latest(params, latest) + return self.execute_command(GET_CMD, *params) + + def mget( + self, + filters: List[str], + with_labels: Optional[bool] = False, + select_labels: Optional[List[str]] = None, + latest: Optional[bool] = False, + ): + """# noqa + Get the last samples matching the specific `filter`. + + Args: + + filters: + Filter to match the time-series labels. + with_labels: + Include in the reply all label-value pairs representing metadata + labels of the time series. + select_labels: + Include in the reply only a subset of the key-value pair labels of a series. + latest: + Used when a time series is a compaction, reports the compacted + value of the latest possibly partial bucket + + For more information: https://redis.io/commands/ts.mget/ + """ # noqa + params = [] + self._append_latest(params, latest) + self._append_with_labels(params, with_labels, select_labels) + params.extend(["FILTER"]) + params += filters + return self.execute_command(MGET_CMD, *params) + + def info(self, key: KeyT): + """# noqa + Get information of `key`. + + For more information: https://redis.io/commands/ts.info/ + """ # noqa + return self.execute_command(INFO_CMD, key) + + def queryindex(self, filters: List[str]): + """# noqa + Get all time series keys matching the `filter` list. + + For more information: https://redis.io/commands/ts.queryindex/ + """ # noq + return self.execute_command(QUERYINDEX_CMD, *filters) + + @staticmethod + def _append_uncompressed(params: List[str], uncompressed: Optional[bool]): + """Append UNCOMPRESSED tag to params.""" + if uncompressed: + params.extend(["UNCOMPRESSED"]) + + @staticmethod + def _append_with_labels( + params: List[str], + with_labels: Optional[bool], + select_labels: Optional[List[str]], + ): + """Append labels behavior to params.""" + if with_labels and select_labels: + raise DataError( + "with_labels and select_labels cannot be provided together." 
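And the point-lookup commands, under the same assumptions:

```python
last = ts.get("sensor:raw")                      # (timestamp, value), or None
latest = ts.mget(["unit=celsius"], with_labels=True)
keys = ts.queryindex(["unit=celsius"])
```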
+ ) + + if with_labels: + params.extend(["WITHLABELS"]) + if select_labels: + params.extend(["SELECTED_LABELS", *select_labels]) + + @staticmethod + def _append_groupby_reduce( + params: List[str], groupby: Optional[str], reduce: Optional[str] + ): + """Append GROUPBY REDUCE property to params.""" + if groupby is not None and reduce is not None: + params.extend(["GROUPBY", groupby, "REDUCE", reduce.upper()]) + + @staticmethod + def _append_retention(params: List[str], retention: Optional[int]): + """Append RETENTION property to params.""" + if retention is not None: + params.extend(["RETENTION", retention]) + + @staticmethod + def _append_labels(params: List[str], labels: Optional[List[str]]): + """Append LABELS property to params.""" + if labels: + params.append("LABELS") + for k, v in labels.items(): + params.extend([k, v]) + + @staticmethod + def _append_count(params: List[str], count: Optional[int]): + """Append COUNT property to params.""" + if count is not None: + params.extend(["COUNT", count]) + + @staticmethod + def _append_timestamp(params: List[str], timestamp: Optional[int]): + """Append TIMESTAMP property to params.""" + if timestamp is not None: + params.extend(["TIMESTAMP", timestamp]) + + @staticmethod + def _append_align(params: List[str], align: Optional[Union[int, str]]): + """Append ALIGN property to params.""" + if align is not None: + params.extend(["ALIGN", align]) + + @staticmethod + def _append_aggregation( + params: List[str], + aggregation_type: Optional[str], + bucket_size_msec: Optional[int], + ): + """Append AGGREGATION property to params.""" + if aggregation_type is not None: + params.extend(["AGGREGATION", aggregation_type, bucket_size_msec]) + + @staticmethod + def _append_chunk_size(params: List[str], chunk_size: Optional[int]): + """Append CHUNK_SIZE property to params.""" + if chunk_size is not None: + params.extend(["CHUNK_SIZE", chunk_size]) + + @staticmethod + def _append_duplicate_policy( + params: List[str], command: Optional[str], duplicate_policy: Optional[str] + ): + """Append DUPLICATE_POLICY property to params on CREATE + and ON_DUPLICATE on ADD. 
+ """ + if duplicate_policy is not None: + if command == "TS.ADD": + params.extend(["ON_DUPLICATE", duplicate_policy]) + else: + params.extend(["DUPLICATE_POLICY", duplicate_policy]) + + @staticmethod + def _append_filer_by_ts(params: List[str], ts_list: Optional[List[int]]): + """Append FILTER_BY_TS property to params.""" + if ts_list is not None: + params.extend(["FILTER_BY_TS", *ts_list]) + + @staticmethod + def _append_filer_by_value( + params: List[str], min_value: Optional[int], max_value: Optional[int] + ): + """Append FILTER_BY_VALUE property to params.""" + if min_value is not None and max_value is not None: + params.extend(["FILTER_BY_VALUE", min_value, max_value]) + + @staticmethod + def _append_latest(params: List[str], latest: Optional[bool]): + """Append LATEST property to params.""" + if latest: + params.append("LATEST") + + @staticmethod + def _append_bucket_timestamp(params: List[str], bucket_timestamp: Optional[str]): + """Append BUCKET_TIMESTAMP property to params.""" + if bucket_timestamp is not None: + params.extend(["BUCKETTIMESTAMP", bucket_timestamp]) + + @staticmethod + def _append_empty(params: List[str], empty: Optional[bool]): + """Append EMPTY property to params.""" + if empty: + params.append("EMPTY") diff --git a/templates/skills/spotify/dependencies/redis/commands/timeseries/info.py b/templates/skills/spotify/dependencies/redis/commands/timeseries/info.py new file mode 100644 index 00000000..3a384dc0 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/timeseries/info.py @@ -0,0 +1,91 @@ +from ..helpers import nativestr +from .utils import list_to_dict + + +class TSInfo: + """ + Hold information and statistics on the time-series. + Can be created using ``tsinfo`` command + https://oss.redis.com/redistimeseries/commands/#tsinfo. + """ + + rules = [] + labels = [] + sourceKey = None + chunk_count = None + memory_usage = None + total_samples = None + retention_msecs = None + last_time_stamp = None + first_time_stamp = None + + max_samples_per_chunk = None + chunk_size = None + duplicate_policy = None + + def __init__(self, args): + """ + Hold information and statistics on the time-series. + + The supported params that can be passed as args: + + rules: + A list of compaction rules of the time series. + sourceKey: + Key name for source time series in case the current series + is a target of a rule. + chunkCount: + Number of Memory Chunks used for the time series. + memoryUsage: + Total number of bytes allocated for the time series. + totalSamples: + Total number of samples in the time series. + labels: + A list of label-value pairs that represent the metadata + labels of the time series. + retentionTime: + Retention time, in milliseconds, for the time series. + lastTimestamp: + Last timestamp present in the time series. + firstTimestamp: + First timestamp present in the time series. + maxSamplesPerChunk: + Deprecated. + chunkSize: + Amount of memory, in bytes, allocated for data. + duplicatePolicy: + Policy that will define handling of duplicate samples. 
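`TSInfo` is what `ts.info()` returns under RESP2; fields can be read as attributes or via `get`, which returns None for fields the server omitted. A sketch, continuing the earlier setup:

```python
info = ts.info("sensor:raw")
print(info.total_samples, info.retention_msecs)
print(info.get("duplicate_policy"))   # None if the field is absent
```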
+ + Can read more about on + https://oss.redis.com/redistimeseries/configuration/#duplicate_policy + """ + response = dict(zip(map(nativestr, args[::2]), args[1::2])) + self.rules = response.get("rules") + self.source_key = response.get("sourceKey") + self.chunk_count = response.get("chunkCount") + self.memory_usage = response.get("memoryUsage") + self.total_samples = response.get("totalSamples") + self.labels = list_to_dict(response.get("labels")) + self.retention_msecs = response.get("retentionTime") + self.last_timestamp = response.get("lastTimestamp") + self.first_timestamp = response.get("firstTimestamp") + if "maxSamplesPerChunk" in response: + self.max_samples_per_chunk = response["maxSamplesPerChunk"] + self.chunk_size = ( + self.max_samples_per_chunk * 16 + ) # backward compatible changes + if "chunkSize" in response: + self.chunk_size = response["chunkSize"] + if "duplicatePolicy" in response: + self.duplicate_policy = response["duplicatePolicy"] + if type(self.duplicate_policy) == bytes: + self.duplicate_policy = self.duplicate_policy.decode() + + def get(self, item): + try: + return self.__getitem__(item) + except AttributeError: + return None + + def __getitem__(self, item): + return getattr(self, item) diff --git a/templates/skills/spotify/dependencies/redis/commands/timeseries/utils.py b/templates/skills/spotify/dependencies/redis/commands/timeseries/utils.py new file mode 100644 index 00000000..c49b0402 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/commands/timeseries/utils.py @@ -0,0 +1,44 @@ +from ..helpers import nativestr + + +def list_to_dict(aList): + return {nativestr(aList[i][0]): nativestr(aList[i][1]) for i in range(len(aList))} + + +def parse_range(response): + """Parse range response. Used by TS.RANGE and TS.REVRANGE.""" + return [tuple((r[0], float(r[1]))) for r in response] + + +def parse_m_range(response): + """Parse multi range response. Used by TS.MRANGE and TS.MREVRANGE.""" + res = [] + for item in response: + res.append({nativestr(item[0]): [list_to_dict(item[1]), parse_range(item[2])]}) + return sorted(res, key=lambda d: list(d.keys())) + + +def parse_get(response): + """Parse get response. Used by TS.GET.""" + if not response: + return None + return int(response[0]), float(response[1]) + + +def parse_m_get(response): + """Parse multi get response. 
Used by TS.MGET.""" + res = [] + for item in response: + if not item[2]: + res.append({nativestr(item[0]): [list_to_dict(item[1]), None, None]}) + else: + res.append( + { + nativestr(item[0]): [ + list_to_dict(item[1]), + int(item[2][0]), + float(item[2][1]), + ] + } + ) + return sorted(res, key=lambda d: list(d.keys())) diff --git a/templates/skills/spotify/dependencies/redis/compat.py b/templates/skills/spotify/dependencies/redis/compat.py new file mode 100644 index 00000000..e4784934 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/compat.py @@ -0,0 +1,6 @@ +# flake8: noqa +try: + from typing import Literal, Protocol, TypedDict # lgtm [py/unused-import] +except ImportError: + from typing_extensions import Literal # lgtm [py/unused-import] + from typing_extensions import Protocol, TypedDict diff --git a/templates/skills/spotify/dependencies/redis/connection.py b/templates/skills/spotify/dependencies/redis/connection.py new file mode 100644 index 00000000..346ff3aa --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/connection.py @@ -0,0 +1,1359 @@ +import copy +import os +import socket +import ssl +import sys +import threading +import weakref +from abc import abstractmethod +from itertools import chain +from queue import Empty, Full, LifoQueue +from time import time +from typing import Any, Callable, List, Optional, Type, Union +from urllib.parse import parse_qs, unquote, urlparse + +from ._parsers import Encoder, _HiredisParser, _RESP2Parser, _RESP3Parser +from .backoff import NoBackoff +from .credentials import CredentialProvider, UsernamePasswordCredentialProvider +from .exceptions import ( + AuthenticationError, + AuthenticationWrongNumberOfArgsError, + ChildDeadlockedError, + ConnectionError, + DataError, + RedisError, + ResponseError, + TimeoutError, +) +from .retry import Retry +from .utils import ( + CRYPTOGRAPHY_AVAILABLE, + HIREDIS_AVAILABLE, + HIREDIS_PACK_AVAILABLE, + SSL_AVAILABLE, + get_lib_version, + str_if_bytes, +) + +if HIREDIS_AVAILABLE: + import hiredis + +SYM_STAR = b"*" +SYM_DOLLAR = b"$" +SYM_CRLF = b"\r\n" +SYM_EMPTY = b"" + +DEFAULT_RESP_VERSION = 2 + +SENTINEL = object() + +DefaultParser: Type[Union[_RESP2Parser, _RESP3Parser, _HiredisParser]] +if HIREDIS_AVAILABLE: + DefaultParser = _HiredisParser +else: + DefaultParser = _RESP2Parser + + +class HiredisRespSerializer: + def pack(self, *args: List): + """Pack a series of arguments into the Redis protocol""" + output = [] + + if isinstance(args[0], str): + args = tuple(args[0].encode().split()) + args[1:] + elif b" " in args[0]: + args = tuple(args[0].split()) + args[1:] + try: + output.append(hiredis.pack_command(args)) + except TypeError: + _, value, traceback = sys.exc_info() + raise DataError(value).with_traceback(traceback) + + return output + + +class PythonRespSerializer: + def __init__(self, buffer_cutoff, encode) -> None: + self._buffer_cutoff = buffer_cutoff + self.encode = encode + + def pack(self, *args): + """Pack a series of arguments into the Redis protocol""" + output = [] + # the client might have included 1 or more literal arguments in + # the command name, e.g., 'CONFIG GET'. The Redis server expects these + # arguments to be sent separately, so split the first argument + # manually. These arguments should be bytestrings so that they are + # not encoded. 
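To make the framing concrete, a standalone illustration (not the library API) of the RESP bytes that `pack` produces for a simple command:

```python
def pack_resp(*args: bytes) -> bytes:
    out = b"*%d\r\n" % len(args)                   # array header: argument count
    for arg in args:
        out += b"$%d\r\n%s\r\n" % (len(arg), arg)  # one bulk string per argument
    return out

assert pack_resp(b"SET", b"foo", b"bar") == b"*3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\nbar\r\n"
```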
+ if isinstance(args[0], str): + args = tuple(args[0].encode().split()) + args[1:] + elif b" " in args[0]: + args = tuple(args[0].split()) + args[1:] + + buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF)) + + buffer_cutoff = self._buffer_cutoff + for arg in map(self.encode, args): + # to avoid large string mallocs, chunk the command into the + # output list if we're sending large values or memoryviews + arg_length = len(arg) + if ( + len(buff) > buffer_cutoff + or arg_length > buffer_cutoff + or isinstance(arg, memoryview) + ): + buff = SYM_EMPTY.join( + (buff, SYM_DOLLAR, str(arg_length).encode(), SYM_CRLF) + ) + output.append(buff) + output.append(arg) + buff = SYM_CRLF + else: + buff = SYM_EMPTY.join( + ( + buff, + SYM_DOLLAR, + str(arg_length).encode(), + SYM_CRLF, + arg, + SYM_CRLF, + ) + ) + output.append(buff) + return output + + +class AbstractConnection: + "Manages communication to and from a Redis server" + + def __init__( + self, + db: int = 0, + password: Optional[str] = None, + socket_timeout: Optional[float] = None, + socket_connect_timeout: Optional[float] = None, + retry_on_timeout: bool = False, + retry_on_error=SENTINEL, + encoding: str = "utf-8", + encoding_errors: str = "strict", + decode_responses: bool = False, + parser_class=DefaultParser, + socket_read_size: int = 65536, + health_check_interval: int = 0, + client_name: Optional[str] = None, + lib_name: Optional[str] = "redis-py", + lib_version: Optional[str] = get_lib_version(), + username: Optional[str] = None, + retry: Union[Any, None] = None, + redis_connect_func: Optional[Callable[[], None]] = None, + credential_provider: Optional[CredentialProvider] = None, + protocol: Optional[int] = 2, + command_packer: Optional[Callable[[], None]] = None, + ): + """ + Initialize a new Connection. + To specify a retry policy for specific errors, first set + `retry_on_error` to a list of the error/s to retry on, then set + `retry` to a valid `Retry` object. + To retry on TimeoutError, `retry_on_timeout` can also be set to `True`. + """ + if (username or password) and credential_provider is not None: + raise DataError( + "'username' and 'password' cannot be passed along with 'credential_" + "provider'. Please provide only one of the following arguments: \n" + "1. 'password' and (optional) 'username'\n" + "2. 
'credential_provider'" + ) + self.pid = os.getpid() + self.db = db + self.client_name = client_name + self.lib_name = lib_name + self.lib_version = lib_version + self.credential_provider = credential_provider + self.password = password + self.username = username + self.socket_timeout = socket_timeout + if socket_connect_timeout is None: + socket_connect_timeout = socket_timeout + self.socket_connect_timeout = socket_connect_timeout + self.retry_on_timeout = retry_on_timeout + if retry_on_error is SENTINEL: + retry_on_error = [] + if retry_on_timeout: + # Add TimeoutError to the errors list to retry on + retry_on_error.append(TimeoutError) + self.retry_on_error = retry_on_error + if retry or retry_on_error: + if retry is None: + self.retry = Retry(NoBackoff(), 1) + else: + # deep-copy the Retry object as it is mutable + self.retry = copy.deepcopy(retry) + # Update the retry's supported errors with the specified errors + self.retry.update_supported_errors(retry_on_error) + else: + self.retry = Retry(NoBackoff(), 0) + self.health_check_interval = health_check_interval + self.next_health_check = 0 + self.redis_connect_func = redis_connect_func + self.encoder = Encoder(encoding, encoding_errors, decode_responses) + self._sock = None + self._socket_read_size = socket_read_size + self.set_parser(parser_class) + self._connect_callbacks = [] + self._buffer_cutoff = 6000 + try: + p = int(protocol) + except TypeError: + p = DEFAULT_RESP_VERSION + except ValueError: + raise ConnectionError("protocol must be an integer") + finally: + if p < 2 or p > 3: + raise ConnectionError("protocol must be either 2 or 3") + # p = DEFAULT_RESP_VERSION + self.protocol = p + self._command_packer = self._construct_command_packer(command_packer) + + def __repr__(self): + repr_args = ",".join([f"{k}={v}" for k, v in self.repr_pieces()]) + return f"{self.__class__.__name__}<{repr_args}>" + + @abstractmethod + def repr_pieces(self): + pass + + def __del__(self): + try: + self.disconnect() + except Exception: + pass + + def _construct_command_packer(self, packer): + if packer is not None: + return packer + elif HIREDIS_PACK_AVAILABLE: + return HiredisRespSerializer() + else: + return PythonRespSerializer(self._buffer_cutoff, self.encoder.encode) + + def register_connect_callback(self, callback): + """ + Register a callback to be called when the connection is established either + initially or reconnected. This allows listeners to issue commands that + are ephemeral to the connection, for example pub/sub subscription or + key tracking. The callback must be a _method_ and will be kept as + a weak reference. + """ + wm = weakref.WeakMethod(callback) + if wm not in self._connect_callbacks: + self._connect_callbacks.append(wm) + + def deregister_connect_callback(self, callback): + """ + De-register a previously registered callback. It will no-longer receive + notifications on connection events. Calling this is not required when the + listener goes away, since the callbacks are kept as weak methods. 
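The mutual-exclusion check above corresponds to this usage split; a hedged sketch with placeholder credentials:

```python
import redis
from redis.credentials import UsernamePasswordCredentialProvider

# Either pass username/password directly ...
r1 = redis.Redis(username="myuser", password="s3cret")

# ... or wrap them in a credential provider, but not both.
r2 = redis.Redis(
    credential_provider=UsernamePasswordCredentialProvider("myuser", "s3cret")
)
```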
+ """ + try: + self._connect_callbacks.remove(weakref.WeakMethod(callback)) + except ValueError: + pass + + def set_parser(self, parser_class): + """ + Creates a new instance of parser_class with socket size: + _socket_read_size and assigns it to the parser for the connection + :param parser_class: The required parser class + """ + self._parser = parser_class(socket_read_size=self._socket_read_size) + + def connect(self): + "Connects to the Redis server if not already connected" + if self._sock: + return + try: + sock = self.retry.call_with_retry( + lambda: self._connect(), lambda error: self.disconnect(error) + ) + except socket.timeout: + raise TimeoutError("Timeout connecting to server") + except OSError as e: + raise ConnectionError(self._error_message(e)) + + self._sock = sock + try: + if self.redis_connect_func is None: + # Use the default on_connect function + self.on_connect() + else: + # Use the passed function redis_connect_func + self.redis_connect_func(self) + except RedisError: + # clean up after any error in on_connect + self.disconnect() + raise + + # run any user callbacks. right now the only internal callback + # is for pubsub channel/pattern resubscription + # first, remove any dead weakrefs + self._connect_callbacks = [ref for ref in self._connect_callbacks if ref()] + for ref in self._connect_callbacks: + callback = ref() + if callback: + callback(self) + + @abstractmethod + def _connect(self): + pass + + @abstractmethod + def _host_error(self): + pass + + @abstractmethod + def _error_message(self, exception): + pass + + def on_connect(self): + "Initialize the connection, authenticate and select a database" + self._parser.on_connect(self) + parser = self._parser + + auth_args = None + # if credential provider or username and/or password are set, authenticate + if self.credential_provider or (self.username or self.password): + cred_provider = ( + self.credential_provider + or UsernamePasswordCredentialProvider(self.username, self.password) + ) + auth_args = cred_provider.get_credentials() + + # if resp version is specified and we have auth args, + # we need to send them via HELLO + if auth_args and self.protocol not in [2, "2"]: + if isinstance(self._parser, _RESP2Parser): + self.set_parser(_RESP3Parser) + # update cluster exception classes + self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES + self._parser.on_connect(self) + if len(auth_args) == 1: + auth_args = ["default", auth_args[0]] + self.send_command("HELLO", self.protocol, "AUTH", *auth_args) + response = self.read_response() + # if response.get(b"proto") != self.protocol and response.get( + # "proto" + # ) != self.protocol: + # raise ConnectionError("Invalid RESP version") + elif auth_args: + # avoid checking health here -- PING will fail if we try + # to check the health prior to the AUTH + self.send_command("AUTH", *auth_args, check_health=False) + + try: + auth_response = self.read_response() + except AuthenticationWrongNumberOfArgsError: + # a username and password were specified but the Redis + # server seems to be < 6.0.0 which expects a single password + # arg. retry auth with just the password. 
+ # https://github.com/andymccurdy/redis-py/issues/1274 + self.send_command("AUTH", auth_args[-1], check_health=False) + auth_response = self.read_response() + + if str_if_bytes(auth_response) != "OK": + raise AuthenticationError("Invalid Username or Password") + + # if resp version is specified, switch to it + elif self.protocol not in [2, "2"]: + if isinstance(self._parser, _RESP2Parser): + self.set_parser(_RESP3Parser) + # update cluster exception classes + self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES + self._parser.on_connect(self) + self.send_command("HELLO", self.protocol) + response = self.read_response() + if ( + response.get(b"proto") != self.protocol + and response.get("proto") != self.protocol + ): + raise ConnectionError("Invalid RESP version") + + # if a client_name is given, set it + if self.client_name: + self.send_command("CLIENT", "SETNAME", self.client_name) + if str_if_bytes(self.read_response()) != "OK": + raise ConnectionError("Error setting client name") + + try: + # set the library name and version + if self.lib_name: + self.send_command("CLIENT", "SETINFO", "LIB-NAME", self.lib_name) + self.read_response() + except ResponseError: + pass + + try: + if self.lib_version: + self.send_command("CLIENT", "SETINFO", "LIB-VER", self.lib_version) + self.read_response() + except ResponseError: + pass + + # if a database is specified, switch to it + if self.db: + self.send_command("SELECT", self.db) + if str_if_bytes(self.read_response()) != "OK": + raise ConnectionError("Invalid Database") + + def disconnect(self, *args): + "Disconnects from the Redis server" + self._parser.on_disconnect() + + conn_sock = self._sock + self._sock = None + if conn_sock is None: + return + + if os.getpid() == self.pid: + try: + conn_sock.shutdown(socket.SHUT_RDWR) + except (OSError, TypeError): + pass + + try: + conn_sock.close() + except OSError: + pass + + def _send_ping(self): + """Send PING, expect PONG in return""" + self.send_command("PING", check_health=False) + if str_if_bytes(self.read_response()) != "PONG": + raise ConnectionError("Bad response from PING health check") + + def _ping_failed(self, error): + """Function to call when PING fails""" + self.disconnect() + + def check_health(self): + """Check the health of the connection with a PING/PONG""" + if self.health_check_interval and time() > self.next_health_check: + self.retry.call_with_retry(self._send_ping, self._ping_failed) + + def send_packed_command(self, command, check_health=True): + """Send an already packed command to the Redis server""" + if not self._sock: + self.connect() + # guard against health check recursion + if check_health: + self.check_health() + try: + if isinstance(command, str): + command = [command] + for item in command: + self._sock.sendall(item) + except socket.timeout: + self.disconnect() + raise TimeoutError("Timeout writing to socket") + except OSError as e: + self.disconnect() + if len(e.args) == 1: + errno, errmsg = "UNKNOWN", e.args[0] + else: + errno = e.args[0] + errmsg = e.args[1] + raise ConnectionError(f"Error {errno} while writing to socket. {errmsg}.") + except BaseException: + # BaseExceptions can be raised when a socket send operation is not + # finished, e.g. due to a timeout. Ideally, a caller could then re-try + # to send un-sent data. However, the send_packed_command() API + # does not support it so there is no point in keeping the connection open. 
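The health-check and retry machinery above is driven entirely by constructor arguments; a hedged sketch:

```python
import redis
from redis.backoff import ExponentialBackoff
from redis.retry import Retry

r = redis.Redis(
    health_check_interval=30,   # seconds between the PING/PONG checks above
    retry=Retry(ExponentialBackoff(), retries=3),
    retry_on_error=[redis.exceptions.ConnectionError],
)
```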
+ self.disconnect() + raise + + def send_command(self, *args, **kwargs): + """Pack and send a command to the Redis server""" + self.send_packed_command( + self._command_packer.pack(*args), + check_health=kwargs.get("check_health", True), + ) + + def can_read(self, timeout=0): + """Poll the socket to see if there's data that can be read.""" + sock = self._sock + if not sock: + self.connect() + + host_error = self._host_error() + + try: + return self._parser.can_read(timeout) + except OSError as e: + self.disconnect() + raise ConnectionError(f"Error while reading from {host_error}: {e.args}") + + def read_response( + self, + disable_decoding=False, + *, + disconnect_on_error=True, + push_request=False, + ): + """Read the response from a previously sent command""" + + host_error = self._host_error() + + try: + if self.protocol in ["3", 3] and not HIREDIS_AVAILABLE: + response = self._parser.read_response( + disable_decoding=disable_decoding, push_request=push_request + ) + else: + response = self._parser.read_response(disable_decoding=disable_decoding) + except socket.timeout: + if disconnect_on_error: + self.disconnect() + raise TimeoutError(f"Timeout reading from {host_error}") + except OSError as e: + if disconnect_on_error: + self.disconnect() + raise ConnectionError( + f"Error while reading from {host_error}" f" : {e.args}" + ) + except BaseException: + # Also by default close in case of BaseException. A lot of code + # relies on this behaviour when doing Command/Response pairs. + # See #1128. + if disconnect_on_error: + self.disconnect() + raise + + if self.health_check_interval: + self.next_health_check = time() + self.health_check_interval + + if isinstance(response, ResponseError): + try: + raise response + finally: + del response # avoid creating ref cycles + return response + + def pack_command(self, *args): + """Pack a series of arguments into the Redis protocol""" + return self._command_packer.pack(*args) + + def pack_commands(self, commands): + """Pack multiple commands into the Redis protocol""" + output = [] + pieces = [] + buffer_length = 0 + buffer_cutoff = self._buffer_cutoff + + for cmd in commands: + for chunk in self._command_packer.pack(*cmd): + chunklen = len(chunk) + if ( + buffer_length > buffer_cutoff + or chunklen > buffer_cutoff + or isinstance(chunk, memoryview) + ): + if pieces: + output.append(SYM_EMPTY.join(pieces)) + buffer_length = 0 + pieces = [] + + if chunklen > buffer_cutoff or isinstance(chunk, memoryview): + output.append(chunk) + else: + pieces.append(chunk) + buffer_length += chunklen + + if pieces: + output.append(SYM_EMPTY.join(pieces)) + return output + + +class Connection(AbstractConnection): + "Manages TCP communication to and from a Redis server" + + def __init__( + self, + host="localhost", + port=6379, + socket_keepalive=False, + socket_keepalive_options=None, + socket_type=0, + **kwargs, + ): + self.host = host + self.port = int(port) + self.socket_keepalive = socket_keepalive + self.socket_keepalive_options = socket_keepalive_options or {} + self.socket_type = socket_type + super().__init__(**kwargs) + + def repr_pieces(self): + pieces = [("host", self.host), ("port", self.port), ("db", self.db)] + if self.client_name: + pieces.append(("client_name", self.client_name)) + return pieces + + def _connect(self): + "Create a TCP socket connection" + # we want to mimic what socket.create_connection does to support + # ipv4/ipv6, but we want to set options prior to calling + # socket.connect() + err = None + for res in socket.getaddrinfo( + 
self.host, self.port, self.socket_type, socket.SOCK_STREAM + ): + family, socktype, proto, canonname, socket_address = res + sock = None + try: + sock = socket.socket(family, socktype, proto) + # TCP_NODELAY + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + # TCP_KEEPALIVE + if self.socket_keepalive: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + for k, v in self.socket_keepalive_options.items(): + sock.setsockopt(socket.IPPROTO_TCP, k, v) + + # set the socket_connect_timeout before we connect + sock.settimeout(self.socket_connect_timeout) + + # connect + sock.connect(socket_address) + + # set the socket_timeout now that we're connected + sock.settimeout(self.socket_timeout) + return sock + + except OSError as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + raise err + raise OSError("socket.getaddrinfo returned an empty list") + + def _host_error(self): + return f"{self.host}:{self.port}" + + def _error_message(self, exception): + # args for socket.error can either be (errno, "message") + # or just "message" + + host_error = self._host_error() + + if len(exception.args) == 1: + try: + return f"Error connecting to {host_error}. \ + {exception.args[0]}." + except AttributeError: + return f"Connection Error: {exception.args[0]}" + else: + try: + return ( + f"Error {exception.args[0]} connecting to " + f"{host_error}. {exception.args[1]}." + ) + except AttributeError: + return f"Connection Error: {exception.args[0]}" + + +class SSLConnection(Connection): + """Manages SSL connections to and from the Redis server(s). + This class extends the Connection class, adding SSL functionality, and making + use of ssl.SSLContext (https://docs.python.org/3/library/ssl.html#ssl.SSLContext) + """ # noqa + + def __init__( + self, + ssl_keyfile=None, + ssl_certfile=None, + ssl_cert_reqs="required", + ssl_ca_certs=None, + ssl_ca_data=None, + ssl_check_hostname=False, + ssl_ca_path=None, + ssl_password=None, + ssl_validate_ocsp=False, + ssl_validate_ocsp_stapled=False, + ssl_ocsp_context=None, + ssl_ocsp_expected_cert=None, + ssl_min_version=None, + ssl_ciphers=None, + **kwargs, + ): + """Constructor + + Args: + ssl_keyfile: Path to an ssl private key. Defaults to None. + ssl_certfile: Path to an ssl certificate. Defaults to None. + ssl_cert_reqs: The string value for the SSLContext.verify_mode (none, optional, required). Defaults to "required". + ssl_ca_certs: The path to a file of concatenated CA certificates in PEM format. Defaults to None. + ssl_ca_data: Either an ASCII string of one or more PEM-encoded certificates or a bytes-like object of DER-encoded certificates. + ssl_check_hostname: If set, match the hostname during the SSL handshake. Defaults to False. + ssl_ca_path: The path to a directory containing several CA certificates in PEM format. Defaults to None. + ssl_password: Password for unlocking an encrypted private key. Defaults to None. + + ssl_validate_ocsp: If set, perform a full ocsp validation (i.e not a stapled verification) + ssl_validate_ocsp_stapled: If set, perform a validation on a stapled ocsp response + ssl_ocsp_context: A fully initialized OpenSSL.SSL.Context object to be used in verifying the ssl_ocsp_expected_cert + ssl_ocsp_expected_cert: A PEM armoured string containing the expected certificate to be returned from the ocsp verification service. + ssl_min_version: The lowest supported SSL version. It affects the supported SSL versions of the SSLContext. None leaves the default provided by ssl module. 
+ ssl_ciphers: A string listing the ciphers that are allowed to be used. Defaults to None, which means that the default ciphers are used. See https://docs.python.org/3/library/ssl.html#ssl.SSLContext.set_ciphers for more information. + + Raises: + RedisError + """ # noqa + if not SSL_AVAILABLE: + raise RedisError("Python wasn't built with SSL support") + + self.keyfile = ssl_keyfile + self.certfile = ssl_certfile + if ssl_cert_reqs is None: + ssl_cert_reqs = ssl.CERT_NONE + elif isinstance(ssl_cert_reqs, str): + CERT_REQS = { + "none": ssl.CERT_NONE, + "optional": ssl.CERT_OPTIONAL, + "required": ssl.CERT_REQUIRED, + } + if ssl_cert_reqs not in CERT_REQS: + raise RedisError( + f"Invalid SSL Certificate Requirements Flag: {ssl_cert_reqs}" + ) + ssl_cert_reqs = CERT_REQS[ssl_cert_reqs] + self.cert_reqs = ssl_cert_reqs + self.ca_certs = ssl_ca_certs + self.ca_data = ssl_ca_data + self.ca_path = ssl_ca_path + self.check_hostname = ssl_check_hostname + self.certificate_password = ssl_password + self.ssl_validate_ocsp = ssl_validate_ocsp + self.ssl_validate_ocsp_stapled = ssl_validate_ocsp_stapled + self.ssl_ocsp_context = ssl_ocsp_context + self.ssl_ocsp_expected_cert = ssl_ocsp_expected_cert + self.ssl_min_version = ssl_min_version + self.ssl_ciphers = ssl_ciphers + super().__init__(**kwargs) + + def _connect(self): + "Wrap the socket with SSL support" + sock = super()._connect() + context = ssl.create_default_context() + context.check_hostname = self.check_hostname + context.verify_mode = self.cert_reqs + if self.certfile or self.keyfile: + context.load_cert_chain( + certfile=self.certfile, + keyfile=self.keyfile, + password=self.certificate_password, + ) + if ( + self.ca_certs is not None + or self.ca_path is not None + or self.ca_data is not None + ): + context.load_verify_locations( + cafile=self.ca_certs, capath=self.ca_path, cadata=self.ca_data + ) + if self.ssl_min_version is not None: + context.minimum_version = self.ssl_min_version + if self.ssl_ciphers: + context.set_ciphers(self.ssl_ciphers) + sslsock = context.wrap_socket(sock, server_hostname=self.host) + if self.ssl_validate_ocsp is True and CRYPTOGRAPHY_AVAILABLE is False: + raise RedisError("cryptography is not installed.") + + if self.ssl_validate_ocsp_stapled and self.ssl_validate_ocsp: + raise RedisError( + "Either an OCSP staple or pure OCSP connection must be validated " + "- not both." 
+ ) + + # validation for the stapled case + if self.ssl_validate_ocsp_stapled: + import OpenSSL + + from .ocsp import ocsp_staple_verifier + + # if a context is provided use it - otherwise, a basic context + if self.ssl_ocsp_context is None: + staple_ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD) + staple_ctx.use_certificate_file(self.certfile) + staple_ctx.use_privatekey_file(self.keyfile) + else: + staple_ctx = self.ssl_ocsp_context + + staple_ctx.set_ocsp_client_callback( + ocsp_staple_verifier, self.ssl_ocsp_expected_cert + ) + + # need another socket + con = OpenSSL.SSL.Connection(staple_ctx, socket.socket()) + con.request_ocsp() + con.connect((self.host, self.port)) + con.do_handshake() + con.shutdown() + return sslsock + + # pure ocsp validation + if self.ssl_validate_ocsp is True and CRYPTOGRAPHY_AVAILABLE: + from .ocsp import OCSPVerifier + + o = OCSPVerifier(sslsock, self.host, self.port, self.ca_certs) + if o.is_valid(): + return sslsock + else: + raise ConnectionError("ocsp validation error") + return sslsock + + +class UnixDomainSocketConnection(AbstractConnection): + "Manages UDS communication to and from a Redis server" + + def __init__(self, path="", socket_timeout=None, **kwargs): + self.path = path + self.socket_timeout = socket_timeout + super().__init__(**kwargs) + + def repr_pieces(self): + pieces = [("path", self.path), ("db", self.db)] + if self.client_name: + pieces.append(("client_name", self.client_name)) + return pieces + + def _connect(self): + "Create a Unix domain socket connection" + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + sock.settimeout(self.socket_connect_timeout) + sock.connect(self.path) + sock.settimeout(self.socket_timeout) + return sock + + def _host_error(self): + return self.path + + def _error_message(self, exception): + # args for socket.error can either be (errno, "message") + # or just "message" + host_error = self._host_error() + if len(exception.args) == 1: + return ( + f"Error connecting to unix socket: {host_error}. {exception.args[0]}." + ) + else: + return ( + f"Error {exception.args[0]} connecting to unix socket: " + f"{host_error}. {exception.args[1]}." + ) + + +FALSE_STRINGS = ("0", "F", "FALSE", "N", "NO") + + +def to_bool(value): + if value is None or value == "": + return None + if isinstance(value, str) and value.upper() in FALSE_STRINGS: + return False + return bool(value) + + +URL_QUERY_ARGUMENT_PARSERS = { + "db": int, + "socket_timeout": float, + "socket_connect_timeout": float, + "socket_keepalive": to_bool, + "retry_on_timeout": to_bool, + "retry_on_error": list, + "max_connections": int, + "health_check_interval": int, + "ssl_check_hostname": to_bool, + "timeout": float, +} + + +def parse_url(url): + if not ( + url.startswith("redis://") + or url.startswith("rediss://") + or url.startswith("unix://") + ): + raise ValueError( + "Redis URL must specify one of the following " + "schemes (redis://, rediss://, unix://)" + ) + + url = urlparse(url) + kwargs = {} + + for name, value in parse_qs(url.query).items(): + if value and len(value) > 0: + value = unquote(value[0]) + parser = URL_QUERY_ARGUMENT_PARSERS.get(name) + if parser: + try: + kwargs[name] = parser(value) + except (TypeError, ValueError): + raise ValueError(f"Invalid value for `{name}` in connection URL.") + else: + kwargs[name] = value + + if url.username: + kwargs["username"] = unquote(url.username) + if url.password: + kwargs["password"] = unquote(url.password) + + # We only support redis://, rediss:// and unix:// schemes. 
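+    # Illustration (hypothetical values, not from the library docs):
+    #   parse_url("redis://:secret@myhost:6380/2?socket_timeout=5")
+    # returns {"password": "secret", "host": "myhost", "port": 6380,
+    # "db": 2, "socket_timeout": 5.0}, with db parsed from the path and
+    # socket_timeout cast by URL_QUERY_ARGUMENT_PARSERS.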
+ if url.scheme == "unix": + if url.path: + kwargs["path"] = unquote(url.path) + kwargs["connection_class"] = UnixDomainSocketConnection + + else: # implied: url.scheme in ("redis", "rediss"): + if url.hostname: + kwargs["host"] = unquote(url.hostname) + if url.port: + kwargs["port"] = int(url.port) + + # If there's a path argument, use it as the db argument if a + # querystring value wasn't specified + if url.path and "db" not in kwargs: + try: + kwargs["db"] = int(unquote(url.path).replace("/", "")) + except (AttributeError, ValueError): + pass + + if url.scheme == "rediss": + kwargs["connection_class"] = SSLConnection + + return kwargs + + +class ConnectionPool: + """ + Create a connection pool. ``If max_connections`` is set, then this + object raises :py:class:`~redis.exceptions.ConnectionError` when the pool's + limit is reached. + + By default, TCP connections are created unless ``connection_class`` + is specified. Use class:`.UnixDomainSocketConnection` for + unix sockets. + + Any additional keyword arguments are passed to the constructor of + ``connection_class``. + """ + + @classmethod + def from_url(cls, url, **kwargs): + """ + Return a connection pool configured from the given URL. + + For example:: + + redis://[[username]:[password]]@localhost:6379/0 + rediss://[[username]:[password]]@localhost:6379/0 + unix://[username@]/path/to/socket.sock?db=0[&password=password] + + Three URL schemes are supported: + + - `redis://` creates a TCP socket connection. See more at: + + - `rediss://` creates a SSL wrapped TCP socket connection. See more at: + + - ``unix://``: creates a Unix Domain Socket connection. + + The username, password, hostname, path and all querystring values + are passed through urllib.parse.unquote in order to replace any + percent-encoded values with their corresponding characters. + + There are several ways to specify a database number. The first value + found will be used: + + 1. A ``db`` querystring option, e.g. redis://localhost?db=0 + 2. If using the redis:// or rediss:// schemes, the path argument + of the url, e.g. redis://localhost/0 + 3. A ``db`` keyword argument to this function. + + If none of these options are specified, the default db=0 is used. + + All querystring options are cast to their appropriate Python types. + Boolean arguments can be specified with string values "True"/"False" + or "Yes"/"No". Values that cannot be properly cast cause a + ``ValueError`` to be raised. Once parsed, the querystring arguments + and keyword arguments are passed to the ``ConnectionPool``'s + class initializer. In the case of conflicting arguments, querystring + arguments always win. + """ + url_options = parse_url(url) + + if "connection_class" in kwargs: + url_options["connection_class"] = kwargs["connection_class"] + + kwargs.update(url_options) + return cls(**kwargs) + + def __init__( + self, + connection_class=Connection, + max_connections: Optional[int] = None, + **connection_kwargs, + ): + max_connections = max_connections or 2**31 + if not isinstance(max_connections, int) or max_connections < 0: + raise ValueError('"max_connections" must be a positive integer') + + self.connection_class = connection_class + self.connection_kwargs = connection_kwargs + self.max_connections = max_connections + + # a lock to protect the critical section in _checkpid(). + # this lock is acquired when the process id changes, such as + # after a fork. during this time, multiple threads in the child + # process could attempt to acquire this lock. 
the first thread + # to acquire the lock will reset the data structures and lock + # object of this pool. subsequent threads acquiring this lock + # will notice the first thread already did the work and simply + # release the lock. + self._fork_lock = threading.Lock() + self.reset() + + def __repr__(self) -> (str, str): + return ( + f"{type(self).__name__}" + f"<{repr(self.connection_class(**self.connection_kwargs))}>" + ) + + def reset(self) -> None: + self._lock = threading.Lock() + self._created_connections = 0 + self._available_connections = [] + self._in_use_connections = set() + + # this must be the last operation in this method. while reset() is + # called when holding _fork_lock, other threads in this process + # can call _checkpid() which compares self.pid and os.getpid() without + # holding any lock (for performance reasons). keeping this assignment + # as the last operation ensures that those other threads will also + # notice a pid difference and block waiting for the first thread to + # release _fork_lock. when each of these threads eventually acquire + # _fork_lock, they will notice that another thread already called + # reset() and they will immediately release _fork_lock and continue on. + self.pid = os.getpid() + + def _checkpid(self) -> None: + # _checkpid() attempts to keep ConnectionPool fork-safe on modern + # systems. this is called by all ConnectionPool methods that + # manipulate the pool's state such as get_connection() and release(). + # + # _checkpid() determines whether the process has forked by comparing + # the current process id to the process id saved on the ConnectionPool + # instance. if these values are the same, _checkpid() simply returns. + # + # when the process ids differ, _checkpid() assumes that the process + # has forked and that we're now running in the child process. the child + # process cannot use the parent's file descriptors (e.g., sockets). + # therefore, when _checkpid() sees the process id change, it calls + # reset() in order to reinitialize the child's ConnectionPool. this + # will cause the child to make all new connection objects. + # + # _checkpid() is protected by self._fork_lock to ensure that multiple + # threads in the child process do not call reset() multiple times. + # + # there is an extremely small chance this could fail in the following + # scenario: + # 1. process A calls _checkpid() for the first time and acquires + # self._fork_lock. + # 2. while holding self._fork_lock, process A forks (the fork() + # could happen in a different thread owned by process A) + # 3. process B (the forked child process) inherits the + # ConnectionPool's state from the parent. that state includes + # a locked _fork_lock. process B will not be notified when + # process A releases the _fork_lock and will thus never be + # able to acquire the _fork_lock. + # + # to mitigate this possible deadlock, _checkpid() will only wait 5 + # seconds to acquire _fork_lock. if _fork_lock cannot be acquired in + # that time it is assumed that the child is deadlocked and a + # redis.ChildDeadlockedError error is raised. 
+ if self.pid != os.getpid(): + acquired = self._fork_lock.acquire(timeout=5) + if not acquired: + raise ChildDeadlockedError + # reset() the instance for the new process if another thread + # hasn't already done so + try: + if self.pid != os.getpid(): + self.reset() + finally: + self._fork_lock.release() + + def get_connection(self, command_name: str, *keys, **options) -> "Connection": + "Get a connection from the pool" + self._checkpid() + with self._lock: + try: + connection = self._available_connections.pop() + except IndexError: + connection = self.make_connection() + self._in_use_connections.add(connection) + + try: + # ensure this connection is connected to Redis + connection.connect() + # connections that the pool provides should be ready to send + # a command. if not, the connection was either returned to the + # pool before all data has been read or the socket has been + # closed. either way, reconnect and verify everything is good. + try: + if connection.can_read(): + raise ConnectionError("Connection has data") + except (ConnectionError, OSError): + connection.disconnect() + connection.connect() + if connection.can_read(): + raise ConnectionError("Connection not ready") + except BaseException: + # release the connection back to the pool so that we don't + # leak it + self.release(connection) + raise + + return connection + + def get_encoder(self) -> Encoder: + "Return an encoder based on encoding settings" + kwargs = self.connection_kwargs + return Encoder( + encoding=kwargs.get("encoding", "utf-8"), + encoding_errors=kwargs.get("encoding_errors", "strict"), + decode_responses=kwargs.get("decode_responses", False), + ) + + def make_connection(self) -> "Connection": + "Create a new connection" + if self._created_connections >= self.max_connections: + raise ConnectionError("Too many connections") + self._created_connections += 1 + return self.connection_class(**self.connection_kwargs) + + def release(self, connection: "Connection") -> None: + "Releases the connection back to the pool" + self._checkpid() + with self._lock: + try: + self._in_use_connections.remove(connection) + except KeyError: + # Gracefully fail when a connection is returned to this pool + # that the pool doesn't actually own + pass + + if self.owns_connection(connection): + self._available_connections.append(connection) + else: + # pool doesn't own this connection. do not add it back + # to the pool and decrement the count so that another + # connection can take its place if needed + self._created_connections -= 1 + connection.disconnect() + return + + def owns_connection(self, connection: "Connection") -> int: + return connection.pid == self.pid + + def disconnect(self, inuse_connections: bool = True) -> None: + """ + Disconnects connections in the pool + + If ``inuse_connections`` is True, disconnect connections that are + current in use, potentially by other threads. Otherwise only disconnect + connections that are idle in the pool. 
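+
+        A minimal usage sketch (illustrative addition, not from the original
+        docs; assumes a reachable local server)::
+
+            pool = ConnectionPool(host="localhost", port=6379)
+            conn = pool.get_connection("PING")
+            pool.release(conn)
+            pool.disconnect()  # also disconnects the connection just released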
+ """ + self._checkpid() + with self._lock: + if inuse_connections: + connections = chain( + self._available_connections, self._in_use_connections + ) + else: + connections = self._available_connections + + for connection in connections: + connection.disconnect() + + def close(self) -> None: + """Close the pool, disconnecting all connections""" + self.disconnect() + + def set_retry(self, retry: "Retry") -> None: + self.connection_kwargs.update({"retry": retry}) + for conn in self._available_connections: + conn.retry = retry + for conn in self._in_use_connections: + conn.retry = retry + + +class BlockingConnectionPool(ConnectionPool): + """ + Thread-safe blocking connection pool:: + + >>> from redis.client import Redis + >>> client = Redis(connection_pool=BlockingConnectionPool()) + + It performs the same function as the default + :py:class:`~redis.ConnectionPool` implementation, in that, + it maintains a pool of reusable connections that can be shared by + multiple redis clients (safely across threads if required). + + The difference is that, in the event that a client tries to get a + connection from the pool when all of connections are in use, rather than + raising a :py:class:`~redis.ConnectionError` (as the default + :py:class:`~redis.ConnectionPool` implementation does), it + makes the client wait ("blocks") for a specified number of seconds until + a connection becomes available. + + Use ``max_connections`` to increase / decrease the pool size:: + + >>> pool = BlockingConnectionPool(max_connections=10) + + Use ``timeout`` to tell it either how many seconds to wait for a connection + to become available, or to block forever: + + >>> # Block forever. + >>> pool = BlockingConnectionPool(timeout=None) + + >>> # Raise a ``ConnectionError`` after five seconds if a connection is + >>> # not available. + >>> pool = BlockingConnectionPool(timeout=5) + """ + + def __init__( + self, + max_connections=50, + timeout=20, + connection_class=Connection, + queue_class=LifoQueue, + **connection_kwargs, + ): + self.queue_class = queue_class + self.timeout = timeout + super().__init__( + connection_class=connection_class, + max_connections=max_connections, + **connection_kwargs, + ) + + def reset(self): + # Create and fill up a thread safe queue with ``None`` values. + self.pool = self.queue_class(self.max_connections) + while True: + try: + self.pool.put_nowait(None) + except Full: + break + + # Keep a list of actual connection instances so that we can + # disconnect them later. + self._connections = [] + + # this must be the last operation in this method. while reset() is + # called when holding _fork_lock, other threads in this process + # can call _checkpid() which compares self.pid and os.getpid() without + # holding any lock (for performance reasons). keeping this assignment + # as the last operation ensures that those other threads will also + # notice a pid difference and block waiting for the first thread to + # release _fork_lock. when each of these threads eventually acquire + # _fork_lock, they will notice that another thread already called + # reset() and they will immediately release _fork_lock and continue on. + self.pid = os.getpid() + + def make_connection(self): + "Make a fresh connection." + connection = self.connection_class(**self.connection_kwargs) + self._connections.append(connection) + return connection + + def get_connection(self, command_name, *keys, **options): + """ + Get a connection, blocking for ``self.timeout`` until a connection + is available from the pool. 
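+
+        For illustration (added example, not in the original docstring):
+        with ``BlockingConnectionPool(max_connections=1, timeout=0.1)``, a
+        second concurrent ``get_connection("PING")`` raises
+        ``ConnectionError("No connection available.")`` once the timeout
+        elapses.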
+ + If the connection returned is ``None`` then creates a new connection. + Because we use a last-in first-out queue, the existing connections + (having been returned to the pool after the initial ``None`` values + were added) will be returned before ``None`` values. This means we only + create new connections when we need to, i.e.: the actual number of + connections will only increase in response to demand. + """ + # Make sure we haven't changed process. + self._checkpid() + + # Try and get a connection from the pool. If one isn't available within + # self.timeout then raise a ``ConnectionError``. + connection = None + try: + connection = self.pool.get(block=True, timeout=self.timeout) + except Empty: + # Note that this is not caught by the redis client and will be + # raised unless handled by application code. If you want never to + raise ConnectionError("No connection available.") + + # If the ``connection`` is actually ``None`` then that's a cue to make + # a new connection to add to the pool. + if connection is None: + connection = self.make_connection() + + try: + # ensure this connection is connected to Redis + connection.connect() + # connections that the pool provides should be ready to send + # a command. if not, the connection was either returned to the + # pool before all data has been read or the socket has been + # closed. either way, reconnect and verify everything is good. + try: + if connection.can_read(): + raise ConnectionError("Connection has data") + except (ConnectionError, OSError): + connection.disconnect() + connection.connect() + if connection.can_read(): + raise ConnectionError("Connection not ready") + except BaseException: + # release the connection back to the pool so that we don't leak it + self.release(connection) + raise + + return connection + + def release(self, connection): + "Releases the connection back to the pool." + # Make sure we haven't changed process. + self._checkpid() + if not self.owns_connection(connection): + # pool doesn't own this connection. do not add it back + # to the pool. instead add a None value which is a placeholder + # that will cause the pool to recreate the connection if + # its needed. + connection.disconnect() + self.pool.put_nowait(None) + return + + # Put the connection back into the pool. + try: + self.pool.put_nowait(connection) + except Full: + # perhaps the pool has been reset() after a fork? regardless, + # we don't want this connection + pass + + def disconnect(self): + "Disconnects all connections in the pool." + self._checkpid() + for connection in self._connections: + connection.disconnect() diff --git a/templates/skills/spotify/dependencies/redis/crc.py b/templates/skills/spotify/dependencies/redis/crc.py new file mode 100644 index 00000000..e2612411 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/crc.py @@ -0,0 +1,23 @@ +from binascii import crc_hqx + +from redis.typing import EncodedT + +# Redis Cluster's key space is divided into 16384 slots. +# For more information see: https://github.com/redis/redis/issues/2576 +REDIS_CLUSTER_HASH_SLOTS = 16384 + +__all__ = ["key_slot", "REDIS_CLUSTER_HASH_SLOTS"] + + +def key_slot(key: EncodedT, bucket: int = REDIS_CLUSTER_HASH_SLOTS) -> int: + """Calculate key slot for a given key. 
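+
+    Illustrative property (added example): keys sharing a hash tag land in
+    the same slot, e.g. ``key_slot(b"{user1000}.following") ==
+    key_slot(b"{user1000}.followers")``.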
+ See Keys distribution model in https://redis.io/topics/cluster-spec + :param key - bytes + :param bucket - int + """ + start = key.find(b"{") + if start > -1: + end = key.find(b"}", start + 1) + if end > -1 and end != start + 1: + key = key[start + 1 : end] + return crc_hqx(key, 0) % bucket diff --git a/templates/skills/spotify/dependencies/redis/credentials.py b/templates/skills/spotify/dependencies/redis/credentials.py new file mode 100644 index 00000000..7ba26dcd --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/credentials.py @@ -0,0 +1,26 @@ +from typing import Optional, Tuple, Union + + +class CredentialProvider: + """ + Credentials Provider. + """ + + def get_credentials(self) -> Union[Tuple[str], Tuple[str, str]]: + raise NotImplementedError("get_credentials must be implemented") + + +class UsernamePasswordCredentialProvider(CredentialProvider): + """ + Simple implementation of CredentialProvider that just wraps static + username and password. + """ + + def __init__(self, username: Optional[str] = None, password: Optional[str] = None): + self.username = username or "" + self.password = password or "" + + def get_credentials(self): + if self.username: + return self.username, self.password + return (self.password,) diff --git a/templates/skills/spotify/dependencies/redis/exceptions.py b/templates/skills/spotify/dependencies/redis/exceptions.py new file mode 100644 index 00000000..8af58cb0 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/exceptions.py @@ -0,0 +1,221 @@ +"Core exceptions raised by the Redis client" + + +class RedisError(Exception): + pass + + +class ConnectionError(RedisError): + pass + + +class TimeoutError(RedisError): + pass + + +class AuthenticationError(ConnectionError): + pass + + +class AuthorizationError(ConnectionError): + pass + + +class BusyLoadingError(ConnectionError): + pass + + +class InvalidResponse(RedisError): + pass + + +class ResponseError(RedisError): + pass + + +class DataError(RedisError): + pass + + +class PubSubError(RedisError): + pass + + +class WatchError(RedisError): + pass + + +class NoScriptError(ResponseError): + pass + + +class OutOfMemoryError(ResponseError): + """ + Indicates the database is full. Can only occur when either: + * Redis maxmemory-policy=noeviction + * Redis maxmemory-policy=volatile* and there are no evictable keys + + For more information see `Memory optimization in Redis `_. # noqa + """ + + pass + + +class ExecAbortError(ResponseError): + pass + + +class ReadOnlyError(ResponseError): + pass + + +class NoPermissionError(ResponseError): + pass + + +class ModuleError(ResponseError): + pass + + +class LockError(RedisError, ValueError): + "Errors acquiring or releasing a lock" + # NOTE: For backwards compatibility, this class derives from ValueError. + # This was originally chosen to behave like threading.Lock. 
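+    # A practical consequence: callers may catch this either as
+    # `except LockError:` or as `except ValueError:`.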
+ + def __init__(self, message=None, lock_name=None): + self.message = message + self.lock_name = lock_name + + +class LockNotOwnedError(LockError): + "Error trying to extend or release a lock that is (no longer) owned" + pass + + +class ChildDeadlockedError(Exception): + "Error indicating that a child process is deadlocked after a fork()" + pass + + +class AuthenticationWrongNumberOfArgsError(ResponseError): + """ + An error to indicate that the wrong number of args + were sent to the AUTH command + """ + + pass + + +class RedisClusterException(Exception): + """ + Base exception for the RedisCluster client + """ + + pass + + +class ClusterError(RedisError): + """ + Cluster errors occurred multiple times, resulting in an exhaustion of the + command execution TTL + """ + + pass + + +class ClusterDownError(ClusterError, ResponseError): + """ + Error indicated CLUSTERDOWN error received from cluster. + By default Redis Cluster nodes stop accepting queries if they detect there + is at least a hash slot uncovered (no available node is serving it). + This way if the cluster is partially down (for example a range of hash + slots are no longer covered) the entire cluster eventually becomes + unavailable. It automatically returns available as soon as all the slots + are covered again. + """ + + def __init__(self, resp): + self.args = (resp,) + self.message = resp + + +class AskError(ResponseError): + """ + Error indicated ASK error received from cluster. + When a slot is set as MIGRATING, the node will accept all queries that + pertain to this hash slot, but only if the key in question exists, + otherwise the query is forwarded using a -ASK redirection to the node that + is target of the migration. + + src node: MIGRATING to dst node + get > ASK error + ask dst node > ASKING command + dst node: IMPORTING from src node + asking command only affects next command + any op will be allowed after asking command + """ + + def __init__(self, resp): + """should only redirect to master node""" + self.args = (resp,) + self.message = resp + slot_id, new_node = resp.split(" ") + host, port = new_node.rsplit(":", 1) + self.slot_id = int(slot_id) + self.node_addr = self.host, self.port = host, int(port) + + +class TryAgainError(ResponseError): + """ + Error indicated TRYAGAIN error received from cluster. + Operations on keys that don't exist or are - during resharding - split + between the source and destination nodes, will generate a -TRYAGAIN error. + """ + + def __init__(self, *args, **kwargs): + pass + + +class ClusterCrossSlotError(ResponseError): + """ + Error indicated CROSSSLOT error received from cluster. + A CROSSSLOT error is generated when keys in a request don't hash to the + same slot. + """ + + message = "Keys in request don't hash to the same slot" + + +class MovedError(AskError): + """ + Error indicated MOVED error received from cluster. + A request sent to a node that doesn't serve this key will be replayed with + a MOVED error that points to the correct node. + """ + + pass + + +class MasterDownError(ClusterDownError): + """ + Error indicated MASTERDOWN error received from cluster. + Link with MASTER is down and replica-serve-stale-data is set to 'no'. + """ + + pass + + +class SlotNotCoveredError(RedisClusterException): + """ + This error only happens in the case where the connection pool will try to + fetch what node that is covered by a given slot. 
+ + If this error is raised the client should drop the current node layout and + attempt to reconnect and refresh the node layout again + """ + + pass + + +class MaxConnectionsError(ConnectionError): + ... diff --git a/templates/skills/spotify/dependencies/redis/lock.py b/templates/skills/spotify/dependencies/redis/lock.py new file mode 100644 index 00000000..cae7f27e --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/lock.py @@ -0,0 +1,323 @@ +import threading +import time as mod_time +import uuid +from types import SimpleNamespace, TracebackType +from typing import Optional, Type + +from redis.exceptions import LockError, LockNotOwnedError +from redis.typing import Number + + +class Lock: + """ + A shared, distributed Lock. Using Redis for locking allows the Lock + to be shared across processes and/or machines. + + It's left to the user to resolve deadlock issues and make sure + multiple clients play nicely together. + """ + + lua_release = None + lua_extend = None + lua_reacquire = None + + # KEYS[1] - lock name + # ARGV[1] - token + # return 1 if the lock was released, otherwise 0 + LUA_RELEASE_SCRIPT = """ + local token = redis.call('get', KEYS[1]) + if not token or token ~= ARGV[1] then + return 0 + end + redis.call('del', KEYS[1]) + return 1 + """ + + # KEYS[1] - lock name + # ARGV[1] - token + # ARGV[2] - additional milliseconds + # ARGV[3] - "0" if the additional time should be added to the lock's + # existing ttl or "1" if the existing ttl should be replaced + # return 1 if the locks time was extended, otherwise 0 + LUA_EXTEND_SCRIPT = """ + local token = redis.call('get', KEYS[1]) + if not token or token ~= ARGV[1] then + return 0 + end + local expiration = redis.call('pttl', KEYS[1]) + if not expiration then + expiration = 0 + end + if expiration < 0 then + return 0 + end + + local newttl = ARGV[2] + if ARGV[3] == "0" then + newttl = ARGV[2] + expiration + end + redis.call('pexpire', KEYS[1], newttl) + return 1 + """ + + # KEYS[1] - lock name + # ARGV[1] - token + # ARGV[2] - milliseconds + # return 1 if the locks time was reacquired, otherwise 0 + LUA_REACQUIRE_SCRIPT = """ + local token = redis.call('get', KEYS[1]) + if not token or token ~= ARGV[1] then + return 0 + end + redis.call('pexpire', KEYS[1], ARGV[2]) + return 1 + """ + + def __init__( + self, + redis, + name: str, + timeout: Optional[Number] = None, + sleep: Number = 0.1, + blocking: bool = True, + blocking_timeout: Optional[Number] = None, + thread_local: bool = True, + ): + """ + Create a new Lock instance named ``name`` using the Redis client + supplied by ``redis``. + + ``timeout`` indicates a maximum life for the lock in seconds. + By default, it will remain locked until release() is called. + ``timeout`` can be specified as a float or integer, both representing + the number of seconds to wait. + + ``sleep`` indicates the amount of time to sleep in seconds per loop + iteration when the lock is in blocking mode and another client is + currently holding the lock. + + ``blocking`` indicates whether calling ``acquire`` should block until + the lock has been acquired or to fail immediately, causing ``acquire`` + to return False and the lock not being acquired. Defaults to True. + Note this value can be overridden by passing a ``blocking`` + argument to ``acquire``. + + ``blocking_timeout`` indicates the maximum amount of time in seconds to + spend trying to acquire the lock. A value of ``None`` indicates + continue trying forever. 
``blocking_timeout`` can be specified as a + float or integer, both representing the number of seconds to wait. + + ``thread_local`` indicates whether the lock token is placed in + thread-local storage. By default, the token is placed in thread local + storage so that a thread only sees its token, not a token set by + another thread. Consider the following timeline: + + time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds. + thread-1 sets the token to "abc" + time: 1, thread-2 blocks trying to acquire `my-lock` using the + Lock instance. + time: 5, thread-1 has not yet completed. redis expires the lock + key. + time: 5, thread-2 acquired `my-lock` now that it's available. + thread-2 sets the token to "xyz" + time: 6, thread-1 finishes its work and calls release(). if the + token is *not* stored in thread local storage, then + thread-1 would see the token value as "xyz" and would be + able to successfully release the thread-2's lock. + + In some use cases it's necessary to disable thread local storage. For + example, if you have code where one thread acquires a lock and passes + that lock instance to a worker thread to release later. If thread + local storage isn't disabled in this case, the worker thread won't see + the token set by the thread that acquired the lock. Our assumption + is that these cases aren't common and as such default to using + thread local storage. + """ + self.redis = redis + self.name = name + self.timeout = timeout + self.sleep = sleep + self.blocking = blocking + self.blocking_timeout = blocking_timeout + self.thread_local = bool(thread_local) + self.local = threading.local() if self.thread_local else SimpleNamespace() + self.local.token = None + self.register_scripts() + + def register_scripts(self) -> None: + cls = self.__class__ + client = self.redis + if cls.lua_release is None: + cls.lua_release = client.register_script(cls.LUA_RELEASE_SCRIPT) + if cls.lua_extend is None: + cls.lua_extend = client.register_script(cls.LUA_EXTEND_SCRIPT) + if cls.lua_reacquire is None: + cls.lua_reacquire = client.register_script(cls.LUA_REACQUIRE_SCRIPT) + + def __enter__(self) -> "Lock": + if self.acquire(): + return self + raise LockError( + "Unable to acquire lock within the time specified", + lock_name=self.name, + ) + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + self.release() + + def acquire( + self, + sleep: Optional[Number] = None, + blocking: Optional[bool] = None, + blocking_timeout: Optional[Number] = None, + token: Optional[str] = None, + ): + """ + Use Redis to hold a shared, distributed lock named ``name``. + Returns True once the lock is acquired. + + If ``blocking`` is False, always return immediately. If the lock + was acquired, return True, otherwise return False. + + ``blocking_timeout`` specifies the maximum number of seconds to + wait trying to acquire the lock. + + ``token`` specifies the token value to be used. If provided, token + must be a bytes object or a string that can be encoded to a bytes + object with the default encoding. If a token isn't specified, a UUID + will be generated. 
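+
+        A minimal sketch (illustrative, not from the original docstring;
+        ``client`` is assumed to be a connected ``redis.Redis``)::
+
+            lock = client.lock("my-resource", timeout=10)
+            if lock.acquire(blocking=True, blocking_timeout=2):
+                try:
+                    ...  # critical section
+                finally:
+                    lock.release()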
+ """ + if sleep is None: + sleep = self.sleep + if token is None: + token = uuid.uuid1().hex.encode() + else: + encoder = self.redis.get_encoder() + token = encoder.encode(token) + if blocking is None: + blocking = self.blocking + if blocking_timeout is None: + blocking_timeout = self.blocking_timeout + stop_trying_at = None + if blocking_timeout is not None: + stop_trying_at = mod_time.monotonic() + blocking_timeout + while True: + if self.do_acquire(token): + self.local.token = token + return True + if not blocking: + return False + next_try_at = mod_time.monotonic() + sleep + if stop_trying_at is not None and next_try_at > stop_trying_at: + return False + mod_time.sleep(sleep) + + def do_acquire(self, token: str) -> bool: + if self.timeout: + # convert to milliseconds + timeout = int(self.timeout * 1000) + else: + timeout = None + if self.redis.set(self.name, token, nx=True, px=timeout): + return True + return False + + def locked(self) -> bool: + """ + Returns True if this key is locked by any process, otherwise False. + """ + return self.redis.get(self.name) is not None + + def owned(self) -> bool: + """ + Returns True if this key is locked by this lock, otherwise False. + """ + stored_token = self.redis.get(self.name) + # need to always compare bytes to bytes + # TODO: this can be simplified when the context manager is finished + if stored_token and not isinstance(stored_token, bytes): + encoder = self.redis.get_encoder() + stored_token = encoder.encode(stored_token) + return self.local.token is not None and stored_token == self.local.token + + def release(self) -> None: + """ + Releases the already acquired lock + """ + expected_token = self.local.token + if expected_token is None: + raise LockError("Cannot release an unlocked lock", lock_name=self.name) + self.local.token = None + self.do_release(expected_token) + + def do_release(self, expected_token: str) -> None: + if not bool( + self.lua_release(keys=[self.name], args=[expected_token], client=self.redis) + ): + raise LockNotOwnedError( + "Cannot release a lock that's no longer owned", + lock_name=self.name, + ) + + def extend(self, additional_time: int, replace_ttl: bool = False) -> bool: + """ + Adds more time to an already acquired lock. + + ``additional_time`` can be specified as an integer or a float, both + representing the number of seconds to add. + + ``replace_ttl`` if False (the default), add `additional_time` to + the lock's existing ttl. If True, replace the lock's ttl with + `additional_time`. + """ + if self.local.token is None: + raise LockError("Cannot extend an unlocked lock", lock_name=self.name) + if self.timeout is None: + raise LockError("Cannot extend a lock with no timeout", lock_name=self.name) + return self.do_extend(additional_time, replace_ttl) + + def do_extend(self, additional_time: int, replace_ttl: bool) -> bool: + additional_time = int(additional_time * 1000) + if not bool( + self.lua_extend( + keys=[self.name], + args=[self.local.token, additional_time, "1" if replace_ttl else "0"], + client=self.redis, + ) + ): + raise LockNotOwnedError( + "Cannot extend a lock that's no longer owned", + lock_name=self.name, + ) + return True + + def reacquire(self) -> bool: + """ + Resets a TTL of an already acquired lock back to a timeout value. 
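+
+        Illustrative sketch (added example; requires the lock to have been
+        created with an explicit ``timeout``)::
+
+            lock = client.lock("my-resource", timeout=10)
+            lock.acquire()
+            # ... long-running work ...
+            lock.reacquire()  # TTL is reset to the full 10 seconds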
+        """
+        if self.local.token is None:
+            raise LockError("Cannot reacquire an unlocked lock", lock_name=self.name)
+        if self.timeout is None:
+            raise LockError(
+                "Cannot reacquire a lock with no timeout",
+                lock_name=self.name,
+            )
+        return self.do_reacquire()
+
+    def do_reacquire(self) -> bool:
+        timeout = int(self.timeout * 1000)
+        if not bool(
+            self.lua_reacquire(
+                keys=[self.name], args=[self.local.token, timeout], client=self.redis
+            )
+        ):
+            raise LockNotOwnedError(
+                "Cannot reacquire a lock that's no longer owned",
+                lock_name=self.name,
+            )
+        return True
diff --git a/templates/skills/spotify/dependencies/redis/ocsp.py b/templates/skills/spotify/dependencies/redis/ocsp.py
new file mode 100644
index 00000000..8819848f
--- /dev/null
+++ b/templates/skills/spotify/dependencies/redis/ocsp.py
@@ -0,0 +1,307 @@
+import base64
+import datetime
+import ssl
+from urllib.parse import urljoin, urlparse
+
+import cryptography.hazmat.primitives.hashes
+import requests
+from cryptography import hazmat, x509
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat import backends
+from cryptography.hazmat.primitives.asymmetric.dsa import DSAPublicKey
+from cryptography.hazmat.primitives.asymmetric.ec import ECDSA, EllipticCurvePublicKey
+from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
+from cryptography.hazmat.primitives.hashes import SHA1, Hash
+from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
+from cryptography.x509 import ocsp
+from redis.exceptions import AuthorizationError, ConnectionError
+
+
+def _verify_response(issuer_cert, ocsp_response):
+    pubkey = issuer_cert.public_key()
+    try:
+        if isinstance(pubkey, RSAPublicKey):
+            pubkey.verify(
+                ocsp_response.signature,
+                ocsp_response.tbs_response_bytes,
+                PKCS1v15(),
+                ocsp_response.signature_hash_algorithm,
+            )
+        elif isinstance(pubkey, DSAPublicKey):
+            pubkey.verify(
+                ocsp_response.signature,
+                ocsp_response.tbs_response_bytes,
+                ocsp_response.signature_hash_algorithm,
+            )
+        elif isinstance(pubkey, EllipticCurvePublicKey):
+            pubkey.verify(
+                ocsp_response.signature,
+                ocsp_response.tbs_response_bytes,
+                ECDSA(ocsp_response.signature_hash_algorithm),
+            )
+        else:
+            pubkey.verify(ocsp_response.signature, ocsp_response.tbs_response_bytes)
+    except InvalidSignature:
+        raise ConnectionError("failed to validate ocsp response")
+
+
+def _check_certificate(issuer_cert, ocsp_bytes, validate=True):
+    """A wrapper that returns the validity of a known ocsp certificate"""
+
+    ocsp_response = ocsp.load_der_ocsp_response(ocsp_bytes)
+
+    if ocsp_response.response_status == ocsp.OCSPResponseStatus.UNAUTHORIZED:
+        raise AuthorizationError("you are not authorized to view this ocsp certificate")
+    if ocsp_response.response_status == ocsp.OCSPResponseStatus.SUCCESSFUL:
+        if ocsp_response.certificate_status != ocsp.OCSPCertStatus.GOOD:
+            raise ConnectionError(
+                f'Received an {str(ocsp_response.certificate_status).split(".")[1]} '
+                "ocsp certificate status"
+            )
+    else:
+        raise ConnectionError(
+            "failed to retrieve a successful response from the ocsp responder"
+        )
+
+    if ocsp_response.this_update >= datetime.datetime.now():
+        raise ConnectionError("ocsp certificate was issued in the future")
+
+    if (
+        ocsp_response.next_update
+        and ocsp_response.next_update < datetime.datetime.now()
+    ):
+        raise ConnectionError("ocsp certificate has invalid update - in the past")
+
+    responder_name = ocsp_response.responder_name
+    issuer_hash = ocsp_response.issuer_key_hash
+    responder_hash = ocsp_response.responder_key_hash
+
+    cert_to_validate = issuer_cert
+    if (
+        responder_name is not None
+        and responder_name == issuer_cert.subject
+        or responder_hash == issuer_hash
+    ):
+        cert_to_validate = issuer_cert
+    else:
+        certs = ocsp_response.certificates
+        responder_certs = _get_certificates(
+            certs, issuer_cert, responder_name, responder_hash
+        )
+
+        try:
+            responder_cert = responder_certs[0]
+        except IndexError:
+            raise ConnectionError("no certificates found for the responder")
+
+        ext = responder_cert.extensions.get_extension_for_class(x509.ExtendedKeyUsage)
+        if ext is None or x509.oid.ExtendedKeyUsageOID.OCSP_SIGNING not in ext.value:
+            raise ConnectionError("delegate not authorized for ocsp signing")
+        cert_to_validate = responder_cert
+
+    if validate:
+        _verify_response(cert_to_validate, ocsp_response)
+    return True
+
+
+def _get_certificates(certs, issuer_cert, responder_name, responder_hash):
+    if responder_name is None:
+        certificates = [
+            c
+            for c in certs
+            if _get_pubkey_hash(c) == responder_hash and c.issuer == issuer_cert.subject
+        ]
+    else:
+        certificates = [
+            c
+            for c in certs
+            if c.subject == responder_name and c.issuer == issuer_cert.subject
+        ]
+
+    return certificates
+
+
+def _get_pubkey_hash(certificate):
+    pubkey = certificate.public_key()
+
+    # https://stackoverflow.com/a/46309453/600498
+    if isinstance(pubkey, RSAPublicKey):
+        h = pubkey.public_bytes(Encoding.DER, PublicFormat.PKCS1)
+    elif isinstance(pubkey, EllipticCurvePublicKey):
+        h = pubkey.public_bytes(Encoding.X962, PublicFormat.UncompressedPoint)
+    else:
+        h = pubkey.public_bytes(Encoding.DER, PublicFormat.SubjectPublicKeyInfo)
+
+    sha1 = Hash(SHA1(), backend=backends.default_backend())
+    sha1.update(h)
+    return sha1.finalize()
+
+
+def ocsp_staple_verifier(con, ocsp_bytes, expected=None):
+    """An implementation of a function for set_ocsp_client_callback in PyOpenSSL.
+
+    This function validates that the provided ocsp_bytes response is valid,
+    and matches the expected, stapled responses.
+    """
+    if ocsp_bytes in [b"", None]:
+        raise ConnectionError("no ocsp response present")
+
+    issuer_cert = None
+    peer_cert = con.get_peer_certificate().to_cryptography()
+    for c in con.get_peer_cert_chain():
+        cert = c.to_cryptography()
+        if cert.subject == peer_cert.issuer:
+            issuer_cert = cert
+            break
+
+    if issuer_cert is None:
+        raise ConnectionError("no matching issuer cert found in certificate chain")
+
+    if expected is not None:
+        e = x509.load_pem_x509_certificate(expected)
+        if peer_cert != e:
+            raise ConnectionError("received and expected certificates do not match")
+
+    return _check_certificate(issuer_cert, ocsp_bytes)
+
+
+class OCSPVerifier:
+    """A class to verify ssl sockets for RFC6960/RFC6961. This can be used
+    when using direct validation of OCSP responses and certificate revocations.
+
+    @see https://datatracker.ietf.org/doc/html/rfc6960
+    @see https://datatracker.ietf.org/doc/html/rfc6961
+    """
+
+    def __init__(self, sock, host, port, ca_certs=None):
+        self.SOCK = sock
+        self.HOST = host
+        self.PORT = port
+        self.CA_CERTS = ca_certs
+
+    def _bin2ascii(self, der):
+        """Convert SSL certificates in a binary (DER) format to ASCII PEM."""
+
+        pem = ssl.DER_cert_to_PEM_cert(der)
+        cert = x509.load_pem_x509_certificate(pem.encode(), backends.default_backend())
+        return cert
+
+    def components_from_socket(self):
+        """This function returns the certificate, primary issuer, and primary ocsp
+        server in the chain for a socket already wrapped with ssl.
+        """
+
+        # convert the binary certificate to text
+        der = self.SOCK.getpeercert(True)
+        if der is False:
+            raise ConnectionError("no certificate found for ssl peer")
+        cert = self._bin2ascii(der)
+        return self._certificate_components(cert)
+
+    def _certificate_components(self, cert):
+        """Given an SSL certificate, extract the useful components for
+        validating the certificate status with an OCSP server.
+
+        Args:
+            cert ([bytes]): A PEM encoded ssl certificate
+        """
+
+        try:
+            aia = cert.extensions.get_extension_for_oid(
+                x509.oid.ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+            ).value
+        except cryptography.x509.extensions.ExtensionNotFound:
+            raise ConnectionError("No AIA information present in ssl certificate")
+
+        # fetch certificate issuers
+        issuers = [
+            i
+            for i in aia
+            if i.access_method == x509.oid.AuthorityInformationAccessOID.CA_ISSUERS
+        ]
+        try:
+            issuer = issuers[0].access_location.value
+        except IndexError:
+            issuer = None
+
+        # now, the series of ocsp server entries
+        ocsps = [
+            i
+            for i in aia
+            if i.access_method == x509.oid.AuthorityInformationAccessOID.OCSP
+        ]
+
+        try:
+            ocsp = ocsps[0].access_location.value
+        except IndexError:
+            raise ConnectionError("no ocsp servers in certificate")
+
+        return cert, issuer, ocsp
+
+    def components_from_direct_connection(self):
+        """Return the certificate, primary issuer, and primary ocsp server
+        from the host defined by the socket. This is useful in cases where
+        different certificates are occasionally presented.
+ """ + + pem = ssl.get_server_certificate((self.HOST, self.PORT), ca_certs=self.CA_CERTS) + cert = x509.load_pem_x509_certificate(pem.encode(), backends.default_backend()) + return self._certificate_components(cert) + + def build_certificate_url(self, server, cert, issuer_cert): + """Return the complete url to the ocsp""" + orb = ocsp.OCSPRequestBuilder() + + # add_certificate returns an initialized OCSPRequestBuilder + orb = orb.add_certificate( + cert, issuer_cert, cryptography.hazmat.primitives.hashes.SHA256() + ) + request = orb.build() + + path = base64.b64encode( + request.public_bytes(hazmat.primitives.serialization.Encoding.DER) + ) + url = urljoin(server, path.decode("ascii")) + return url + + def check_certificate(self, server, cert, issuer_url): + """Checks the validity of an ocsp server for an issuer""" + + r = requests.get(issuer_url) + if not r.ok: + raise ConnectionError("failed to fetch issuer certificate") + der = r.content + issuer_cert = self._bin2ascii(der) + + ocsp_url = self.build_certificate_url(server, cert, issuer_cert) + + # HTTP 1.1 mandates the addition of the Host header in ocsp responses + header = { + "Host": urlparse(ocsp_url).netloc, + "Content-Type": "application/ocsp-request", + } + r = requests.get(ocsp_url, headers=header) + if not r.ok: + raise ConnectionError("failed to fetch ocsp certificate") + return _check_certificate(issuer_cert, r.content, True) + + def is_valid(self): + """Returns the validity of the certificate wrapping our socket. + This first retrieves for validate the certificate, issuer_url, + and ocsp_server for certificate validate. Then retrieves the + issuer certificate from the issuer_url, and finally checks + the validity of OCSP revocation status. + """ + + # validate the certificate + try: + cert, issuer_url, ocsp_server = self.components_from_socket() + if issuer_url is None: + raise ConnectionError("no issuers found in certificate chain") + return self.check_certificate(ocsp_server, cert, issuer_url) + except AuthorizationError: + cert, issuer_url, ocsp_server = self.components_from_direct_connection() + if issuer_url is None: + raise ConnectionError("no issuers found in certificate chain") + return self.check_certificate(ocsp_server, cert, issuer_url) diff --git a/templates/skills/spotify/dependencies/redis/py.typed b/templates/skills/spotify/dependencies/redis/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/templates/skills/spotify/dependencies/redis/retry.py b/templates/skills/spotify/dependencies/redis/retry.py new file mode 100644 index 00000000..60644305 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/retry.py @@ -0,0 +1,54 @@ +import socket +from time import sleep + +from redis.exceptions import ConnectionError, TimeoutError + + +class Retry: + """Retry a specific number of times after a failure""" + + def __init__( + self, + backoff, + retries, + supported_errors=(ConnectionError, TimeoutError, socket.timeout), + ): + """ + Initialize a `Retry` object with a `Backoff` object + that retries a maximum of `retries` times. + `retries` can be negative to retry forever. + You can specify the types of supported errors which trigger + a retry with the `supported_errors` parameter. 
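+
+        A usage sketch (illustrative; ``do_work`` and ``on_error`` are
+        placeholder callables, and ``ExponentialBackoff`` ships in
+        ``redis.backoff``)::
+
+            from redis.backoff import ExponentialBackoff
+
+            retry = Retry(ExponentialBackoff(), retries=3)
+            result = retry.call_with_retry(do_work, on_error)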
+ """ + self._backoff = backoff + self._retries = retries + self._supported_errors = supported_errors + + def update_supported_errors(self, specified_errors: list): + """ + Updates the supported errors with the specified error types + """ + self._supported_errors = tuple( + set(self._supported_errors + tuple(specified_errors)) + ) + + def call_with_retry(self, do, fail): + """ + Execute an operation that might fail and returns its result, or + raise the exception that was thrown depending on the `Backoff` object. + `do`: the operation to call. Expects no argument. + `fail`: the failure handler, expects the last error that was thrown + """ + self._backoff.reset() + failures = 0 + while True: + try: + return do() + except self._supported_errors as error: + failures += 1 + fail(error) + if self._retries >= 0 and failures > self._retries: + raise error + backoff = self._backoff.compute(failures) + if backoff > 0: + sleep(backoff) diff --git a/templates/skills/spotify/dependencies/redis/sentinel.py b/templates/skills/spotify/dependencies/redis/sentinel.py new file mode 100644 index 00000000..41f308d1 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/sentinel.py @@ -0,0 +1,389 @@ +import random +import weakref +from typing import Optional + +from redis.client import Redis +from redis.commands import SentinelCommands +from redis.connection import Connection, ConnectionPool, SSLConnection +from redis.exceptions import ConnectionError, ReadOnlyError, ResponseError, TimeoutError +from redis.utils import str_if_bytes + + +class MasterNotFoundError(ConnectionError): + pass + + +class SlaveNotFoundError(ConnectionError): + pass + + +class SentinelManagedConnection(Connection): + def __init__(self, **kwargs): + self.connection_pool = kwargs.pop("connection_pool") + super().__init__(**kwargs) + + def __repr__(self): + pool = self.connection_pool + s = f"{type(self).__name__}" + if self.host: + host_info = f",host={self.host},port={self.port}" + s = s % host_info + return s + + def connect_to(self, address): + self.host, self.port = address + super().connect() + if self.connection_pool.check_connection: + self.send_command("PING") + if str_if_bytes(self.read_response()) != "PONG": + raise ConnectionError("PING failed") + + def _connect_retry(self): + if self._sock: + return # already connected + if self.connection_pool.is_master: + self.connect_to(self.connection_pool.get_master_address()) + else: + for slave in self.connection_pool.rotate_slaves(): + try: + return self.connect_to(slave) + except ConnectionError: + continue + raise SlaveNotFoundError # Never be here + + def connect(self): + return self.retry.call_with_retry(self._connect_retry, lambda error: None) + + def read_response( + self, + disable_decoding=False, + *, + disconnect_on_error: Optional[bool] = False, + push_request: Optional[bool] = False, + ): + try: + return super().read_response( + disable_decoding=disable_decoding, + disconnect_on_error=disconnect_on_error, + push_request=push_request, + ) + except ReadOnlyError: + if self.connection_pool.is_master: + # When talking to a master, a ReadOnlyError when likely + # indicates that the previous master that we're still connected + # to has been demoted to a slave and there's a new master. + # calling disconnect will force the connection to re-query + # sentinel during the next connect() attempt. 
+                self.disconnect()
+                raise ConnectionError("The previous master is now a slave")
+            raise
+
+
+class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection):
+    pass
+
+
+class SentinelConnectionPoolProxy:
+    def __init__(
+        self,
+        connection_pool,
+        is_master,
+        check_connection,
+        service_name,
+        sentinel_manager,
+    ):
+        self.connection_pool_ref = weakref.ref(connection_pool)
+        self.is_master = is_master
+        self.check_connection = check_connection
+        self.service_name = service_name
+        self.sentinel_manager = sentinel_manager
+        self.reset()
+
+    def reset(self):
+        self.master_address = None
+        self.slave_rr_counter = None
+
+    def get_master_address(self):
+        master_address = self.sentinel_manager.discover_master(self.service_name)
+        if self.is_master and self.master_address != master_address:
+            self.master_address = master_address
+            # disconnect any idle connections so that they reconnect
+            # to the new master the next time that they are used.
+            connection_pool = self.connection_pool_ref()
+            if connection_pool is not None:
+                connection_pool.disconnect(inuse_connections=False)
+        return master_address
+
+    def rotate_slaves(self):
+        slaves = self.sentinel_manager.discover_slaves(self.service_name)
+        if slaves:
+            if self.slave_rr_counter is None:
+                self.slave_rr_counter = random.randint(0, len(slaves) - 1)
+            for _ in range(len(slaves)):
+                self.slave_rr_counter = (self.slave_rr_counter + 1) % len(slaves)
+                slave = slaves[self.slave_rr_counter]
+                yield slave
+        # Fallback to the master connection
+        try:
+            yield self.get_master_address()
+        except MasterNotFoundError:
+            pass
+        raise SlaveNotFoundError(f"No slave found for {self.service_name!r}")
+
+
+class SentinelConnectionPool(ConnectionPool):
+    """
+    Sentinel backed connection pool.
+
+    If the ``check_connection`` flag is set to True, SentinelManagedConnection
+    sends a PING command right after establishing the connection.
+    """
+
+    def __init__(self, service_name, sentinel_manager, **kwargs):
+        kwargs["connection_class"] = kwargs.get(
+            "connection_class",
+            SentinelManagedSSLConnection
+            if kwargs.pop("ssl", False)
+            else SentinelManagedConnection,
+        )
+        self.is_master = kwargs.pop("is_master", True)
+        self.check_connection = kwargs.pop("check_connection", False)
+        self.proxy = SentinelConnectionPoolProxy(
+            connection_pool=self,
+            is_master=self.is_master,
+            check_connection=self.check_connection,
+            service_name=service_name,
+            sentinel_manager=sentinel_manager,
+        )
+        super().__init__(**kwargs)
+        self.connection_kwargs["connection_pool"] = self.proxy
+        self.service_name = service_name
+        self.sentinel_manager = sentinel_manager
+
+    def __repr__(self):
+        role = "master" if self.is_master else "slave"
+        return f"{type(self).__name__}<service={self.service_name}({role})>"
+
+    def reset(self):
+        super().reset()
+        self.proxy.reset()
+
+    @property
+    def master_address(self):
+        return self.proxy.master_address
+
+    def owns_connection(self, connection):
+        check = not self.is_master or (
+            self.is_master
+            and self.master_address == (connection.host, connection.port)
+        )
+        parent = super()
+        return check and parent.owns_connection(connection)
+
+    def get_master_address(self):
+        return self.proxy.get_master_address()
+
+    def rotate_slaves(self):
+        "Round-robin slave balancer"
+        return self.proxy.rotate_slaves()
+
+
+class Sentinel(SentinelCommands):
+    """
+    Redis Sentinel cluster client
+
+    >>> from redis.sentinel import Sentinel
+    >>> sentinel = Sentinel([('localhost', 26379)], socket_timeout=0.1)
+    >>> master = sentinel.master_for('mymaster', socket_timeout=0.1)
+    >>> master.set('foo', 'bar')
+    >>> slave = sentinel.slave_for('mymaster', socket_timeout=0.1)
+    >>> slave.get('foo')
+    b'bar'
+
+    ``sentinels`` is a list of sentinel nodes. Each node is represented by
+    a pair (hostname, port).
+
+    ``min_other_sentinels`` defines a minimum number of peers for a sentinel.
+    When querying a sentinel, if it doesn't meet this threshold, responses
+    from that sentinel won't be considered valid.
+
+    ``sentinel_kwargs`` is a dictionary of connection arguments used when
+    connecting to sentinel instances. Any argument that can be passed to
+    a normal Redis connection can be specified here. If ``sentinel_kwargs`` is
+    not specified, any socket_timeout and socket_keepalive options specified
+    in ``connection_kwargs`` will be used.
+
+    ``connection_kwargs`` are keyword arguments that will be used when
+    establishing a connection to a Redis server.
+    """
+
+    def __init__(
+        self,
+        sentinels,
+        min_other_sentinels=0,
+        sentinel_kwargs=None,
+        **connection_kwargs,
+    ):
+        # if sentinel_kwargs isn't defined, use the socket_* options from
+        # connection_kwargs
+        if sentinel_kwargs is None:
+            sentinel_kwargs = {
+                k: v for k, v in connection_kwargs.items() if k.startswith("socket_")
+            }
+        self.sentinel_kwargs = sentinel_kwargs
+
+        self.sentinels = [
+            Redis(hostname, port, **self.sentinel_kwargs)
+            for hostname, port in sentinels
+        ]
+        self.min_other_sentinels = min_other_sentinels
+        self.connection_kwargs = connection_kwargs
+
+    def execute_command(self, *args, **kwargs):
+        """
+        Execute a Sentinel command on the sentinel nodes.
+        once - If set to True, then execute the resulting command on a single
+        node at random, rather than across the entire sentinel cluster.
+        """
+        once = bool(kwargs.get("once", False))
+        if "once" in kwargs.keys():
+            kwargs.pop("once")
+
+        if once:
+            random.choice(self.sentinels).execute_command(*args, **kwargs)
+        else:
+            for sentinel in self.sentinels:
+                sentinel.execute_command(*args, **kwargs)
+        return True
+
+    def __repr__(self):
+        sentinel_addresses = []
+        for sentinel in self.sentinels:
+            sentinel_addresses.append(
+                "{host}:{port}".format_map(sentinel.connection_pool.connection_kwargs)
+            )
+        return f'{type(self).__name__}<sentinels=[{",".join(sentinel_addresses)}]>'
+
+    def check_master_state(self, state, service_name):
+        if not state["is_master"] or state["is_sdown"] or state["is_odown"]:
+            return False
+        # Check if our sentinel doesn't see other nodes
+        if state["num-other-sentinels"] < self.min_other_sentinels:
+            return False
+        return True
+
+    def discover_master(self, service_name):
+        """
+        Asks sentinel servers for the Redis master's address corresponding
+        to the service labeled ``service_name``.
+
+        Returns a pair (address, port) or raises MasterNotFoundError if no
+        master is found.
+ """ + collected_errors = list() + for sentinel_no, sentinel in enumerate(self.sentinels): + try: + masters = sentinel.sentinel_masters() + except (ConnectionError, TimeoutError) as e: + collected_errors.append(f"{sentinel} - {e!r}") + continue + state = masters.get(service_name) + if state and self.check_master_state(state, service_name): + # Put this sentinel at the top of the list + self.sentinels[0], self.sentinels[sentinel_no] = ( + sentinel, + self.sentinels[0], + ) + return state["ip"], state["port"] + + error_info = "" + if len(collected_errors) > 0: + error_info = f" : {', '.join(collected_errors)}" + raise MasterNotFoundError(f"No master found for {service_name!r}{error_info}") + + def filter_slaves(self, slaves): + "Remove slaves that are in an ODOWN or SDOWN state" + slaves_alive = [] + for slave in slaves: + if slave["is_odown"] or slave["is_sdown"]: + continue + slaves_alive.append((slave["ip"], slave["port"])) + return slaves_alive + + def discover_slaves(self, service_name): + "Returns a list of alive slaves for service ``service_name``" + for sentinel in self.sentinels: + try: + slaves = sentinel.sentinel_slaves(service_name) + except (ConnectionError, ResponseError, TimeoutError): + continue + slaves = self.filter_slaves(slaves) + if slaves: + return slaves + return [] + + def master_for( + self, + service_name, + redis_class=Redis, + connection_pool_class=SentinelConnectionPool, + **kwargs, + ): + """ + Returns a redis client instance for the ``service_name`` master. + + A :py:class:`~redis.sentinel.SentinelConnectionPool` class is + used to retrieve the master's address before establishing a new + connection. + + NOTE: If the master's address has changed, any cached connections to + the old master are closed. + + By default clients will be a :py:class:`~redis.Redis` instance. + Specify a different class to the ``redis_class`` argument if you + desire something different. + + The ``connection_pool_class`` specifies the connection pool to + use. The :py:class:`~redis.sentinel.SentinelConnectionPool` + will be used by default. + + All other keyword arguments are merged with any connection_kwargs + passed to this class and passed to the connection pool as keyword + arguments to be used to initialize Redis connections. + """ + kwargs["is_master"] = True + connection_kwargs = dict(self.connection_kwargs) + connection_kwargs.update(kwargs) + return redis_class.from_pool( + connection_pool_class(service_name, self, **connection_kwargs) + ) + + def slave_for( + self, + service_name, + redis_class=Redis, + connection_pool_class=SentinelConnectionPool, + **kwargs, + ): + """ + Returns redis client instance for the ``service_name`` slave(s). + + A SentinelConnectionPool class is used to retrieve the slave's + address before establishing a new connection. + + By default clients will be a :py:class:`~redis.Redis` instance. + Specify a different class to the ``redis_class`` argument if you + desire something different. + + The ``connection_pool_class`` specifies the connection pool to use. + The SentinelConnectionPool will be used by default. + + All other keyword arguments are merged with any connection_kwargs + passed to this class and passed to the connection pool as keyword + arguments to be used to initialize Redis connections. 
+ """ + kwargs["is_master"] = False + connection_kwargs = dict(self.connection_kwargs) + connection_kwargs.update(kwargs) + return redis_class.from_pool( + connection_pool_class(service_name, self, **connection_kwargs) + ) diff --git a/templates/skills/spotify/dependencies/redis/typing.py b/templates/skills/spotify/dependencies/redis/typing.py new file mode 100644 index 00000000..56a1e99b --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/typing.py @@ -0,0 +1,65 @@ +# from __future__ import annotations + +from datetime import datetime, timedelta +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Iterable, + Mapping, + Type, + TypeVar, + Union, +) + +from redis.compat import Protocol + +if TYPE_CHECKING: + from redis._parsers import Encoder + from redis.asyncio.connection import ConnectionPool as AsyncConnectionPool + from redis.connection import ConnectionPool + + +Number = Union[int, float] +EncodedT = Union[bytes, memoryview] +DecodedT = Union[str, int, float] +EncodableT = Union[EncodedT, DecodedT] +AbsExpiryT = Union[int, datetime] +ExpiryT = Union[int, timedelta] +ZScoreBoundT = Union[float, str] # str allows for the [ or ( prefix +BitfieldOffsetT = Union[int, str] # str allows for #x syntax +_StringLikeT = Union[bytes, str, memoryview] +KeyT = _StringLikeT # Main redis key space +PatternT = _StringLikeT # Patterns matched against keys, fields etc +FieldT = EncodableT # Fields within hash tables, streams and geo commands +KeysT = Union[KeyT, Iterable[KeyT]] +ChannelT = _StringLikeT +GroupT = _StringLikeT # Consumer group +ConsumerT = _StringLikeT # Consumer name +StreamIdT = Union[int, _StringLikeT] +ScriptTextT = _StringLikeT +TimeoutSecT = Union[int, float, _StringLikeT] +# Mapping is not covariant in the key type, which prevents +# Mapping[_StringLikeT, X] from accepting arguments of type Dict[str, X]. Using +# a TypeVar instead of a Union allows mappings with any of the permitted types +# to be passed. Care is needed if there is more than one such mapping in a +# type signature because they will all be required to be the same key type. +AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview) +AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview) +AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview) + +ExceptionMappingT = Mapping[str, Union[Type[Exception], Mapping[str, Type[Exception]]]] + + +class CommandsProtocol(Protocol): + connection_pool: Union["AsyncConnectionPool", "ConnectionPool"] + + def execute_command(self, *args, **options): + ... + + +class ClusterCommandsProtocol(CommandsProtocol, Protocol): + encoder: "Encoder" + + def execute_command(self, *args, **options) -> Union[Any, Awaitable]: + ... 
diff --git a/templates/skills/spotify/dependencies/redis/utils.py b/templates/skills/spotify/dependencies/redis/utils.py new file mode 100644 index 00000000..01fdfed7 --- /dev/null +++ b/templates/skills/spotify/dependencies/redis/utils.py @@ -0,0 +1,147 @@ +import logging +import sys +from contextlib import contextmanager +from functools import wraps +from typing import Any, Dict, Mapping, Union + +try: + import hiredis # noqa + + # Only support Hiredis >= 1.0: + HIREDIS_AVAILABLE = not hiredis.__version__.startswith("0.") + HIREDIS_PACK_AVAILABLE = hasattr(hiredis, "pack_command") +except ImportError: + HIREDIS_AVAILABLE = False + HIREDIS_PACK_AVAILABLE = False + +try: + import ssl # noqa + + SSL_AVAILABLE = True +except ImportError: + SSL_AVAILABLE = False + +try: + import cryptography # noqa + + CRYPTOGRAPHY_AVAILABLE = True +except ImportError: + CRYPTOGRAPHY_AVAILABLE = False + +if sys.version_info >= (3, 8): + from importlib import metadata +else: + import importlib_metadata as metadata + + +def from_url(url, **kwargs): + """ + Returns an active Redis client generated from the given database URL. + + Will attempt to extract the database id from the path url fragment, if + none is provided. + """ + from redis.client import Redis + + return Redis.from_url(url, **kwargs) + + +@contextmanager +def pipeline(redis_obj): + p = redis_obj.pipeline() + yield p + p.execute() + + +def str_if_bytes(value: Union[str, bytes]) -> str: + return ( + value.decode("utf-8", errors="replace") if isinstance(value, bytes) else value + ) + + +def safe_str(value): + return str(str_if_bytes(value)) + + +def dict_merge(*dicts: Mapping[str, Any]) -> Dict[str, Any]: + """ + Merge all provided dicts into 1 dict. + *dicts : `dict` + dictionaries to merge + """ + merged = {} + + for d in dicts: + merged.update(d) + + return merged + + +def list_keys_to_dict(key_list, callback): + return dict.fromkeys(key_list, callback) + + +def merge_result(command, res): + """ + Merge all items in `res` into a list. + + This command is used when sending a command to multiple nodes + and the result from each node should be merged into a single list. + + res : 'dict' + """ + result = set() + + for v in res.values(): + for value in v: + result.add(value) + + return list(result) + + +def warn_deprecated(name, reason="", version="", stacklevel=2): + import warnings + + msg = f"Call to deprecated {name}." + if reason: + msg += f" ({reason})" + if version: + msg += f" -- Deprecated since version {version}." + warnings.warn(msg, category=DeprecationWarning, stacklevel=stacklevel) + + +def deprecated_function(reason="", version="", name=None): + """ + Decorator to mark a function as deprecated. + """ + + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + warn_deprecated(name or func.__name__, reason, version, stacklevel=3) + return func(*args, **kwargs) + + return wrapper + + return decorator + + +def _set_info_logger(): + """ + Set up a logger that log info logs to stdout. 
+ (This is used by the default push response handler) + """ + if "push_response" not in logging.root.manager.loggerDict.keys(): + logger = logging.getLogger("push_response") + logger.setLevel(logging.INFO) + handler = logging.StreamHandler() + handler.setLevel(logging.INFO) + logger.addHandler(handler) + + +def get_lib_version(): + try: + libver = metadata.version("redis") + except metadata.PackageNotFoundError: + libver = "99.99.99" + return libver diff --git a/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/INSTALLER b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/LICENSE b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/LICENSE new file mode 100644 index 00000000..67db8588 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/METADATA b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/METADATA new file mode 100644 index 00000000..05779fa2 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/METADATA @@ -0,0 +1,122 @@ +Metadata-Version: 2.1 +Name: requests +Version: 2.31.0 +Summary: Python HTTP for Humans. +Home-page: https://requests.readthedocs.io +Author: Kenneth Reitz +Author-email: me@kennethreitz.org +License: Apache 2.0 +Project-URL: Documentation, https://requests.readthedocs.io +Project-URL: Source, https://github.com/psf/requests +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.7 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: charset-normalizer (<4,>=2) +Requires-Dist: idna (<4,>=2.5) +Requires-Dist: urllib3 (<3,>=1.21.1) +Requires-Dist: certifi (>=2017.4.17) +Provides-Extra: security +Provides-Extra: socks +Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks' +Provides-Extra: use_chardet_on_py3 +Requires-Dist: chardet (<6,>=3.0.2) ; extra == 'use_chardet_on_py3' + +# Requests + +**Requests** is a simple, yet elegant, HTTP library. 
+ +```python +>>> import requests +>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass')) +>>> r.status_code +200 +>>> r.headers['content-type'] +'application/json; charset=utf8' +>>> r.encoding +'utf-8' +>>> r.text +'{"authenticated": true, ...' +>>> r.json() +{'authenticated': True, ...} +``` + +Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method! + +Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code. + +[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests) +[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests) +[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors) + +## Installing Requests and Supported Versions + +Requests is available on PyPI: + +```console +$ python -m pip install requests +``` + +Requests officially supports Python 3.7+. + +## Supported Features & Best–Practices + +Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today. + +- Keep-Alive & Connection Pooling +- International Domains and URLs +- Sessions with Cookie Persistence +- Browser-style TLS/SSL Verification +- Basic & Digest Authentication +- Familiar `dict`–like Cookies +- Automatic Content Decompression and Decoding +- Multi-part File Uploads +- SOCKS Proxy Support +- Connection Timeouts +- Streaming Downloads +- Automatic honoring of `.netrc` +- Chunked HTTP Requests + +## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io) + +[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io) + +## Cloning the repository + +When cloning the Requests repository, you may need to add the `-c +fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see +[this issue](https://github.com/psf/requests/issues/2690) for more background): + +```shell +git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git +``` + +You can also apply this setting to your global Git config: + +```shell +git config --global fetch.fsck.badTimezone ignore +``` + +--- + +[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf) + + diff --git a/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/RECORD b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/RECORD new file mode 100644 index 00000000..40a0bcc7 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/RECORD @@ -0,0 +1,42 @@ +requests-2.31.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +requests-2.31.0.dist-info/LICENSE,sha256=CeipvOyAZxBGUsFoaFqwkx54aPnIKEtm9a5u2uXxEws,10142 +requests-2.31.0.dist-info/METADATA,sha256=eCPokOnbb0FROLrfl0R5EpDvdufsb9CaN4noJH__54I,4634 +requests-2.31.0.dist-info/RECORD,, 
+requests-2.31.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +requests-2.31.0.dist-info/top_level.txt,sha256=fMSVmHfb5rbGOo6xv-O_tUX6j-WyixssE-SnwcDRxNQ,9 +requests/__init__.py,sha256=LvmKhjIz8mHaKXthC2Mv5ykZ1d92voyf3oJpd-VuAig,4963 +requests/__pycache__/__init__.cpython-311.pyc,, +requests/__pycache__/__version__.cpython-311.pyc,, +requests/__pycache__/_internal_utils.cpython-311.pyc,, +requests/__pycache__/adapters.cpython-311.pyc,, +requests/__pycache__/api.cpython-311.pyc,, +requests/__pycache__/auth.cpython-311.pyc,, +requests/__pycache__/certs.cpython-311.pyc,, +requests/__pycache__/compat.cpython-311.pyc,, +requests/__pycache__/cookies.cpython-311.pyc,, +requests/__pycache__/exceptions.cpython-311.pyc,, +requests/__pycache__/help.cpython-311.pyc,, +requests/__pycache__/hooks.cpython-311.pyc,, +requests/__pycache__/models.cpython-311.pyc,, +requests/__pycache__/packages.cpython-311.pyc,, +requests/__pycache__/sessions.cpython-311.pyc,, +requests/__pycache__/status_codes.cpython-311.pyc,, +requests/__pycache__/structures.cpython-311.pyc,, +requests/__pycache__/utils.cpython-311.pyc,, +requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435 +requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495 +requests/adapters.py,sha256=v_FmjU5KZ76k-YttShZYB5RprIzhhL8Y3zgW9p4eBQ8,19553 +requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449 +requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187 +requests/certs.py,sha256=Z9Sb410Anv6jUFTyss0jFFhU6xst8ctELqfy8Ev23gw,429 +requests/compat.py,sha256=yxntVOSEHGMrn7FNr_32EEam1ZNAdPRdSE13_yaHzTk,1451 +requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560 +requests/exceptions.py,sha256=DhveFBclVjTRxhRduVpO-GbMYMID2gmjdLfNEqNpI_U,3811 +requests/help.py,sha256=gPX5d_H7Xd88aDABejhqGgl9B1VFRTt5BmiYvL3PzIQ,3875 +requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733 +requests/models.py,sha256=-DlKi0or8gFAM6VzutobXvvBW_2wrJuOF5NfndTIddA,35223 +requests/packages.py,sha256=DXgv-FJIczZITmv0vEBAhWj4W-5CGCIN_ksvgR17Dvs,957 +requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373 +requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235 +requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912 +requests/utils.py,sha256=6sx2X3cIVA8BgWOg8odxFy-_lbWDFETU8HI4fU4Rmqw,33448 diff --git a/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/WHEEL b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/WHEEL new file mode 100644 index 00000000..1f37c02f --- /dev/null +++ b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/top_level.txt b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/top_level.txt new file mode 100644 index 00000000..f2293605 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests-2.31.0.dist-info/top_level.txt @@ -0,0 +1 @@ +requests diff --git a/templates/skills/spotify/dependencies/requests/__init__.py b/templates/skills/spotify/dependencies/requests/__init__.py new file mode 100644 index 00000000..300a16c5 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/__init__.py @@ -0,0 +1,180 @@ +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +Requests HTTP 
Library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: + + >>> import requests + >>> r = requests.get('https://www.python.org') + >>> r.status_code + 200 + >>> b'Python is a programming language' in r.content + True + +... or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post('https://httpbin.org/post', data=payload) + >>> print(r.text) + { + ... + "form": { + "key1": "value1", + "key2": "value2" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at . + +:copyright: (c) 2017 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. +""" + +import warnings + +import urllib3 + +from .exceptions import RequestsDependencyWarning + +try: + from charset_normalizer import __version__ as charset_normalizer_version +except ImportError: + charset_normalizer_version = None + +try: + from chardet import __version__ as chardet_version +except ImportError: + chardet_version = None + + +def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): + urllib3_version = urllib3_version.split(".") + assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. + if len(urllib3_version) == 2: + urllib3_version.append("0") + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) + # urllib3 >= 1.21.1 + assert major >= 1 + if major == 1: + assert minor >= 21 + + # Check charset_normalizer for compatibility. + if chardet_version: + major, minor, patch = chardet_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # chardet_version >= 3.0.2, < 6.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) + elif charset_normalizer_version: + major, minor, patch = charset_normalizer_version.split(".")[:3] + major, minor, patch = int(major), int(minor), int(patch) + # charset_normalizer >= 2.0.0 < 4.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) + else: + raise Exception("You need either charset_normalizer or chardet installed") + + +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split("."))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = "Old version of cryptography ({}) may cause slowdown.".format( + cryptography_version + ) + warnings.warn(warning, RequestsDependencyWarning) + + +# Check imported dependencies for compatibility. +try: + check_compatibility( + urllib3.__version__, chardet_version, charset_normalizer_version + ) +except (AssertionError, ValueError): + warnings.warn( + "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " + "version!".format( + urllib3.__version__, chardet_version, charset_normalizer_version + ), + RequestsDependencyWarning, + ) + +# Attempt to enable urllib3's fallback for SNI support +# if the standard library doesn't support SNI or the +# 'ssl' library isn't available. 
+try: + try: + import ssl + except ImportError: + ssl = None + + if not getattr(ssl, "HAS_SNI", False): + from urllib3.contrib import pyopenssl + + pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + + _check_cryptography(cryptography_version) +except ImportError: + pass + +# urllib3's DependencyWarnings should be silenced. +from urllib3.exceptions import DependencyWarning + +warnings.simplefilter("ignore", DependencyWarning) + +# Set default logging handler to avoid "No handler found" warnings. +import logging +from logging import NullHandler + +from . import packages, utils +from .__version__ import ( + __author__, + __author_email__, + __build__, + __cake__, + __copyright__, + __description__, + __license__, + __title__, + __url__, + __version__, +) +from .api import delete, get, head, options, patch, post, put, request +from .exceptions import ( + ConnectionError, + ConnectTimeout, + FileModeWarning, + HTTPError, + JSONDecodeError, + ReadTimeout, + RequestException, + Timeout, + TooManyRedirects, + URLRequired, +) +from .models import PreparedRequest, Request, Response +from .sessions import Session, session +from .status_codes import codes + +logging.getLogger(__name__).addHandler(NullHandler()) + +# FileModeWarnings go off per the default. +warnings.simplefilter("default", FileModeWarning, append=True) diff --git a/templates/skills/spotify/dependencies/requests/__version__.py b/templates/skills/spotify/dependencies/requests/__version__.py new file mode 100644 index 00000000..5063c3f8 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = "requests" +__description__ = "Python HTTP for Humans." +__url__ = "https://requests.readthedocs.io" +__version__ = "2.31.0" +__build__ = 0x023100 +__author__ = "Kenneth Reitz" +__author_email__ = "me@kennethreitz.org" +__license__ = "Apache 2.0" +__copyright__ = "Copyright Kenneth Reitz" +__cake__ = "\u2728 \U0001f370 \u2728" diff --git a/templates/skills/spotify/dependencies/requests/_internal_utils.py b/templates/skills/spotify/dependencies/requests/_internal_utils.py new file mode 100644 index 00000000..f2cf635e --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/_internal_utils.py @@ -0,0 +1,50 @@ +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" +import re + +from .compat import builtin_str + +_VALID_HEADER_NAME_RE_BYTE = re.compile(rb"^[^:\s][^:\r\n]*$") +_VALID_HEADER_NAME_RE_STR = re.compile(r"^[^:\s][^:\r\n]*$") +_VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") +_VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") + +_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) +_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) +HEADER_VALIDATORS = { + bytes: _HEADER_VALIDATORS_BYTE, + str: _HEADER_VALIDATORS_STR, +} + + +def to_native_string(string, encoding="ascii"): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. 
+ """ + if isinstance(string, builtin_str): + out = string + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. + :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode("ascii") + return True + except UnicodeEncodeError: + return False diff --git a/templates/skills/spotify/dependencies/requests/adapters.py b/templates/skills/spotify/dependencies/requests/adapters.py new file mode 100644 index 00000000..78e3bb6e --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/adapters.py @@ -0,0 +1,538 @@ +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. +""" + +import os.path +import socket # noqa: F401 + +from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError +from urllib3.exceptions import HTTPError as _HTTPError +from urllib3.exceptions import InvalidHeader as _InvalidHeader +from urllib3.exceptions import ( + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, +) +from urllib3.exceptions import ProxyError as _ProxyError +from urllib3.exceptions import ReadTimeoutError, ResponseError +from urllib3.exceptions import SSLError as _SSLError +from urllib3.poolmanager import PoolManager, proxy_from_url +from urllib3.util import Timeout as TimeoutSauce +from urllib3.util import parse_url +from urllib3.util.retry import Retry + +from .auth import _basic_auth_str +from .compat import basestring, urlparse +from .cookies import extract_cookies_to_jar +from .exceptions import ( + ConnectionError, + ConnectTimeout, + InvalidHeader, + InvalidProxyURL, + InvalidSchema, + InvalidURL, + ProxyError, + ReadTimeout, + RetryError, + SSLError, +) +from .models import Response +from .structures import CaseInsensitiveDict +from .utils import ( + DEFAULT_CA_BUNDLE_PATH, + extract_zipped_paths, + get_auth_from_url, + get_encoding_from_headers, + prepend_scheme_if_needed, + select_proxy, + urldefragauth, +) + +try: + from urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +class BaseAdapter: + """The Base Transport Adapter""" + + def __init__(self): + super().__init__() + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. 
+ """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. + + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session ` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + + __attrs__ = [ + "max_retries", + "config", + "_pool_connections", + "_pool_maxsize", + "_pool_block", + ] + + def __init__( + self, + pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, + max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK, + ): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super().__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager( + self._pool_connections, self._pool_maxsize, block=self._pool_block + ) + + def init_poolmanager( + self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs + ): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager( + num_pools=connections, + maxsize=maxsize, + block=block, + **pool_kwargs, + ) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The proxy to return a urllib3 ProxyManager for. 
+ :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith("socks"): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs, + ) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify a SSL certificate. This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith("https") and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise OSError( + f"Could not find a suitable TLS CA certificate bundle, " + f"invalid path: {cert_loc}" + ) + + conn.cert_reqs = "CERT_REQUIRED" + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = "CERT_NONE" + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise OSError( + f"Could not find the TLS certificate file, " + f"invalid path: {conn.cert_file}" + ) + if conn.key_file and not os.path.exists(conn.key_file): + raise OSError( + f"Could not find the TLS key file, invalid path: {conn.key_file}" + ) + + def build_response(self, req, resp): + """Builds a :class:`Response ` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter ` + + :param req: The :class:`PreparedRequest ` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. + response.status_code = getattr(resp, "status", None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, "headers", {})) + + # Set encoding. + response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode("utf-8") + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context. 
+ response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed " + "and could be missing the host." + ) + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = proxy and scheme != "https" + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith("socks") + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter `. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return headers + + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. 
+ :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + :rtype: requests.Response + """ + + try: + conn = self.get_connection(request.url, proxies) + except LocationValueError as e: + raise InvalidURL(e, request=request) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers( + request, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + ) + + chunked = not (request.body is None or "Content-Length" in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError: + raise ValueError( + f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, " + f"or a single float to set both timeouts to the same value." + ) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + chunked=chunked, + ) + + except (ProtocolError, OSError) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + elif isinstance(e, _InvalidHeader): + raise InvalidHeader(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/templates/skills/spotify/dependencies/requests/api.py b/templates/skills/spotify/dependencies/requests/api.py new file mode 100644 index 00000000..cd0b3eea --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/api.py @@ -0,0 +1,157 @@ +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. 
+ + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. + :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("get", url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("options", url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. 
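
A brief usage sketch of the module-level API above: every helper delegates to `request()`, which opens and closes a throwaway `Session` per call, so holding a `Session` explicitly is preferable when making many requests to the same host.

    import requests

    r = requests.get("https://httpbin.org/get", params={"q": "spotify"})
    print(r.status_code)  # 200
    print(r.url)          # https://httpbin.org/get?q=spotify

    # Equivalent, but with connection pooling across calls:
    with requests.Session() as s:
        print(s.get("https://httpbin.org/get").status_code)
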
+ :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return request("head", url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("post", url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("put", url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("patch", url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request("delete", url, **kwargs) diff --git a/templates/skills/spotify/dependencies/requests/auth.py b/templates/skills/spotify/dependencies/requests/auth.py new file mode 100644 index 00000000..9733686d --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/auth.py @@ -0,0 +1,315 @@ +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import hashlib +import os +import re +import threading +import time +import warnings +from base64 import b64encode + +from ._internal_utils import to_native_string +from .compat import basestring, str, urlparse +from .cookies import extract_cookies_to_jar +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. 
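
To make the Basic auth encoding below concrete, here is what `_basic_auth_str` computes for ordinary string credentials, reproduced as a sketch with only the stdlib:

    from base64 import b64encode

    username, password = "user", "pass"
    # latin-1 encode, join with ':', base64 — exactly the steps below.
    token = b64encode(b":".join((username.encode("latin1"), password.encode("latin1"))))
    print("Basic " + token.decode())  # Basic dXNlcjpwYXNz
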
+ if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode("latin1") + + if isinstance(password, str): + password = password.encode("latin1") + + authstr = "Basic " + to_native_string( + b64encode(b":".join((username, password))).strip() + ) + + return authstr + + +class AuthBase: + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError("Auth hooks must be callable.") + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers["Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, "init"): + self._thread_local.init = True + self._thread_local.last_nonce = "" + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal["realm"] + nonce = self._thread_local.chal["nonce"] + qop = self._thread_local.chal.get("qop") + algorithm = self._thread_local.chal.get("algorithm") + opaque = self._thread_local.chal.get("opaque") + hash_utf8 = None + + if algorithm is None: + _algorithm = "MD5" + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == "MD5" or _algorithm == "MD5-SESS": + + def md5_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.md5(x).hexdigest() + + hash_utf8 = md5_utf8 + elif _algorithm == "SHA": + + def sha_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha1(x).hexdigest() + + hash_utf8 = sha_utf8 + elif _algorithm == "SHA-256": + + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha256(x).hexdigest() + + hash_utf8 = 
sha256_utf8 + elif _algorithm == "SHA-512": + + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode("utf-8") + return hashlib.sha512(x).hexdigest() + + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731 + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += f"?{p_parsed.query}" + + A1 = f"{self.username}:{realm}:{self.password}" + A2 = f"{method}:{path}" + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = f"{self._thread_local.nonce_count:08x}" + s = str(self._thread_local.nonce_count).encode("utf-8") + s += nonce.encode("utf-8") + s += time.ctime().encode("utf-8") + s += os.urandom(8) + + cnonce = hashlib.sha1(s).hexdigest()[:16] + if _algorithm == "MD5-SESS": + HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}") + + if not qop: + respdig = KD(HA1, f"{nonce}:{HA2}") + elif qop == "auth" or "auth" in qop.split(","): + noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}" + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = ( + f'username="{self.username}", realm="{realm}", nonce="{nonce}", ' + f'uri="{path}", response="{respdig}"' + ) + if opaque: + base += f', opaque="{opaque}"' + if algorithm: + base += f', algorithm="{algorithm}"' + if entdig: + base += f', digest="{entdig}"' + if qop: + base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"' + + return f"Digest {base}" + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get("www-authenticate", "") + + if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r"digest ", flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. 
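
A usage sketch of this 401 handshake against httpbin's digest endpoint (live network behaviour is assumed here, not guaranteed by this diff): the first attempt draws the challenge, `handle_401` parses it, and the request is replayed once with the computed `Digest` header.

    import requests
    from requests.auth import HTTPDigestAuth

    url = "https://httpbin.org/digest-auth/auth/user/pass"
    r = requests.get(url, auth=HTTPDigestAuth("user", "pass"))
    print(r.status_code)   # 200 after the internal 401 round-trip
    print(len(r.history))  # 1 — the original 401 is kept in history
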
+ r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers["Authorization"] = self.build_digest_header( + prep.method, prep.url + ) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers["Authorization"] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. + self._thread_local.pos = None + r.register_hook("response", self.handle_401) + r.register_hook("response", self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) + + def __ne__(self, other): + return not self == other diff --git a/templates/skills/spotify/dependencies/requests/certs.py b/templates/skills/spotify/dependencies/requests/certs.py new file mode 100644 index 00000000..be422c3e --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/certs.py @@ -0,0 +1,17 @@ +#!/usr/bin/env python + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" +from certifi import where + +if __name__ == "__main__": + print(where()) diff --git a/templates/skills/spotify/dependencies/requests/compat.py b/templates/skills/spotify/dependencies/requests/compat.py new file mode 100644 index 00000000..6776163c --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/compat.py @@ -0,0 +1,79 @@ +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module previously handled import compatibility issues +between Python 2 and Python 3. It remains for backwards +compatibility until the next major version. +""" + +try: + import chardet +except ImportError: + import charset_normalizer as chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = _ver[0] == 2 + +#: Python 3.x? +is_py3 = _ver[0] == 3 + +# json/simplejson module import resolution +has_simplejson = False +try: + import simplejson as json + + has_simplejson = True +except ImportError: + import json + +if has_simplejson: + from simplejson import JSONDecodeError +else: + from json import JSONDecodeError + +# Keep OrderedDict for backwards compatibility. 
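
`requests.certs` above simply re-exports `certifi.where()`; running it as a module prints the bundle path, and the same path works as an explicit `verify=` argument. A tiny sketch:

    import requests
    import requests.certs

    bundle = requests.certs.where()
    print(bundle)  # e.g. .../site-packages/certifi/cacert.pem

    # Equivalent to the default verification behaviour:
    requests.get("https://httpbin.org/get", verify=bundle)
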
+from collections import OrderedDict +from collections.abc import Callable, Mapping, MutableMapping +from http import cookiejar as cookielib +from http.cookies import Morsel +from io import StringIO + +# -------------- +# Legacy Imports +# -------------- +from urllib.parse import ( + quote, + quote_plus, + unquote, + unquote_plus, + urldefrag, + urlencode, + urljoin, + urlparse, + urlsplit, + urlunparse, +) +from urllib.request import ( + getproxies, + getproxies_environment, + parse_http_list, + proxy_bypass, + proxy_bypass_environment, +) + +builtin_str = str +str = str +bytes = bytes +basestring = (str, bytes) +numeric_types = (int, float) +integer_types = (int,) diff --git a/templates/skills/spotify/dependencies/requests/cookies.py b/templates/skills/spotify/dependencies/requests/cookies.py new file mode 100644 index 00000000..bf54ab23 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/cookies.py @@ -0,0 +1,561 @@ +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import calendar +import copy +import time + +from ._internal_utils import to_native_string +from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest: + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. 
+ """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get("Host"): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers["Host"], encoding="utf-8") + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse( + [ + parsed.scheme, + host, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment, + ] + ) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookielib has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError( + "Cookie headers should be added with add_unredirected_header()" + ) + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse: + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `cookielib` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookielib` to read. + + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, "_original_response") and response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get("Cookie") + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). 
+ """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name( + self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + ) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). 
+ """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if (domain is None or cookie.domain == domain) and ( + path is None or cookie.path == path + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super().__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if ( + hasattr(cookie.value, "startswith") + and cookie.value.startswith('"') + and cookie.value.endswith('"') + ): + cookie.value = cookie.value.replace('\\"', "") + return super().set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super().update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. 
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: + # if there are multiple cookies that meet passed in criteria + raise CookieConflictError( + f"There are multiple cookies with name, {name!r}" + ) + # we will eventually return this as long as no cookie conflict + toReturn = cookie.value + + if toReturn: + return toReturn + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop("_cookies_lock") + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if "_cookies_lock" not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, "copy"): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + +def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). 
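
Since the class docstring promises pickleability, a quick round-trip check (a sketch): `__getstate__` above drops the unpicklable RLock and `__setstate__` re-creates it.

    import pickle
    from requests.cookies import RequestsCookieJar

    jar = RequestsCookieJar()
    jar.set("session", "abc", domain="example.com", path="/")

    restored = pickle.loads(pickle.dumps(jar))
    print(dict(restored))  # {'session': 'abc'}
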
+ """ + result = { + "version": 0, + "name": name, + "value": value, + "port": None, + "domain": "", + "path": "/", + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, + } + + badargs = set(kwargs) - set(result) + if badargs: + raise TypeError( + f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + ) + + result.update(kwargs) + result["port_specified"] = bool(result["port"]) + result["domain_specified"] = bool(result["domain"]) + result["domain_initial_dot"] = result["domain"].startswith(".") + result["path_specified"] = bool(result["path"]) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel["max-age"]: + try: + expires = int(time.time() + int(morsel["max-age"])) + except ValueError: + raise TypeError(f"max-age: {morsel['max-age']} must be integer") + elif morsel["expires"]: + time_template = "%a, %d-%b-%Y %H:%M:%S GMT" + expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) + return create_cookie( + comment=morsel["comment"], + comment_url=bool(morsel["comment"]), + discard=False, + domain=morsel["domain"], + expires=expires, + name=morsel.key, + path=morsel["path"], + port=None, + rest={"HttpOnly": morsel["httponly"]}, + rfc2109=False, + secure=bool(morsel["secure"]), + value=morsel.value, + version=morsel["version"] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + :rtype: CookieJar + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError("You can only merge into CookieJar") + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/templates/skills/spotify/dependencies/requests/exceptions.py b/templates/skills/spotify/dependencies/requests/exceptions.py new file mode 100644 index 00000000..e1cedf88 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/exceptions.py @@ -0,0 +1,141 @@ +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. +""" +from urllib3.exceptions import HTTPError as BaseHTTPError + +from .compat import JSONDecodeError as CompatJSONDecodeError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. 
+ """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop("response", None) + self.response = response + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): + self.request = self.response.request + super().__init__(*args, **kwargs) + + +class InvalidJSONError(RequestException): + """A JSON error occurred.""" + + +class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError): + """Couldn't decode the text into json""" + + def __init__(self, *args, **kwargs): + """ + Construct the JSONDecodeError instance first with all + args. Then use it's args to construct the IOError so that + the json specific args aren't used as IOError specific args + and the error message from JSONDecodeError is preserved. + """ + CompatJSONDecodeError.__init__(self, *args) + InvalidJSONError.__init__(self, *self.args, **kwargs) + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL scheme (e.g. 
http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """The URL scheme provided is either invalid or unsupported.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/templates/skills/spotify/dependencies/requests/help.py b/templates/skills/spotify/dependencies/requests/help.py new file mode 100644 index 00000000..8fbcd656 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/help.py @@ -0,0 +1,134 @@ +"""Module containing bug report helper(s).""" + +import json +import platform +import ssl +import sys + +import idna +import urllib3 + +from . import __version__ as requests_version + +try: + import charset_normalizer +except ImportError: + charset_normalizer = None + +try: + import chardet +except ImportError: + chardet = None + +try: + from urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import cryptography + import OpenSSL + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 3.10.3 it will return + {'name': 'CPython', 'version': '3.10.3'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. 
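
Because of the hierarchy above, broad and narrow handling can be mixed freely; everything ultimately descends from `RequestException`, which is an `IOError`. A quick check:

    from requests import exceptions

    print(issubclass(exceptions.ConnectTimeout, exceptions.ConnectionError))  # True
    print(issubclass(exceptions.ConnectTimeout, exceptions.Timeout))          # True
    print(issubclass(exceptions.SSLError, exceptions.RequestException))       # True
    print(issubclass(exceptions.RequestException, IOError))                   # True
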
+ """ + implementation = platform.python_implementation() + + if implementation == "CPython": + implementation_version = platform.python_version() + elif implementation == "PyPy": + implementation_version = "{}.{}.{}".format( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + elif implementation == "Jython": + implementation_version = platform.python_version() # Complete Guess + elif implementation == "IronPython": + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = "Unknown" + + return {"name": implementation, "version": implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + "system": platform.system(), + "release": platform.release(), + } + except OSError: + platform_info = { + "system": "Unknown", + "release": "Unknown", + } + + implementation_info = _implementation() + urllib3_info = {"version": urllib3.__version__} + charset_normalizer_info = {"version": None} + chardet_info = {"version": None} + if charset_normalizer: + charset_normalizer_info = {"version": charset_normalizer.__version__} + if chardet: + chardet_info = {"version": chardet.__version__} + + pyopenssl_info = { + "version": None, + "openssl_version": "", + } + if OpenSSL: + pyopenssl_info = { + "version": OpenSSL.__version__, + "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", + } + cryptography_info = { + "version": getattr(cryptography, "__version__", ""), + } + idna_info = { + "version": getattr(idna, "__version__", ""), + } + + system_ssl = ssl.OPENSSL_VERSION_NUMBER + system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} + + return { + "platform": platform_info, + "implementation": implementation_info, + "system_ssl": system_ssl_info, + "using_pyopenssl": pyopenssl is not None, + "using_charset_normalizer": chardet is None, + "pyOpenSSL": pyopenssl_info, + "urllib3": urllib3_info, + "chardet": chardet_info, + "charset_normalizer": charset_normalizer_info, + "cryptography": cryptography_info, + "idna": idna_info, + "requests": { + "version": requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/templates/skills/spotify/dependencies/requests/hooks.py b/templates/skills/spotify/dependencies/requests/hooks.py new file mode 100644 index 00000000..d181ba2e --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/hooks.py @@ -0,0 +1,33 @@ +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. 
+""" +HOOKS = ["response"] + + +def default_hooks(): + return {event: [] for event in HOOKS} + + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or {} + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, "__call__"): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/templates/skills/spotify/dependencies/requests/models.py b/templates/skills/spotify/dependencies/requests/models.py new file mode 100644 index 00000000..617a4134 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/models.py @@ -0,0 +1,1034 @@ +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +import encodings.idna # noqa: F401 +from io import UnsupportedOperation + +from urllib3.exceptions import ( + DecodeError, + LocationParseError, + ProtocolError, + ReadTimeoutError, + SSLError, +) +from urllib3.fields import RequestField +from urllib3.filepost import encode_multipart_formdata +from urllib3.util import parse_url + +from ._internal_utils import to_native_string, unicode_is_ascii +from .auth import HTTPBasicAuth +from .compat import ( + Callable, + JSONDecodeError, + Mapping, + basestring, + builtin_str, + chardet, + cookielib, +) +from .compat import json as complexjson +from .compat import urlencode, urlsplit, urlunparse +from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header +from .exceptions import ( + ChunkedEncodingError, + ConnectionError, + ContentDecodingError, + HTTPError, + InvalidJSONError, + InvalidURL, +) +from .exceptions import JSONDecodeError as RequestsJSONDecodeError +from .exceptions import MissingSchema +from .exceptions import SSLError as RequestsSSLError +from .exceptions import StreamConsumedError +from .hooks import default_hooks +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( + check_header_validity, + get_auth_from_url, + guess_filename, + guess_json_utf, + iter_slices, + parse_header_links, + requote_uri, + stream_decode_response_unicode, + super_len, + to_key_val_list, +) + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin: + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = "/" + + url.append(path) + + query = p.query + if query: + url.append("?") + url.append(query) + + return "".join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, "read"): + return data + elif hasattr(data, "__iter__"): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, "__iter__"): + vs = [vs] + for v in vs: + if v is not None: + result.append( + ( + k.encode("utf-8") if isinstance(k, str) else k, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if not files: + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, "__iter__"): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, "read"): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin: + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError(f'Unsupported event specified, with event name "{event}"') + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, "__iter__"): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. 
If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param auth: Auth handler or (user, pass) tuple. + :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> req.prepare() + + """ + + def __init__( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return f"" + + def prepare(self): + """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest ` object, + containing the exact bytes that will be sent to the server. + + Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> r = req.prepare() + >>> r + + + >>> s = requests.Session() + >>> s.send(r) + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare( + self, + method=None, + url=None, + headers=None, + files=None, + data=None, + params=None, + auth=None, + cookies=None, + hooks=None, + json=None, + ): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
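
The `Request` → `PreparedRequest` split described above can be driven by hand to inspect exactly what will go on the wire before sending it:

    import requests

    req = requests.Request("GET", "https://httpbin.org/get",
                           params={"q": "1"}, auth=("user", "pass"))
    prepared = req.prepare()
    print(prepared.url)  # https://httpbin.org/get?q=1

    with requests.Session() as s:
        print(s.send(prepared).status_code)  # 200
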
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return f"" + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + import idna + + try: + host = idna.encode(host, uts46=True).decode("utf-8") + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/psf/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode("utf8") + else: + url = str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ":" in url and not url.lower().startswith("http"): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + raise MissingSchema( + f"Invalid URL {url!r}: No scheme supplied. " + f"Perhaps you meant https://{url}?" + ) + + if not host: + raise InvalidURL(f"Invalid URL {url!r}: No host supplied") + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL("URL has an invalid label.") + elif host.startswith(("*", ".")): + raise InvalidURL("URL has an invalid label.") + + # Carefully reconstruct the network location + netloc = auth or "" + if netloc: + netloc += "@" + netloc += host + if port: + netloc += f":{port}" + + # Bare domains aren't valid URLs. + if not path: + path = "/" + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = f"{query}&{enc_params}" + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. 
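
`prepare_url` above is where scheme and host validation happen; a bare host triggers the `MissingSchema` hint seen in the source. Sketch:

    import requests
    from requests.exceptions import MissingSchema

    try:
        requests.Request("GET", "example.com/path").prepare()
    except MissingSchema as err:
        print(err)  # Invalid URL 'example.com/path': No scheme supplied. ...
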
+ body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = "application/json" + + try: + body = complexjson.dumps(json, allow_nan=False) + except ValueError as ve: + raise InvalidJSONError(ve, request=self) + + if not isinstance(body, bytes): + body = body.encode("utf-8") + + is_stream = all( + [ + hasattr(data, "__iter__"), + not isinstance(data, (basestring, list, tuple, Mapping)), + ] + ) + + if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + body = data + + if getattr(body, "tell", None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except OSError: + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError( + "Streamed bodies and files are mutually exclusive." + ) + + if length: + self.headers["Content-Length"] = builtin_str(length) + else: + self.headers["Transfer-Encoding"] = "chunked" + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, "read"): + content_type = None + else: + content_type = "application/x-www-form-urlencoded" + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ("content-type" not in self.headers): + self.headers["Content-Type"] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers["Content-Length"] = builtin_str(length) + elif ( + self.method not in ("GET", "HEAD") + and self.headers.get("Content-Length") is None + ): + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. not GET or HEAD) + self.headers["Content-Length"] = "0" + + def prepare_auth(self, auth, url=""): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. 
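
As the streaming branch of `prepare_body` above shows, an iterable body that is not str/bytes/list/tuple/Mapping is streamed, and without a determinable length it falls through to `Transfer-Encoding: chunked`:

    import requests

    def gen():
        yield b"part-1"
        yield b"part-2"

    # A generator has no length, so super_len() fails and the chunked branch is taken.
    prepared = requests.Request("POST", "https://httpbin.org/post", data=gen()).prepare()
    print(prepared.headers.get("Transfer-Encoding"))  # chunked
    print(prepared.headers.get("Content-Length"))     # None
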
+ """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers["Cookie"] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response: + """The :class:`Response ` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + "_content", + "status_code", + "headers", + "url", + "history", + "encoding", + "reason", + "cookies", + "elapsed", + "request", + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + #: This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. + self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, "_content_consumed", True) + setattr(self, "raw", None) + + def __repr__(self): + return f"" + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. 
+ """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return "location" in self.headers and self.status_code in REDIRECT_STATI + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return "location" in self.headers and self.status_code in ( + codes.moved_permanently, + codes.permanent_redirect, + ) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + return chardet.detect(self.content)["encoding"] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, "stream"): + try: + yield from self.raw.stream(chunk_size, decode_content=True) + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + except SSLError as e: + raise RequestsSSLError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError( + f"chunk_size must be an int, it is instead a {type(chunk_size)}." 
+ ) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines( + self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None + ): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content( + chunk_size=chunk_size, decode_unicode=decode_unicode + ): + + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + yield from lines + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError("The content for this response was already consumed") + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``charset_normalizer`` or ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return "" + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors="replace") + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. + # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors="replace") + + return content + + def json(self, **kwargs): + r"""Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises requests.exceptions.JSONDecodeError: If the response body does not + contain valid json. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using charset_normalizer to make + # a best guess). 
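iter_content()/iter_lines() only avoid buffering when the request was sent with stream=True; otherwise they merely re-slice an already-consumed body. A hedged usage sketch (httpbin.org is a stand-in endpoint, so this assumes network access):

import requests

with requests.get("https://httpbin.org/stream/20", stream=True) as r:
    for line in r.iter_lines(chunk_size=8192):
        if line:  # skip keep-alive empty chunks
            print(line[:40])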
+ encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads(self.content.decode(encoding), **kwargs) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + except JSONDecodeError as e: + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + try: + return complexjson.loads(self.text, **kwargs) + except JSONDecodeError as e: + # Catch JSON-related errors and raise as requests.JSONDecodeError + # This aliases json.JSONDecodeError and simplejson.JSONDecodeError + raise RequestsJSONDecodeError(e.msg, e.doc, e.pos) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get("link") + + resolved_links = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get("rel") or link.get("url") + resolved_links[key] = link + + return resolved_links + + def raise_for_status(self): + """Raises :class:`HTTPError`, if one occurred.""" + + http_error_msg = "" + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode("utf-8") + except UnicodeDecodeError: + reason = self.reason.decode("iso-8859-1") + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = ( + f"{self.status_code} Client Error: {reason} for url: {self.url}" + ) + + elif 500 <= self.status_code < 600: + http_error_msg = ( + f"{self.status_code} Server Error: {reason} for url: {self.url}" + ) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, "release_conn", None) + if release_conn is not None: + release_conn() diff --git a/templates/skills/spotify/dependencies/requests/packages.py b/templates/skills/spotify/dependencies/requests/packages.py new file mode 100644 index 00000000..77c45c9e --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/packages.py @@ -0,0 +1,28 @@ +import sys + +try: + import chardet +except ImportError: + import warnings + + import charset_normalizer as chardet + + warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer") + +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. :) + +for package in ("urllib3", "idna"): + locals()[package] = __import__(package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == package or mod.startswith(f"{package}."): + sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] + +target = chardet.__name__ +for mod in list(sys.modules): + if mod == target or mod.startswith(f"{target}."): + target = target.replace(target, "chardet") + sys.modules[f"requests.packages.{target}"] = sys.modules[mod] +# Kinda cool, though, right? 
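raise_for_status() and json() above support the usual error-handling idiom. A sketch assuming requests >= 2.27, where requests.exceptions.JSONDecodeError is available; the endpoint is a stand-in:

import requests
from requests.exceptions import HTTPError, JSONDecodeError

try:
    r = requests.get("https://httpbin.org/status/404")
    if not r:  # Response truthiness delegates to r.ok, i.e. status < 400
        r.raise_for_status()  # raises HTTPError with .response attached
    data = r.json()  # raises requests.JSONDecodeError for non-JSON bodies
except HTTPError as exc:
    print("HTTP error:", exc.response.status_code)
except JSONDecodeError:
    print("body was not valid JSON")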
diff --git a/templates/skills/spotify/dependencies/requests/sessions.py b/templates/skills/spotify/dependencies/requests/sessions.py new file mode 100644 index 00000000..dbcf2a7b --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/sessions.py @@ -0,0 +1,833 @@ +""" +requests.sessions +~~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). +""" +import os +import sys +import time +from collections import OrderedDict +from datetime import timedelta + +from ._internal_utils import to_native_string +from .adapters import HTTPAdapter +from .auth import _basic_auth_str +from .compat import Mapping, cookielib, urljoin, urlparse +from .cookies import ( + RequestsCookieJar, + cookiejar_from_dict, + extract_cookies_to_jar, + merge_cookies, +) +from .exceptions import ( + ChunkedEncodingError, + ContentDecodingError, + InvalidSchema, + TooManyRedirects, +) +from .hooks import default_hooks, dispatch_hook + +# formerly defined here, reexposed here for backward compatibility +from .models import ( # noqa: F401 + DEFAULT_REDIRECT_LIMIT, + REDIRECT_STATI, + PreparedRequest, + Request, +) +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( # noqa: F401 + DEFAULT_PORTS, + default_headers, + get_auth_from_url, + get_environ_proxies, + get_netrc_auth, + requote_uri, + resolve_proxies, + rewind_body, + should_bypass_proxies, + to_key_val_list, +) + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == "win32": + preferred_clock = time.perf_counter +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. + + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. + """ + if session_hooks is None or session_hooks.get("response") == []: + return request_hooks + + if request_hooks is None or request_hooks.get("response") == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin: + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). 
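merge_setting() is what lets a per-request value override, or (when set to None) delete, a session-level default. A small sketch of that behavior:

from requests.sessions import merge_setting
from requests.structures import CaseInsensitiveDict

session_headers = {"Accept": "application/json", "X-Trace": "on"}
request_headers = {"Accept": "text/html", "X-Trace": None}

merged = merge_setting(request_headers, session_headers, dict_class=CaseInsensitiveDict)
assert merged == {"Accept": "text/html"}  # request wins; None removed X-Trace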
+ # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers["location"] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + location = location.encode("latin1") + return to_native_string(location, "utf8") + return None + + def should_strip_auth(self, old_url, new_url): + """Decide whether Authorization header should be removed when redirecting""" + old_parsed = urlparse(old_url) + new_parsed = urlparse(new_url) + if old_parsed.hostname != new_parsed.hostname: + return True + # Special case: allow http -> https redirect when using the standard + # ports. This isn't specified by RFC 7235, but is kept to avoid + # breaking backwards compatibility with older versions of requests + # that allowed any redirects on the same host. + if ( + old_parsed.scheme == "http" + and old_parsed.port in (80, None) + and new_parsed.scheme == "https" + and new_parsed.port in (443, None) + ): + return False + + # Handle default port usage corresponding to scheme. + changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if ( + not changed_scheme + and old_parsed.port in default_port + and new_parsed.port in default_port + ): + return False + + # Standard case: root URI must match + return changed_port or changed_scheme + + def resolve_redirects( + self, + resp, + req, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, + yield_requests=False, + **adapter_kwargs, + ): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects( + f"Exceeded {self.max_redirects} redirects.", response=resp + ) + + # Release the connection back into the pool. + resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith("//"): + parsed_rurl = urlparse(resp.url) + url = ":".join([to_native_string(parsed_rurl.scheme), url]) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == "" and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. 
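should_strip_auth() is the policy that decides whether credentials survive a redirect, and it can be probed directly. A sketch using made-up URLs:

from requests import Session

s = Session()
# same host, http -> https upgrade on default ports: Authorization is kept
assert not s.should_strip_auth("http://example.com/a", "https://example.com/b")
# different host: the Authorization header would be dropped
assert s.should_strip_auth("https://example.com/a", "https://other.example.net/b")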
+ if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/psf/requests/issues/1084 + if resp.status_code not in ( + codes.temporary_redirect, + codes.permanent_redirect, + ): + # https://github.com/psf/requests/issues/3490 + purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding") + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + headers.pop("Cookie", None) + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = prepared_request._body_position is not None and ( + "Content-Length" in headers or "Transfer-Encoding" in headers + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs, + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if "Authorization" in headers and self.should_strip_auth( + response.request.url, url + ): + # If we get redirected to a new host, we should strip out any + # authentication headers. + del headers["Authorization"] + + # .netrc might have more auth for us on our new host. + new_auth = get_netrc_auth(url) if self.trust_env else None + if new_auth is not None: + prepared_request.prepare_auth(new_auth) + + def rebuild_proxies(self, prepared_request, proxies): + """This method re-evaluates the proxy configuration by considering the + environment variables. If we are redirected to a URL covered by + NO_PROXY, we strip the proxy configuration. Otherwise, we set missing + proxy keys for this URL (in case they were stripped by a previous + redirect). + + This method also replaces the Proxy-Authorization header where + necessary. 
+ + :rtype: dict + """ + headers = prepared_request.headers + scheme = urlparse(prepared_request.url).scheme + new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env) + + if "Proxy-Authorization" in headers: + del headers["Proxy-Authorization"] + + try: + username, password = get_auth_from_url(new_proxies[scheme]) + except KeyError: + username, password = None, None + + # urllib3 handles proxy authorization for us in the standard adapter. + # Avoid appending this to TLS tunneled requests where it may be leaked. + if not scheme.startswith('https') and username and password: + headers["Proxy-Authorization"] = _basic_auth_str(username, password) + + return new_proxies + + def rebuild_method(self, prepared_request, response): + """When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = prepared_request.method + + # https://tools.ietf.org/html/rfc7231#section-6.4.4 + if response.status_code == codes.see_other and method != "HEAD": + method = "GET" + + # Do what the browsers do, despite standards... + # First, turn 302s into GETs. + if response.status_code == codes.found and method != "HEAD": + method = "GET" + + # Second, if a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in Issue 1704. + if response.status_code == codes.moved and method == "POST": + method = "GET" + + prepared_request.method = method + + +class Session(SessionRedirectMixin): + """A Requests session. + + Provides cookie persistence, connection-pooling, and configuration. + + Basic Usage:: + + >>> import requests + >>> s = requests.Session() + >>> s.get('https://httpbin.org/get') + <Response [200]> + + Or as a context manager:: + + >>> with requests.Session() as s: + ... s.get('https://httpbin.org/get') + <Response [200]> + """ + + __attrs__ = [ + "headers", + "cookies", + "auth", + "proxies", + "hooks", + "params", + "verify", + "cert", + "adapters", + "stream", + "trust_env", + "max_redirects", + ] + + def __init__(self): + + #: A case-insensitive dictionary of headers to be sent on each + #: :class:`Request <Request>` sent from this + #: :class:`Session <Session>`. + self.headers = default_headers() + + #: Default Authentication tuple or object to attach to + #: :class:`Request <Request>`. + self.auth = None + + #: Dictionary mapping protocol or protocol and host to the URL of the proxy + #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to + #: be used on each :class:`Request <Request>`. + self.proxies = {} + + #: Event-handling hooks. + self.hooks = default_hooks() + + #: Dictionary of querystring data to attach to each + #: :class:`Request <Request>`. The dictionary values may be lists for + #: representing multivalued query parameters. + self.params = {} + + #: Stream response content default. + self.stream = False + + #: SSL Verification default. + #: Defaults to `True`, requiring requests to verify the TLS certificate at the + #: remote end. + #: If verify is set to `False`, requests will accept any TLS certificate + #: presented by the server, and will ignore hostname mismatches and/or + #: expired certificates, which will make your application vulnerable to + #: man-in-the-middle (MitM) attacks. + #: Only set this to `False` for testing. + self.verify = True + + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. + self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised.
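The defaults initialized here (headers, cookies, params, adapters) persist for the life of the session. A sketch of that persistence, assuming network access; the User-Agent value is made up:

import requests

with requests.Session() as s:  # __exit__ calls s.close()
    s.headers.update({"User-Agent": "demo-agent/0.1"})
    s.get("https://httpbin.org/cookies/set?token=abc")  # server sets a cookie
    r = s.get("https://httpbin.org/cookies")  # jarred cookie is sent automatically
    print(r.json())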
#: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. + self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount("https://", HTTPAdapter()) + self.mount("http://", HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest <PreparedRequest>` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request <Request>` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies + ) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting( + request.headers, self.headers, dict_class=CaseInsensitiveDict + ), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): + """Constructs a :class:`Request <Request>`, prepares it and sends it. + Returns :class:`Response <Response>` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default.
+ :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. + :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + "timeout": timeout, + "allow_redirects": allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("GET", url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", True) + return self.request("OPTIONS", url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault("allow_redirects", False) + return self.request("HEAD", url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("POST", url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
+ :rtype: requests.Response + """ + + return self.request("PUT", url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("PATCH", url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request("DELETE", url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault("stream", self.stream) + kwargs.setdefault("verify", self.verify) + kwargs.setdefault("cert", self.cert) + if "proxies" not in kwargs: + kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError("You can only send PreparedRequests.") + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop("allow_redirects", True) + stream = kwargs.get("stream") + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook("response", hooks, r, **kwargs) + + # Persist cookies + if r.history: + + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Resolve redirects if allowed. + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). + if not allow_redirects: + try: + r._next = next( + self.resolve_redirects(r, request, yield_requests=True, **kwargs) + ) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. 
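Session.request() and the verb helpers wrap the same prepare/send flow shown above, which can also be driven by hand when the prepared URL, headers, or body need inspecting before transmission. A sketch:

from requests import Request, Session

s = Session()
req = Request("GET", "https://httpbin.org/get", params={"q": "wingman"})
prepped = s.prepare_request(req)  # merges session headers/cookies/auth
print(prepped.url)  # query string is already encoded here
resp = s.send(prepped, timeout=5)
print(resp.status_code)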
+ no_proxy = proxies.get("no_proxy") if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration + # and be compatible with cURL. + if verify is True or verify is None: + verify = ( + os.environ.get("REQUESTS_CA_BUNDLE") + or os.environ.get("CURL_CA_BUNDLE") + or verify + ) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. + + :rtype: requests.adapters.BaseAdapter + """ + for (prefix, adapter) in self.adapters.items(): + + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema(f"No connection adapters were found for {url!r}") + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = {attr: getattr(self, attr, None) for attr in self.__attrs__} + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + .. deprecated:: 1.0.0 + + This method has been deprecated since version 1.0.0 and is only kept for + backwards compatibility. New code should use :class:`~requests.sessions.Session` + to create a session. This may be removed at a future date. + + :rtype: Session + """ + return Session() diff --git a/templates/skills/spotify/dependencies/requests/status_codes.py b/templates/skills/spotify/dependencies/requests/status_codes.py new file mode 100644 index 00000000..4bd072be --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/status_codes.py @@ -0,0 +1,128 @@ +r""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + # Informational. 
+ 100: ("continue",), + 101: ("switching_protocols",), + 102: ("processing",), + 103: ("checkpoint",), + 122: ("uri_too_long", "request_uri_too_long"), + 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), + 201: ("created",), + 202: ("accepted",), + 203: ("non_authoritative_info", "non_authoritative_information"), + 204: ("no_content",), + 205: ("reset_content", "reset"), + 206: ("partial_content", "partial"), + 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), + 208: ("already_reported",), + 226: ("im_used",), + # Redirection. + 300: ("multiple_choices",), + 301: ("moved_permanently", "moved", "\\o-"), + 302: ("found",), + 303: ("see_other", "other"), + 304: ("not_modified",), + 305: ("use_proxy",), + 306: ("switch_proxy",), + 307: ("temporary_redirect", "temporary_moved", "temporary"), + 308: ( + "permanent_redirect", + "resume_incomplete", + "resume", + ), # "resume" and "resume_incomplete" to be removed in 3.0 + # Client Error. + 400: ("bad_request", "bad"), + 401: ("unauthorized",), + 402: ("payment_required", "payment"), + 403: ("forbidden",), + 404: ("not_found", "-o-"), + 405: ("method_not_allowed", "not_allowed"), + 406: ("not_acceptable",), + 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 408: ("request_timeout", "timeout"), + 409: ("conflict",), + 410: ("gone",), + 411: ("length_required",), + 412: ("precondition_failed", "precondition"), + 413: ("request_entity_too_large",), + 414: ("request_uri_too_large",), + 415: ("unsupported_media_type", "unsupported_media", "media_type"), + 416: ( + "requested_range_not_satisfiable", + "requested_range", + "range_not_satisfiable", + ), + 417: ("expectation_failed",), + 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), + 421: ("misdirected_request",), + 422: ("unprocessable_entity", "unprocessable"), + 423: ("locked",), + 424: ("failed_dependency", "dependency"), + 425: ("unordered_collection", "unordered"), + 426: ("upgrade_required", "upgrade"), + 428: ("precondition_required", "precondition"), + 429: ("too_many_requests", "too_many"), + 431: ("header_fields_too_large", "fields_too_large"), + 444: ("no_response", "none"), + 449: ("retry_with", "retry"), + 450: ("blocked_by_windows_parental_controls", "parental_controls"), + 451: ("unavailable_for_legal_reasons", "legal_reasons"), + 499: ("client_closed_request",), + # Server Error. 
+ 500: ("internal_server_error", "server_error", "/o\\", "✗"), + 501: ("not_implemented",), + 502: ("bad_gateway",), + 503: ("service_unavailable", "unavailable"), + 504: ("gateway_timeout",), + 505: ("http_version_not_supported", "http_version"), + 506: ("variant_also_negotiates",), + 507: ("insufficient_storage",), + 509: ("bandwidth_limit_exceeded", "bandwidth"), + 510: ("not_extended",), + 511: ("network_authentication_required", "network_auth", "network_authentication"), +} + +codes = LookupDict(name="status_codes") + + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(("\\", "/")): + setattr(codes, title.upper(), code) + + def doc(code): + names = ", ".join(f"``{n}``" for n in _codes[code]) + return "* %d: %s" % (code, names) + + global __doc__ + __doc__ = ( + __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes)) + if __doc__ is not None + else None + ) + + +_init() diff --git a/templates/skills/spotify/dependencies/requests/structures.py b/templates/skills/spotify/dependencies/requests/structures.py new file mode 100644 index 00000000..188e13e4 --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/structures.py @@ -0,0 +1,99 @@ +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from collections import OrderedDict + +from .compat import Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. 
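Once _init() above has run, both lookup styles from the module docstring work; CaseInsensitiveDict likewise ignores key case while remembering the original spelling. A quick sketch:

import requests
from requests.structures import CaseInsensitiveDict

assert requests.codes.ok == requests.codes["OK"] == 200
assert requests.codes.teapot == 418

headers = CaseInsensitiveDict({"Content-Type": "application/json"})
assert headers["content-type"] == "application/json"
assert list(headers) == ["Content-Type"]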
+ self._store[key.lower()] = (key, value) + + def __getitem__(self, key): + return self._store[key.lower()][1] + + def __delitem__(self, key): + del self._store[key.lower()] + + def __iter__(self): + return (casedkey for casedkey, mappedvalue in self._store.values()) + + def __len__(self): + return len(self._store) + + def lower_items(self): + """Like iteritems(), but with all lowercase keys.""" + return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items()) + + def __eq__(self, other): + if isinstance(other, Mapping): + other = CaseInsensitiveDict(other) + else: + return NotImplemented + # Compare insensitively + return dict(self.lower_items()) == dict(other.lower_items()) + + # Copy is required + def copy(self): + return CaseInsensitiveDict(self._store.values()) + + def __repr__(self): + return str(dict(self.items())) + + +class LookupDict(dict): + """Dictionary lookup object.""" + + def __init__(self, name=None): + self.name = name + super().__init__() + + def __repr__(self): + return f"<lookup '{self.name}'>" + + def __getitem__(self, key): + # We allow fall-through here, so values default to None + + return self.__dict__.get(key, None) + + def get(self, key, default=None): + return self.__dict__.get(key, default) diff --git a/templates/skills/spotify/dependencies/requests/utils.py b/templates/skills/spotify/dependencies/requests/utils.py new file mode 100644 index 00000000..a367417f --- /dev/null +++ b/templates/skills/spotify/dependencies/requests/utils.py @@ -0,0 +1,1094 @@ +""" +requests.utils +~~~~~~~~~~~~~~ + +This module provides utility functions that are used within Requests +that are also useful for external consumption. +""" + +import codecs +import contextlib +import io +import os +import re +import socket +import struct +import sys +import tempfile +import warnings +import zipfile +from collections import OrderedDict + +from urllib3.util import make_headers, parse_url + +from . import certs +from .__version__ import __version__ + +# to_native_string is unused here, but imported here for backwards compatibility +from ._internal_utils import ( # noqa: F401 + _HEADER_VALIDATORS_BYTE, + _HEADER_VALIDATORS_STR, + HEADER_VALIDATORS, + to_native_string, +) +from .compat import ( + Mapping, + basestring, + bytes, + getproxies, + getproxies_environment, + integer_types, +) +from .compat import parse_http_list as _parse_list_header +from .compat import ( + proxy_bypass, + proxy_bypass_environment, + quote, + str, + unquote, + urlparse, + urlunparse, +) +from .cookies import cookiejar_from_dict +from .exceptions import ( + FileModeWarning, + InvalidHeader, + InvalidURL, + UnrewindableBodyError, +) +from .structures import CaseInsensitiveDict + +NETRC_FILES = (".netrc", "_netrc") + +DEFAULT_CA_BUNDLE_PATH = certs.where() + +DEFAULT_PORTS = {"http": 80, "https": 443} + +# Ensure that ', ' is used to preserve previous delimiter behavior.
+DEFAULT_ACCEPT_ENCODING = ", ".join( + re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"]) +) + + +if sys.platform == "win32": + # provide a proxy_bypass version on Windows without DNS lookups + + def proxy_bypass_registry(host): + try: + import winreg + except ImportError: + return False + + try: + internetSettings = winreg.OpenKey( + winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Internet Settings", + ) + # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it + proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0]) + # ProxyOverride is almost always a string + proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0] + except (OSError, ValueError): + return False + if not proxyEnable or not proxyOverride: + return False + + # make a check value list from the registry entry: replace the + # '<local>' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(";") + # now check if we match one of the registry values. + for test in proxyOverride: + if test == "<local>": + if "." not in host: + return True + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + if re.match(test, host, re.I): + return True + return False + + def proxy_bypass(host): # noqa + """Return True, if the host should be bypassed. + + Checks proxy settings gathered from the environment, if specified, + or the registry. + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + + +def dict_to_sequence(d): + """Returns an internal sequence dictionary update.""" + + if hasattr(d, "items"): + d = d.items() + + return d + + +def super_len(o): + total_length = None + current_position = 0 + + if hasattr(o, "__len__"): + total_length = len(o) + + elif hasattr(o, "len"): + total_length = o.len + + elif hasattr(o, "fileno"): + try: + fileno = o.fileno() + except (io.UnsupportedOperation, AttributeError): + # AttributeError is a surprising exception, seeing as how we've just checked + # that `hasattr(o, 'fileno')`. It happens for objects obtained via + # `Tarfile.extractfile()`, per issue 5229. + pass + else: + total_length = os.fstat(fileno).st_size + + # Having used fstat to determine the file length, we need to + # confirm that this file was opened up in binary mode. + if "b" not in o.mode: + warnings.warn( + ( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. In Requests 3.0, support will be removed " + "for files in text mode." + ), + FileModeWarning, + ) + + if hasattr(o, "tell"): + try: + current_position = o.tell() + except OSError: + # This can happen in some weird situations, such as when the file + # is actually a special file descriptor like stdin. In this + # instance, we don't know what the length is, so set it to zero and + # let requests chunk it instead.
+ if total_length is not None: + current_position = total_length + else: + if hasattr(o, "seek") and total_length is None: + # StringIO and BytesIO have seek but no usable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except OSError: + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + netrc_file = os.environ.get("NETRC") + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = (f"~/{f}" for f in NETRC_FILES) + + try: + from netrc import NetrcParseError, netrc + + netrc_path = None + + for f in netrc_locations: + try: + loc = os.path.expanduser(f) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See https://bugs.python.org/issue20164 & + # https://github.com/psf/requests/issues/1846 + return + + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + + # Strip port numbers from netloc. This weird `if...encode`` dance is + # used for Python 3.2, which doesn't support unicode literals. + splitstr = b":" + if isinstance(url, str): + splitstr = splitstr.decode("ascii") + host = ri.netloc.split(splitstr)[0] + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = 0 if _netrc[0] else 1 + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, OSError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # App Engine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, "name", None) + if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">": + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. 
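super_len() reports the bytes remaining from the current position, which is what keeps Content-Length correct for partially read file-like bodies. A sketch:

import io
from requests.utils import super_len

buf = io.BytesIO(b"wingman")
assert super_len(buf) == 7
buf.read(3)  # consume part of the stream
assert super_len(buf) == 4  # only what is left from tell() onward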
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + if not prefix: + # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), + # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users + break + member = "/".join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, member.split("/")[-1]) + if not os.path.exists(extracted_path): + # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + with atomic_open(extracted_path) as file_handler: + file_handler.write(zip_file.read(member)) + return extracted_path + + +@contextlib.contextmanager +def atomic_open(filename): + """Write a file to the disk in an atomic fashion""" + tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) + try: + with os.fdopen(tmp_descriptor, "wb") as tmp_handler: + yield tmp_handler + os.replace(tmp_name, filename) + except BaseException: + os.remove(tmp_name) + raise + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError("cannot encode objects that are not 2-tuples") + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. 
+ + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. + + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if "=" not in item: + result[item] = None + continue + name, value = item.split("=", 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != "\\\\": + return value.replace("\\\\", "\\").replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn( + ( + "In requests 3.0, get_encodings_from_content will be removed. For " + "more information, please see the discussion on issue #2266. 
(This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + charset_re = re.compile(r']', flags=re.I) + pragma_re = re.compile(r']', flags=re.I) + xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]') + + return ( + charset_re.findall(content) + + pragma_re.findall(content) + + xml_re.findall(content) + ) + + +def _parse_content_type_header(header): + """Returns content type and parameters from given header + + :param header: string + :return: tuple containing content type and dictionary of + parameters + """ + + tokens = header.split(";") + content_type, params = tokens[0].strip(), tokens[1:] + params_dict = {} + items_to_strip = "\"' " + + for param in params: + param = param.strip() + if param: + key, value = param, True + index_of_equals = param.find("=") + if index_of_equals != -1: + key = param[:index_of_equals].strip(items_to_strip) + value = param[index_of_equals + 1 :].strip(items_to_strip) + params_dict[key.lower()] = value + return content_type, params_dict + + +def get_encoding_from_headers(headers): + """Returns encodings from given HTTP Header Dict. + + :param headers: dictionary to extract encoding from. + :rtype: str + """ + + content_type = headers.get("content-type") + + if not content_type: + return None + + content_type, params = _parse_content_type_header(content_type) + + if "charset" in params: + return params["charset"].strip("'\"") + + if "text" in content_type: + return "ISO-8859-1" + + if "application/json" in content_type: + # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset + return "utf-8" + + +def stream_decode_response_unicode(iterator, r): + """Stream decodes an iterator.""" + + if r.encoding is None: + yield from iterator + return + + decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace") + for chunk in iterator: + rv = decoder.decode(chunk) + if rv: + yield rv + rv = decoder.decode(b"", final=True) + if rv: + yield rv + + +def iter_slices(string, slice_length): + """Iterate over slices of a string.""" + pos = 0 + if slice_length is None or slice_length <= 0: + slice_length = len(string) + while pos < len(string): + yield string[pos : pos + slice_length] + pos += slice_length + + +def get_unicode_from_response(r): + """Returns the requested content back in unicode. + + :param r: Response object to get unicode content from. + + Tried: + + 1. charset from content-type + 2. fall back and replace all unicode characters + + :rtype: str + """ + warnings.warn( + ( + "In requests 3.0, get_unicode_from_response will be removed. For " + "more information, please see the discussion on issue #2266. (This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) + + tried_encodings = [] + + # Try charset from content-type + encoding = get_encoding_from_headers(r.headers) + + if encoding: + try: + return str(r.content, encoding) + except UnicodeError: + tried_encodings.append(encoding) + + # Fall back: + try: + return str(r.content, encoding, errors="replace") + except TypeError: + return r.content + + +# The unreserved URI characters (RFC 3986) +UNRESERVED_SET = frozenset( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~" +) + + +def unquote_unreserved(uri): + """Un-escape any percent-escape sequences in a URI that are unreserved + characters. This leaves all reserved, illegal and non-ASCII bytes encoded. 
+ + :rtype: str + """ + parts = uri.split("%") + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL(f"Invalid percent-escape sequence: '{h}'") + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = f"%{parts[i]}" + else: + parts[i] = f"%{parts[i]}" + return "".join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] + netaddr, bits = net.split("/") + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack(">I", bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except OSError: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count("/") == 1: + try: + mask = int(string_network.split("/")[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split("/")[0]) + except OSError: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + def get_proxy(key): + return os.environ.get(key) or os.environ.get(key.upper()) + + # First check whether no_proxy is defined. 
If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy("no_proxy") + parsed = urlparse(url) + + if parsed.hostname is None: + # URLs don't always have hostnames, e.g. file:/// urls. + return True + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += f":{parsed.port}" + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + with set_environ("no_proxy", no_proxy_arg): + # parsed.hostname can be `None` in cases such as a file URI. + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get("all")) + + proxy_keys = [ + urlparts.scheme + "://" + urlparts.hostname, + urlparts.scheme, + "all://" + urlparts.hostname, + "all", + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def resolve_proxies(request, proxies, trust_env=True): + """This method takes proxy information from a request and configuration + input to resolve a mapping of target proxies. This will consider settings + such a NO_PROXY to strip proxy configurations. + + :param request: Request or PreparedRequest + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + :param trust_env: Boolean declaring whether to trust environment configs + + :rtype: dict + """ + proxies = proxies if proxies is not None else {} + url = request.url + scheme = urlparse(url).scheme + no_proxy = proxies.get("no_proxy") + new_proxies = proxies.copy() + + if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy): + environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) + + proxy = environ_proxies.get(scheme, environ_proxies.get("all")) + + if proxy: + new_proxies.setdefault(scheme, proxy) + return new_proxies + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. 
+
+    :rtype: str
+    """
+    return f"{name}/{__version__}"
+
+
+def default_headers():
+    """
+    :rtype: requests.structures.CaseInsensitiveDict
+    """
+    return CaseInsensitiveDict(
+        {
+            "User-Agent": default_user_agent(),
+            "Accept-Encoding": DEFAULT_ACCEPT_ENCODING,
+            "Accept": "*/*",
+            "Connection": "keep-alive",
+        }
+    )
+
+
+def parse_header_links(value):
+    """Return a list of parsed link headers proxies.
+
+    i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+    :rtype: list
+    """
+
+    links = []
+
+    replace_chars = " '\""
+
+    value = value.strip(replace_chars)
+    if not value:
+        return links
+
+    for val in re.split(", *<", value):
+        try:
+            url, params = val.split(";", 1)
+        except ValueError:
+            url, params = val, ""
+
+        link = {"url": url.strip("<> '\"")}
+
+        for param in params.split(";"):
+            try:
+                key, value = param.split("=")
+            except ValueError:
+                break
+
+            link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+        links.append(link)
+
+    return links
+
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = "\x00".encode("ascii")  # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+    """
+    :rtype: str
+    """
+    # JSON always starts with two ASCII characters, so detection is as
+    # easy as counting the nulls and from their location and count
+    # determine the encoding. Also detect a BOM, if present.
+    sample = data[:4]
+    if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+        return "utf-32"  # BOM included
+    if sample[:3] == codecs.BOM_UTF8:
+        return "utf-8-sig"  # BOM included, MS style (discouraged)
+    if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+        return "utf-16"  # BOM included
+    nullcount = sample.count(_null)
+    if nullcount == 0:
+        return "utf-8"
+    if nullcount == 2:
+        if sample[::2] == _null2:  # 1st and 3rd are null
+            return "utf-16-be"
+        if sample[1::2] == _null2:  # 2nd and 4th are null
+            return "utf-16-le"
+        # Did not detect 2 valid UTF-16 ascii-range characters
+    if nullcount == 3:
+        if sample[:3] == _null3:
+            return "utf-32-be"
+        if sample[1:] == _null3:
+            return "utf-32-le"
+        # Did not detect a valid UTF-32 ascii-range character
+    return None
+
+
+def prepend_scheme_if_needed(url, new_scheme):
+    """Given a URL that may or may not have a scheme, prepend the given scheme.
+    Does not replace a present scheme with the one provided as an argument.
+
+    :rtype: str
+    """
+    parsed = parse_url(url)
+    scheme, auth, host, port, path, query, fragment = parsed
+
+    # A defect in urlparse determines that there isn't a netloc present in some
+    # urls. We previously assumed parsing was overly cautious, and swapped the
+    # netloc and path. Due to a lack of tests on the original defect, this is
+    # maintained with parse_url for backwards compatibility.
+    netloc = parsed.netloc
+    if not netloc:
+        netloc, path = path, netloc
+
+    if auth:
+        # parse_url doesn't provide the netloc with auth
+        # so we'll add it ourselves.
+        netloc = "@".join([auth, netloc])
+    if scheme is None:
+        scheme = new_scheme
+    if path is None:
+        path = ""
+
+    return urlunparse((scheme, netloc, path, "", query, fragment))
+
+
+def get_auth_from_url(url):
+    """Given a url with authentication components, extract them into a tuple of
+    username,password.
+ + :rtype: (str,str) + """ + parsed = urlparse(url) + + try: + auth = (unquote(parsed.username), unquote(parsed.password)) + except (AttributeError, TypeError): + auth = ("", "") + + return auth + + +def check_header_validity(header): + """Verifies that header parts don't contain leading whitespace + reserved characters, or return characters. + + :param header: tuple, in the format (name, value). + """ + name, value = header + _validate_header_part(header, name, 0) + _validate_header_part(header, value, 1) + + +def _validate_header_part(header, header_part, header_validator_index): + if isinstance(header_part, str): + validator = _HEADER_VALIDATORS_STR[header_validator_index] + elif isinstance(header_part, bytes): + validator = _HEADER_VALIDATORS_BYTE[header_validator_index] + else: + raise InvalidHeader( + f"Header part ({header_part!r}) from {header} " + f"must be of type str or bytes, not {type(header_part)}" + ) + + if not validator.match(header_part): + header_kind = "name" if header_validator_index == 0 else "value" + raise InvalidHeader( + f"Invalid leading whitespace, reserved character(s), or return" + f"character(s) in header {header_kind}: {header_part!r}" + ) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit("@", 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, "")) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. + """ + body_seek = getattr(prepared_request.body, "seek", None) + if body_seek is not None and isinstance( + prepared_request._body_position, integer_types + ): + try: + body_seek(prepared_request._body_position) + except OSError: + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect." + ) + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/templates/skills/spotify/dependencies/six-1.16.0.dist-info/INSTALLER b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/six-1.16.0.dist-info/LICENSE b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/LICENSE new file mode 100644 index 00000000..de663311 --- /dev/null +++ b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/LICENSE @@ -0,0 +1,18 @@ +Copyright (c) 2010-2020 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/templates/skills/spotify/dependencies/six-1.16.0.dist-info/METADATA b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/METADATA new file mode 100644 index 00000000..6d7525c2 --- /dev/null +++ b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/METADATA @@ -0,0 +1,49 @@ +Metadata-Version: 2.1 +Name: six +Version: 1.16.0 +Summary: Python 2 and 3 compatibility utilities +Home-page: https://github.com/benjaminp/six +Author: Benjamin Peterson +Author-email: benjamin@python.org +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 3 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.* + +.. image:: https://img.shields.io/pypi/v/six.svg + :target: https://pypi.org/project/six/ + :alt: six on PyPI + +.. image:: https://travis-ci.org/benjaminp/six.svg?branch=master + :target: https://travis-ci.org/benjaminp/six + :alt: six on TravisCI + +.. image:: https://readthedocs.org/projects/six/badge/?version=latest + :target: https://six.readthedocs.io/ + :alt: six's documentation on Read the Docs + +.. image:: https://img.shields.io/badge/license-MIT-green.svg + :target: https://github.com/benjaminp/six/blob/master/LICENSE + :alt: MIT License badge + +Six is a Python 2 and 3 compatibility library. It provides utility functions +for smoothing over the differences between the Python versions with the goal of +writing Python code that is compatible on both Python versions. See the +documentation for more information on what is provided. + +Six supports Python 2.7 and 3.3+. It is contained in only one Python +file, so it can be easily copied into your project. (The copyright and license +notice must be retained.) + +Online documentation is at https://six.readthedocs.io/. + +Bugs can be reported to https://github.com/benjaminp/six. The code can also +be found there. 
+
+
diff --git a/templates/skills/spotify/dependencies/six-1.16.0.dist-info/RECORD b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/RECORD
new file mode 100644
index 00000000..17e2e083
--- /dev/null
+++ b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/RECORD
@@ -0,0 +1,8 @@
+__pycache__/six.cpython-311.pyc,,
+six-1.16.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+six-1.16.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066
+six-1.16.0.dist-info/METADATA,sha256=VQcGIFCAEmfZcl77E5riPCN4v2TIsc_qtacnjxKHJoI,1795
+six-1.16.0.dist-info/RECORD,,
+six-1.16.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
+six-1.16.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4
+six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549
diff --git a/templates/skills/spotify/dependencies/six-1.16.0.dist-info/WHEEL b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/WHEEL
new file mode 100644
index 00000000..01b8fc7d
--- /dev/null
+++ b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/templates/skills/spotify/dependencies/six-1.16.0.dist-info/top_level.txt b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/top_level.txt
new file mode 100644
index 00000000..ffe2fce4
--- /dev/null
+++ b/templates/skills/spotify/dependencies/six-1.16.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+six
diff --git a/templates/skills/spotify/dependencies/six.py b/templates/skills/spotify/dependencies/six.py
new file mode 100644
index 00000000..4e15675d
--- /dev/null
+++ b/templates/skills/spotify/dependencies/six.py
@@ -0,0 +1,998 @@
+# Copyright (c) 2010-2020 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.16.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", 
"http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", 
"urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class 
Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over 
the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""") + + +if sys.version_info[:2] > (3,): + exec_("""def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper(wrapper, wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + return functools.partial(_update_wrapper, wrapped=wrapped, + assigned=assigned, updated=updated) + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d['__orig_bases__'] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + if hasattr(cls, '__qualname__'): + orig_vars['__qualname__'] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def ensure_binary(s, encoding='utf-8', errors='strict'): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding='utf-8', errors='strict'): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding='utf-8', errors='strict'): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/INSTALLER b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/LICENSE.md b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/LICENSE.md new file mode 100644 index 00000000..f121286c --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Paul Lamere + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
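The vendored `six.py` above registers itself on `sys.meta_path`, so `six.moves.*` submodules resolve lazily on first import. A minimal sketch of what that enables once the file is on the import path (illustrative only, not part of the vendored sources):

```python
# Illustrative only: exercises the vendored six module added above.
import six
from six.moves.urllib.parse import urlencode  # resolved via six's meta path importer

# One code path that works on both Python 2 and 3:
query = urlencode({"q": "weezer", "type": "track"})
title = six.ensure_str(b"caf\xc3\xa9")  # bytes -> str (decoded as UTF-8 on Python 3)

print(six.PY3, query, title)
```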
diff --git a/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/METADATA b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/METADATA new file mode 100644 index 00000000..beeb19c0 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/METADATA @@ -0,0 +1,100 @@ +Metadata-Version: 2.1 +Name: spotipy +Version: 2.23.0 +Summary: A light weight Python library for the Spotify Web API +Home-page: https://spotipy.readthedocs.org/ +Author: @plamere +Author-email: paul@echonest.com +License: MIT +Project-URL: Source, https://github.com/plamere/spotipy +Description-Content-Type: text/markdown +License-File: LICENSE.md +Requires-Dist: redis (>=3.5.3) +Requires-Dist: requests (>=2.25.0) +Requires-Dist: six (>=1.15.0) +Requires-Dist: urllib3 (>=1.26.0) +Requires-Dist: redis (<4.0.0) ; python_version < "3.4" +Provides-Extra: doc +Requires-Dist: Sphinx (>=1.5.2) ; extra == 'doc' +Provides-Extra: test +Requires-Dist: mock (==2.0.0) ; extra == 'test' + +# Spotipy + +##### A light weight Python library for the Spotify Web API + +![Tests](https://github.com/plamere/spotipy/workflows/Tests/badge.svg?branch=master) [![Documentation Status](https://readthedocs.org/projects/spotipy/badge/?version=latest)](https://spotipy.readthedocs.io/en/latest/?badge=latest) + +## Documentation + +Spotipy's full documentation is online at [Spotipy Documentation](http://spotipy.readthedocs.org/). + +## Installation + +```bash +pip install spotipy +``` + +alternatively, for Windows users + +```bash +py -m pip install spotipy +``` + +or upgrade + +```bash +pip install spotipy --upgrade +``` + +## Quick Start + +A full set of examples can be found in the [online documentation](http://spotipy.readthedocs.org/) and in the [Spotipy examples directory](https://github.com/plamere/spotipy/tree/master/examples). + +To get started, install spotipy and create an app on https://developers.spotify.com/. +Add your new ID and SECRET to your environment: + +### Without user authentication + +```python +import spotipy +from spotipy.oauth2 import SpotifyClientCredentials + +sp = spotipy.Spotify(auth_manager=SpotifyClientCredentials(client_id="YOUR_APP_CLIENT_ID", + client_secret="YOUR_APP_CLIENT_SECRET")) + +results = sp.search(q='weezer', limit=20) +for idx, track in enumerate(results['tracks']['items']): + print(idx, track['name']) +``` + +### With user authentication + +A redirect URI must be added to your application at [My Dashboard](https://developer.spotify.com/dashboard/applications) to access user authenticated features. + +```python +import spotipy +from spotipy.oauth2 import SpotifyOAuth + +sp = spotipy.Spotify(auth_manager=SpotifyOAuth(client_id="YOUR_APP_CLIENT_ID", + client_secret="YOUR_APP_CLIENT_SECRET", + redirect_uri="YOUR_APP_REDIRECT_URI", + scope="user-library-read")) + +results = sp.current_user_saved_tracks() +for idx, item in enumerate(results['items']): + track = item['track'] + print(idx, track['artists'][0]['name'], " – ", track['name']) +``` + +## Reporting Issues + +For common questions please check our [FAQ](FAQ.md). + +You can ask questions about Spotipy on +[Stack Overflow](http://stackoverflow.com/questions/ask). +Don’t forget to add the *Spotipy* tag, and any other relevant tags as well, before posting. + +If you have suggestions, bugs or other issues specific to this library, +file them [here](https://github.com/plamere/spotipy/issues). +Or just send a pull request. 
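The Quick Start above only fetches the first page of saved tracks. As a small sketch (reusing the README's placeholder credentials), the client's `next()` helper can walk every page; it follows the response's `next` cursor and returns `None` once the last page is reached:

```python
import spotipy
from spotipy.oauth2 import SpotifyOAuth

sp = spotipy.Spotify(auth_manager=SpotifyOAuth(client_id="YOUR_APP_CLIENT_ID",
                                               client_secret="YOUR_APP_CLIENT_SECRET",
                                               redirect_uri="YOUR_APP_REDIRECT_URI",
                                               scope="user-library-read"))

# Results are paged; sp.next(results) fetches the following page
# and returns None when there are no more pages.
results = sp.current_user_saved_tracks(limit=50)
while results:
    for item in results["items"]:
        track = item["track"]
        print(track["artists"][0]["name"], "-", track["name"])
    results = sp.next(results)
```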
diff --git a/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/RECORD b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/RECORD new file mode 100644 index 00000000..d10efba4 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/RECORD @@ -0,0 +1,19 @@ +spotipy-2.23.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +spotipy-2.23.0.dist-info/LICENSE.md,sha256=tsEBFbMqRzu097t-Ecqy-G6uyDeOFEqFcrWtQuS3I_8,1068 +spotipy-2.23.0.dist-info/METADATA,sha256=I0-D4pcOm-EPF5NayigGceAVQ-Jrz4z47H4YatDSq8s,3253 +spotipy-2.23.0.dist-info/RECORD,, +spotipy-2.23.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +spotipy-2.23.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +spotipy-2.23.0.dist-info/top_level.txt,sha256=dSwjjk5rAZzv_g4EkBg7OkAUjW6k02wB8jjA29BrSa0,8 +spotipy/__init__.py,sha256=6DLF5dHdanFTLYpi1yrQKlC44FgLiTTa-hycEQ53ZFw,159 +spotipy/__pycache__/__init__.cpython-311.pyc,, +spotipy/__pycache__/cache_handler.cpython-311.pyc,, +spotipy/__pycache__/client.cpython-311.pyc,, +spotipy/__pycache__/exceptions.cpython-311.pyc,, +spotipy/__pycache__/oauth2.cpython-311.pyc,, +spotipy/__pycache__/util.cpython-311.pyc,, +spotipy/cache_handler.py,sha256=1PA6rG_MApoNr-KlDQU_WSB1CYEX5W-tKViQF6K30vs,6181 +spotipy/client.py,sha256=Eqsyi1o49YbxI_QU5kpZte52JA8HTX_m2AziIj4jPaI,75075 +spotipy/exceptions.py,sha256=s4lt7yTc8PeAiJGnShH7nu7HKXKGVnRouRaHT51Xo4w,568 +spotipy/oauth2.py,sha256=rXWNs77R5u8oLSpZbigFsMctuQlfayEiA5I-8_7D0kU,53619 +spotipy/util.py,sha256=Cg8U74tNWsq3Lz_VOttp_qYvnGiVLXr6f8IPeoKA9-w,3996 diff --git a/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/REQUESTED b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/WHEEL b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/WHEEL new file mode 100644 index 00000000..1f37c02f --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/top_level.txt b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/top_level.txt new file mode 100644 index 00000000..9dc31a22 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy-2.23.0.dist-info/top_level.txt @@ -0,0 +1 @@ +spotipy diff --git a/templates/skills/spotify/dependencies/spotipy/__init__.py b/templates/skills/spotify/dependencies/spotipy/__init__.py new file mode 100644 index 00000000..7f3d8599 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy/__init__.py @@ -0,0 +1,5 @@ +from .cache_handler import * # noqa +from .client import * # noqa +from .exceptions import * # noqa +from .oauth2 import * # noqa +from .util import * # noqa diff --git a/templates/skills/spotify/dependencies/spotipy/cache_handler.py b/templates/skills/spotify/dependencies/spotipy/cache_handler.py new file mode 100644 index 00000000..9a6d703b --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy/cache_handler.py @@ -0,0 +1,210 @@ +__all__ = [ + 'CacheHandler', + 'CacheFileHandler', + 'DjangoSessionCacheHandler', + 'FlaskSessionCacheHandler', + 'MemoryCacheHandler', + 'RedisCacheHandler'] + +import errno +import json +import logging +import os +from spotipy.util import 
CLIENT_CREDS_ENV_VARS + +from redis import RedisError + +logger = logging.getLogger(__name__) + + +class CacheHandler(): + """ + An abstraction layer for handling the caching and retrieval of + authorization tokens. + + Custom extensions of this class must implement get_cached_token + and save_token_to_cache methods with the same input and output + structure as the CacheHandler class. + """ + + def get_cached_token(self): + """ + Get and return a token_info dictionary object. + """ + # return token_info + raise NotImplementedError() + + def save_token_to_cache(self, token_info): + """ + Save a token_info dictionary object to the cache and return None. + """ + raise NotImplementedError() + return None + + +class CacheFileHandler(CacheHandler): + """ + Handles reading and writing cached Spotify authorization tokens + as json files on disk. + """ + + def __init__(self, + cache_path=None, + username=None, + encoder_cls=None): + """ + Parameters: + * cache_path: May be supplied, will otherwise be generated + (takes precedence over `username`) + * username: May be supplied or set as environment variable + (will set `cache_path` to `.cache-{username}`) + * encoder_cls: May be supplied as a means of overwriting the + default serializer used for writing tokens to disk + """ + self.encoder_cls = encoder_cls + if cache_path: + self.cache_path = cache_path + else: + cache_path = ".cache" + username = (username or os.getenv(CLIENT_CREDS_ENV_VARS["client_username"])) + if username: + cache_path += "-" + str(username) + self.cache_path = cache_path + + def get_cached_token(self): + token_info = None + + try: + f = open(self.cache_path) + token_info_string = f.read() + f.close() + token_info = json.loads(token_info_string) + + except IOError as error: + if error.errno == errno.ENOENT: + logger.debug("cache does not exist at: %s", self.cache_path) + else: + logger.warning("Couldn't read cache at: %s", self.cache_path) + + return token_info + + def save_token_to_cache(self, token_info): + try: + f = open(self.cache_path, "w") + f.write(json.dumps(token_info, cls=self.encoder_cls)) + f.close() + except IOError: + logger.warning('Couldn\'t write token to cache at: %s', + self.cache_path) + + +class MemoryCacheHandler(CacheHandler): + """ + A cache handler that simply stores the token info in memory as an + instance attribute of this class. The token info will be lost when this + instance is freed. + """ + + def __init__(self, token_info=None): + """ + Parameters: + * token_info: The token info to store in memory. Can be None. + """ + self.token_info = token_info + + def get_cached_token(self): + return self.token_info + + def save_token_to_cache(self, token_info): + self.token_info = token_info + + +class DjangoSessionCacheHandler(CacheHandler): + """ + A cache handler that stores the token info in the session framework + provided by Django. 
+ + Read more at https://docs.djangoproject.com/en/3.2/topics/http/sessions/ + """ + + def __init__(self, request): + """ + Parameters: + * request: HttpRequest object provided by Django for every + incoming request + """ + self.request = request + + def get_cached_token(self): + token_info = None + try: + token_info = self.request.session['token_info'] + except KeyError: + logger.debug("Token not found in the session") + + return token_info + + def save_token_to_cache(self, token_info): + try: + self.request.session['token_info'] = token_info + except Exception as e: + logger.warning("Error saving token to cache: " + str(e)) + + +class FlaskSessionCacheHandler(CacheHandler): + """ + A cache handler that stores the token info in the session framework + provided by flask. + """ + + def __init__(self, session): + self.session = session + + def get_cached_token(self): + token_info = None + try: + token_info = self.session["token_info"] + except KeyError: + logger.debug("Token not found in the session") + + return token_info + + def save_token_to_cache(self, token_info): + try: + self.session["token_info"] = token_info + except Exception as e: + logger.warning("Error saving token to cache: " + str(e)) + + +class RedisCacheHandler(CacheHandler): + """ + A cache handler that stores the token info in the Redis. + """ + + def __init__(self, redis, key=None): + """ + Parameters: + * redis: Redis object provided by redis-py library + (https://github.com/redis/redis-py) + * key: May be supplied, will otherwise be generated + (takes precedence over `token_info`) + """ + self.redis = redis + self.key = key if key else 'token_info' + + def get_cached_token(self): + token_info = None + try: + token_info = self.redis.get(self.key) + if token_info: + return json.loads(token_info) + except RedisError as e: + logger.warning('Error getting token from cache: ' + str(e)) + + return token_info + + def save_token_to_cache(self, token_info): + try: + self.redis.set(self.key, json.dumps(token_info)) + except RedisError as e: + logger.warning('Error saving token to cache: ' + str(e)) diff --git a/templates/skills/spotify/dependencies/spotipy/client.py b/templates/skills/spotify/dependencies/spotipy/client.py new file mode 100644 index 00000000..d3b918f0 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy/client.py @@ -0,0 +1,2035 @@ +# -*- coding: utf-8 -*- + +""" A simple and thin Python library for the Spotify Web API """ + +__all__ = ["Spotify", "SpotifyException"] + +import json +import logging +import re +import warnings + +import requests +import six +import urllib3 + +from spotipy.exceptions import SpotifyException + +from collections import defaultdict + +logger = logging.getLogger(__name__) + + +class Spotify(object): + """ + Example usage:: + + import spotipy + + urn = 'spotify:artist:3jOstUTkEu2JkjvRdBA5Gu' + sp = spotipy.Spotify() + + artist = sp.artist(urn) + print(artist) + + user = sp.user('plamere') + print(user) + """ + max_retries = 3 + default_retry_codes = (429, 500, 502, 503, 504) + country_codes = [ + "AD", + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "EC", + "SV", + "EE", + "FI", + "FR", + "DE", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "ID", + "IE", + "IT", + "JP", + "LV", + "LI", + "LT", + "LU", + "MY", + "MT", + "MX", + "MC", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "ES", + "SK", + "SE", + "CH", + "TW", + "TR", + "GB", + "US", + "UY"] + + # 
Spotify URI scheme defined in [1], and the ID format as base-62 in [2]. + # + # Unfortunately the IANA specification is out of date and doesn't include the new types + # show and episode. Additionally, for the user URI, it does not specify which characters + # are valid for usernames, so the assumption is alphanumeric which coincidentally are also + # the same ones base-62 uses. + # In limited manual exploration this seems to hold true, as new accounts are assigned an + # identifier that looks like the base-62 of all other IDs, but some older accounts only have + # numbers and even older ones seemed to have been allowed to freely pick this name. + # + # [1] https://www.iana.org/assignments/uri-schemes/prov/spotify + # [2] https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids + _regex_spotify_uri = r'^spotify:(?:(?P<type>track|artist|album|playlist|show|episode):(?P<id>[0-9A-Za-z]+)|user:(?P<username>[0-9A-Za-z]+):playlist:(?P<playlistid>[0-9A-Za-z]+))$' # noqa: E501 + + # Spotify URLs are defined at [1]. The assumption is made that they are all + # pointing to open.spotify.com, so a regex is used to parse them as well, + # instead of a more complex URL parsing function. + # + # [1] https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids + _regex_spotify_url = r'^(http[s]?:\/\/)?open.spotify.com\/(?P<type>track|artist|album|playlist|show|episode|user)\/(?P<id>[0-9A-Za-z]+)(\?.*)?$' # noqa: E501 + + _regex_base62 = r'^[0-9A-Za-z]+$' + + def __init__( + self, + auth=None, + requests_session=True, + client_credentials_manager=None, + oauth_manager=None, + auth_manager=None, + proxies=None, + requests_timeout=5, + status_forcelist=None, + retries=max_retries, + status_retries=max_retries, + backoff_factor=0.3, + language=None, + ): + """ + Creates a Spotify API client. + + :param auth: An access token (optional) + :param requests_session: + A Requests session object or a truthy value to create one. + A falsy value disables sessions. + It should generally be a good idea to keep sessions enabled + for performance reasons (connection pooling). + :param client_credentials_manager: + SpotifyClientCredentials object + :param oauth_manager: + SpotifyOAuth object + :param auth_manager: + SpotifyOAuth, SpotifyClientCredentials, + or SpotifyImplicitGrant object + :param proxies: + Definition of proxies (optional). + See Requests doc https://2.python-requests.org/en/master/user/advanced/#proxies + :param requests_timeout: + Tell Requests to stop waiting for a response after a given + number of seconds + :param status_forcelist: + Tell requests what type of status codes retries should occur on + :param retries: + Total number of retries to allow + :param status_retries: + Number of times to retry on bad status codes + :param backoff_factor: + A backoff factor to apply between attempts after the second try + See urllib3 https://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html + :param language: + The language parameter advertises what language the user prefers to see.
+ See ISO-639-1 language code: https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes + """ + self.prefix = "https://api.spotify.com/v1/" + self._auth = auth + self.client_credentials_manager = client_credentials_manager + self.oauth_manager = oauth_manager + self.auth_manager = auth_manager + self.proxies = proxies + self.requests_timeout = requests_timeout + self.status_forcelist = status_forcelist or self.default_retry_codes + self.backoff_factor = backoff_factor + self.retries = retries + self.status_retries = status_retries + self.language = language + + if isinstance(requests_session, requests.Session): + self._session = requests_session + else: + if requests_session: # Build a new session. + self._build_session() + else: # Use the Requests API module as a "session". + self._session = requests.api + + def set_auth(self, auth): + self._auth = auth + + @property + def auth_manager(self): + return self._auth_manager + + @auth_manager.setter + def auth_manager(self, auth_manager): + if auth_manager is not None: + self._auth_manager = auth_manager + else: + self._auth_manager = ( + self.client_credentials_manager or self.oauth_manager + ) + + def __del__(self): + """Make sure the connection (pool) gets closed""" + if isinstance(self._session, requests.Session): + self._session.close() + + def _build_session(self): + self._session = requests.Session() + retry = urllib3.Retry( + total=self.retries, + connect=None, + read=False, + allowed_methods=frozenset(['GET', 'POST', 'PUT', 'DELETE']), + status=self.status_retries, + backoff_factor=self.backoff_factor, + status_forcelist=self.status_forcelist) + + adapter = requests.adapters.HTTPAdapter(max_retries=retry) + self._session.mount('http://', adapter) + self._session.mount('https://', adapter) + + def _auth_headers(self): + if self._auth: + return {"Authorization": "Bearer {0}".format(self._auth)} + if not self.auth_manager: + return {} + try: + token = self.auth_manager.get_access_token(as_dict=False) + except TypeError: + token = self.auth_manager.get_access_token() + return {"Authorization": "Bearer {0}".format(token)} + + def _internal_call(self, method, url, payload, params): + args = dict(params=params) + if not url.startswith("http"): + url = self.prefix + url + headers = self._auth_headers() + + if "content_type" in args["params"]: + headers["Content-Type"] = args["params"]["content_type"] + del args["params"]["content_type"] + if payload: + args["data"] = payload + else: + headers["Content-Type"] = "application/json" + if payload: + args["data"] = json.dumps(payload) + + if self.language is not None: + headers["Accept-Language"] = self.language + + logger.debug('Sending %s to %s with Params: %s Headers: %s and Body: %r ', + method, url, args.get("params"), headers, args.get('data')) + + try: + response = self._session.request( + method, url, headers=headers, proxies=self.proxies, + timeout=self.requests_timeout, **args + ) + + response.raise_for_status() + results = response.json() + except requests.exceptions.HTTPError as http_error: + response = http_error.response + try: + json_response = response.json() + error = json_response.get("error", {}) + msg = error.get("message") + reason = error.get("reason") + except ValueError: + # if the response cannot be decoded into JSON (which raises a ValueError), + # then try to decode it into text + + # if we receive an empty string (which is falsy), then replace it with `None` + msg = response.text or None + reason = None + + logger.error( + 'HTTP Error for %s to %s with Params: %s returned 
%s due to %s', + method, url, args.get("params"), response.status_code, msg + ) + + raise SpotifyException( + response.status_code, + -1, + "%s:\n %s" % (response.url, msg), + reason=reason, + headers=response.headers, + ) + except requests.exceptions.RetryError as retry_error: + request = retry_error.request + logger.error('Max Retries reached') + try: + reason = retry_error.args[0].reason + except (IndexError, AttributeError): + reason = None + raise SpotifyException( + 429, + -1, + "%s:\n %s" % (request.path_url, "Max Retries"), + reason=reason + ) + except ValueError: + results = None + + logger.debug('RESULTS: %s', results) + return results + + def _get(self, url, args=None, payload=None, **kwargs): + if args: + kwargs.update(args) + + return self._internal_call("GET", url, payload, kwargs) + + def _post(self, url, args=None, payload=None, **kwargs): + if args: + kwargs.update(args) + return self._internal_call("POST", url, payload, kwargs) + + def _delete(self, url, args=None, payload=None, **kwargs): + if args: + kwargs.update(args) + return self._internal_call("DELETE", url, payload, kwargs) + + def _put(self, url, args=None, payload=None, **kwargs): + if args: + kwargs.update(args) + return self._internal_call("PUT", url, payload, kwargs) + + def next(self, result): + """ returns the next result given a paged result + + Parameters: + - result - a previously returned paged result + """ + if result["next"]: + return self._get(result["next"]) + else: + return None + + def previous(self, result): + """ returns the previous result given a paged result + + Parameters: + - result - a previously returned paged result + """ + if result["previous"]: + return self._get(result["previous"]) + else: + return None + + def track(self, track_id, market=None): + """ returns a single track given the track's ID, URI or URL + + Parameters: + - track_id - a spotify URI, URL or ID + - market - an ISO 3166-1 alpha-2 country code. + """ + + trid = self._get_id("track", track_id) + return self._get("tracks/" + trid, market=market) + + def tracks(self, tracks, market=None): + """ returns a list of tracks given a list of track IDs, URIs, or URLs + + Parameters: + - tracks - a list of spotify URIs, URLs or IDs. Maximum: 50 IDs. + - market - an ISO 3166-1 alpha-2 country code. + """ + + tlist = [self._get_id("track", t) for t in tracks] + return self._get("tracks/?ids=" + ",".join(tlist), market=market) + + def artist(self, artist_id): + """ returns a single artist given the artist's ID, URI or URL + + Parameters: + - artist_id - an artist ID, URI or URL + """ + + trid = self._get_id("artist", artist_id) + return self._get("artists/" + trid) + + def artists(self, artists): + """ returns a list of artists given the artist IDs, URIs, or URLs + + Parameters: + - artists - a list of artist IDs, URIs or URLs + """ + + tlist = [self._get_id("artist", a) for a in artists] + return self._get("artists/?ids=" + ",".join(tlist)) + + def artist_albums( + self, artist_id, album_type=None, country=None, limit=20, offset=0 + ): + """ Get Spotify catalog information about an artist's albums + + Parameters: + - artist_id - the artist ID, URI or URL + - album_type - 'album', 'single', 'appears_on', 'compilation' + - country - limit the response to one particular country. 
+ - limit - the number of albums to return + - offset - the index of the first album to return + """ + + trid = self._get_id("artist", artist_id) + return self._get( + "artists/" + trid + "/albums", + album_type=album_type, + country=country, + limit=limit, + offset=offset, + ) + + def artist_top_tracks(self, artist_id, country="US"): + """ Get Spotify catalog information about an artist's top 10 tracks + by country. + + Parameters: + - artist_id - the artist ID, URI or URL + - country - limit the response to one particular country. + """ + + trid = self._get_id("artist", artist_id) + return self._get("artists/" + trid + "/top-tracks", country=country) + + def artist_related_artists(self, artist_id): + """ Get Spotify catalog information about artists similar to an + identified artist. Similarity is based on analysis of the + Spotify community's listening history. + + Parameters: + - artist_id - the artist ID, URI or URL + """ + trid = self._get_id("artist", artist_id) + return self._get("artists/" + trid + "/related-artists") + + def album(self, album_id, market=None): + """ returns a single album given the album's ID, URIs or URL + + Parameters: + - album_id - the album ID, URI or URL + - market - an ISO 3166-1 alpha-2 country code + """ + + trid = self._get_id("album", album_id) + if market is not None: + return self._get("albums/" + trid + '?market=' + market) + else: + return self._get("albums/" + trid) + + def album_tracks(self, album_id, limit=50, offset=0, market=None): + """ Get Spotify catalog information about an album's tracks + + Parameters: + - album_id - the album ID, URI or URL + - limit - the number of items to return + - offset - the index of the first item to return + - market - an ISO 3166-1 alpha-2 country code. + + """ + + trid = self._get_id("album", album_id) + return self._get( + "albums/" + trid + "/tracks/", limit=limit, offset=offset, market=market + ) + + def albums(self, albums, market=None): + """ returns a list of albums given the album IDs, URIs, or URLs + + Parameters: + - albums - a list of album IDs, URIs or URLs + - market - an ISO 3166-1 alpha-2 country code + """ + + tlist = [self._get_id("album", a) for a in albums] + if market is not None: + return self._get("albums/?ids=" + ",".join(tlist) + '&market=' + market) + else: + return self._get("albums/?ids=" + ",".join(tlist)) + + def show(self, show_id, market=None): + """ returns a single show given the show's ID, URIs or URL + + Parameters: + - show_id - the show ID, URI or URL + - market - an ISO 3166-1 alpha-2 country code. + The show must be available in the given market. + If user-based authorization is in use, the user's country + takes precedence. If neither market nor user country are + provided, the content is considered unavailable for the client. + """ + + trid = self._get_id("show", show_id) + return self._get("shows/" + trid, market=market) + + def shows(self, shows, market=None): + """ returns a list of shows given the show IDs, URIs, or URLs + + Parameters: + - shows - a list of show IDs, URIs or URLs + - market - an ISO 3166-1 alpha-2 country code. + Only shows available in the given market will be returned. + If user-based authorization is in use, the user's country + takes precedence. If neither market nor user country are + provided, the content is considered unavailable for the client. 
+ """ + + tlist = [self._get_id("show", s) for s in shows] + return self._get("shows/?ids=" + ",".join(tlist), market=market) + + def show_episodes(self, show_id, limit=50, offset=0, market=None): + """ Get Spotify catalog information about a show's episodes + + Parameters: + - show_id - the show ID, URI or URL + - limit - the number of items to return + - offset - the index of the first item to return + - market - an ISO 3166-1 alpha-2 country code. + Only episodes available in the given market will be returned. + If user-based authorization is in use, the user's country + takes precedence. If neither market nor user country are + provided, the content is considered unavailable for the client. + """ + + trid = self._get_id("show", show_id) + return self._get( + "shows/" + trid + "/episodes/", limit=limit, offset=offset, market=market + ) + + def episode(self, episode_id, market=None): + """ returns a single episode given the episode's ID, URIs or URL + + Parameters: + - episode_id - the episode ID, URI or URL + - market - an ISO 3166-1 alpha-2 country code. + The episode must be available in the given market. + If user-based authorization is in use, the user's country + takes precedence. If neither market nor user country are + provided, the content is considered unavailable for the client. + """ + + trid = self._get_id("episode", episode_id) + return self._get("episodes/" + trid, market=market) + + def episodes(self, episodes, market=None): + """ returns a list of episodes given the episode IDs, URIs, or URLs + + Parameters: + - episodes - a list of episode IDs, URIs or URLs + - market - an ISO 3166-1 alpha-2 country code. + Only episodes available in the given market will be returned. + If user-based authorization is in use, the user's country + takes precedence. If neither market nor user country are + provided, the content is considered unavailable for the client. + """ + + tlist = [self._get_id("episode", e) for e in episodes] + return self._get("episodes/?ids=" + ",".join(tlist), market=market) + + def search(self, q, limit=10, offset=0, type="track", market=None): + """ searches for an item + + Parameters: + - q - the search query (see how to write a query in the + official documentation https://developer.spotify.com/documentation/web-api/reference/search/) # noqa + - limit - the number of items to return (min = 1, default = 10, max = 50). The limit is applied + within each type, not on the total response. + - offset - the index of the first item to return + - type - the types of items to return. One or more of 'artist', 'album', + 'track', 'playlist', 'show', and 'episode'. If multiple types are desired, + pass in a comma separated string; e.g., 'track,album,episode'. + - market - An ISO 3166-1 alpha-2 country code or the string + from_token. + """ + return self._get( + "search", q=q, limit=limit, offset=offset, type=type, market=market + ) + + def search_markets(self, q, limit=10, offset=0, type="track", markets=None, total=None): + """ (experimental) Searches multiple markets for an item + + Parameters: + - q - the search query (see how to write a query in the + official documentation https://developer.spotify.com/documentation/web-api/reference/search/) # noqa + - limit - the number of items to return (min = 1, default = 10, max = 50). If a search is to be done on multiple + markets, then this limit is applied to each market. (e.g. search US, CA, MX each with a limit of 10). + If multiple types are specified, this applies to each type. 
+ - offset - the index of the first item to return + - type - the types of items to return. One or more of 'artist', 'album', + 'track', 'playlist', 'show', or 'episode'. If multiple types are desired, pass in a comma separated string. + - markets - A list of ISO 3166-1 alpha-2 country codes. Search all country markets by default. + - total - the total number of results to return across multiple markets and types. + """ + warnings.warn( + "Searching multiple markets is an experimental feature. " + "Please be aware that this method's inputs and outputs can change in the future.", + UserWarning, + ) + if not markets: + markets = self.country_codes + + if not (isinstance(markets, list) or isinstance(markets, tuple)): + markets = [] + + warnings.warn( + "Searching multiple markets is poorly performing.", + UserWarning, + ) + return self._search_multiple_markets(q, limit, offset, type, markets, total) + + def user(self, user): + """ Gets basic profile information about a Spotify User + + Parameters: + - user - the id of the user + """ + return self._get("users/" + user) + + def current_user_playlists(self, limit=50, offset=0): + """ Get current user's playlists without requiring their profile + Parameters: + - limit - the number of items to return + - offset - the index of the first item to return + """ + return self._get("me/playlists", limit=limit, offset=offset) + + def playlist(self, playlist_id, fields=None, market=None, additional_types=("track",)): + """ Gets playlist by id. + + Parameters: + - playlist_id - the id of the playlist + - fields - which fields to return + - market - An ISO 3166-1 alpha-2 country code or the + string from_token. + - additional_types - list of item types to return. + valid types are: track and episode + """ + plid = self._get_id("playlist", playlist_id) + return self._get( + "playlists/%s" % (plid), + fields=fields, + market=market, + additional_types=",".join(additional_types), + ) + + def playlist_tracks( + self, + playlist_id, + fields=None, + limit=100, + offset=0, + market=None, + additional_types=("track",) + ): + """ Get full details of the tracks of a playlist. + + Parameters: + - playlist_id - the playlist ID, URI or URL + - fields - which fields to return + - limit - the maximum number of tracks to return + - offset - the index of the first track to return + - market - an ISO 3166-1 alpha-2 country code. + - additional_types - list of item types to return. + valid types are: track and episode + """ + warnings.warn( + "You should use `playlist_items(playlist_id, ...," + "additional_types=('track',))` instead", + DeprecationWarning, + ) + return self.playlist_items(playlist_id, fields, limit, offset, + market, additional_types) + + def playlist_items( + self, + playlist_id, + fields=None, + limit=100, + offset=0, + market=None, + additional_types=("track", "episode") + ): + """ Get full details of the tracks and episodes of a playlist. + + Parameters: + - playlist_id - the playlist ID, URI or URL + - fields - which fields to return + - limit - the maximum number of tracks to return + - offset - the index of the first track to return + - market - an ISO 3166-1 alpha-2 country code. + - additional_types - list of item types to return.
+ valid types are: track and episode + """ + plid = self._get_id("playlist", playlist_id) + return self._get( + "playlists/%s/tracks" % (plid), + limit=limit, + offset=offset, + fields=fields, + market=market, + additional_types=",".join(additional_types) + ) + + def playlist_cover_image(self, playlist_id): + """ Get cover image of a playlist. + + Parameters: + - playlist_id - the playlist ID, URI or URL + """ + plid = self._get_id("playlist", playlist_id) + return self._get("playlists/%s/images" % (plid)) + + def playlist_upload_cover_image(self, playlist_id, image_b64): + """ Replace the image used to represent a specific playlist + + Parameters: + - playlist_id - the id of the playlist + - image_b64 - image data as a Base64 encoded JPEG image string + (maximum payload size is 256 KB) + """ + plid = self._get_id("playlist", playlist_id) + return self._put( + "playlists/{}/images".format(plid), + payload=image_b64, + content_type="image/jpeg", + ) + + def user_playlist(self, user, playlist_id=None, fields=None, market=None): + warnings.warn( + "You should use `playlist(playlist_id)` instead", + DeprecationWarning, + ) + + """ Gets a single playlist of a user + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - fields - which fields to return + """ + if playlist_id is None: + return self._get("users/%s/starred" % user) + return self.playlist(playlist_id, fields=fields, market=market) + + def user_playlist_tracks( + self, + user=None, + playlist_id=None, + fields=None, + limit=100, + offset=0, + market=None, + ): + warnings.warn( + "You should use `playlist_tracks(playlist_id)` instead", + DeprecationWarning, + ) + + """ Get full details of the tracks of a playlist owned by a user. + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - fields - which fields to return + - limit - the maximum number of tracks to return + - offset - the index of the first track to return + - market - an ISO 3166-1 alpha-2 country code. 
+ """ + return self.playlist_tracks( + playlist_id, + limit=limit, + offset=offset, + fields=fields, + market=market, + ) + + def user_playlists(self, user, limit=50, offset=0): + """ Gets playlists of a user + + Parameters: + - user - the id of the usr + - limit - the number of items to return + - offset - the index of the first item to return + """ + return self._get( + "users/%s/playlists" % user, limit=limit, offset=offset + ) + + def user_playlist_create(self, user, name, public=True, collaborative=False, description=""): + """ Creates a playlist for a user + + Parameters: + - user - the id of the user + - name - the name of the playlist + - public - is the created playlist public + - collaborative - is the created playlist collaborative + - description - the description of the playlist + """ + data = { + "name": name, + "public": public, + "collaborative": collaborative, + "description": description + } + + return self._post("users/%s/playlists" % (user,), payload=data) + + def user_playlist_change_details( + self, + user, + playlist_id, + name=None, + public=None, + collaborative=None, + description=None, + ): + warnings.warn( + "You should use `playlist_change_details(playlist_id, ...)` instead", + DeprecationWarning, + ) + """ Changes a playlist's name and/or public/private state + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - name - optional name of the playlist + - public - optional is the playlist public + - collaborative - optional is the playlist collaborative + - description - optional description of the playlist + """ + + return self.playlist_change_details(playlist_id, name, public, + collaborative, description) + + def user_playlist_unfollow(self, user, playlist_id): + """ Unfollows (deletes) a playlist for a user + + Parameters: + - user - the id of the user + - name - the name of the playlist + """ + warnings.warn( + "You should use `current_user_unfollow_playlist(playlist_id)` instead", + DeprecationWarning, + ) + return self.current_user_unfollow_playlist(playlist_id) + + def user_playlist_add_tracks( + self, user, playlist_id, tracks, position=None + ): + warnings.warn( + "You should use `playlist_add_items(playlist_id, tracks)` instead", + DeprecationWarning, + ) + """ Adds tracks to a playlist + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - tracks - a list of track URIs, URLs or IDs + - position - the position to add the tracks + """ + tracks = [self._get_uri("track", tid) for tid in tracks] + return self.playlist_add_items(playlist_id, tracks, position) + + def user_playlist_add_episodes( + self, user, playlist_id, episodes, position=None + ): + warnings.warn( + "You should use `playlist_add_items(playlist_id, episodes)` instead", + DeprecationWarning, + ) + """ Adds episodes to a playlist + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - episodes - a list of track URIs, URLs or IDs + - position - the position to add the episodes + """ + episodes = [self._get_uri("episode", tid) for tid in episodes] + return self.playlist_add_items(playlist_id, episodes, position) + + def user_playlist_replace_tracks(self, user, playlist_id, tracks): + """ Replace all tracks in a playlist for a user + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - tracks - the list of track ids to add to the playlist + """ + warnings.warn( + "You should use `playlist_replace_items(playlist_id, tracks)` instead", + 
DeprecationWarning, + ) + return self.playlist_replace_items(playlist_id, tracks) + + def user_playlist_reorder_tracks( + self, + user, + playlist_id, + range_start, + insert_before, + range_length=1, + snapshot_id=None, + ): + """ Reorder tracks in a playlist from a user + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - range_start - the position of the first track to be reordered + - range_length - optional the number of tracks to be reordered + (default: 1) + - insert_before - the position where the tracks should be + inserted + - snapshot_id - optional playlist's snapshot ID + """ + warnings.warn( + "You should use `playlist_reorder_items(playlist_id, ...)` instead", + DeprecationWarning, + ) + return self.playlist_reorder_items(playlist_id, range_start, + insert_before, range_length, + snapshot_id) + + def user_playlist_remove_all_occurrences_of_tracks( + self, user, playlist_id, tracks, snapshot_id=None + ): + """ Removes all occurrences of the given tracks from the given playlist + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - tracks - the list of track ids to remove from the playlist + - snapshot_id - optional id of the playlist snapshot + + """ + warnings.warn( + "You should use `playlist_remove_all_occurrences_of_items" + "(playlist_id, tracks)` instead", + DeprecationWarning, + ) + return self.playlist_remove_all_occurrences_of_items(playlist_id, + tracks, + snapshot_id) + + def user_playlist_remove_specific_occurrences_of_tracks( + self, user, playlist_id, tracks, snapshot_id=None + ): + """ Removes all occurrences of the given tracks from the given playlist + + Parameters: + - user - the id of the user + - playlist_id - the id of the playlist + - tracks - an array of objects containing Spotify URIs of the + tracks to remove with their current positions in the + playlist. For example: + [ { "uri":"4iV5W9uYEdYUVa79Axb7Rh", "positions":[2] }, + { "uri":"1301WleyT98MSxVHPZCA6M", "positions":[7] } ] + - snapshot_id - optional id of the playlist snapshot + """ + warnings.warn( + "You should use `playlist_remove_specific_occurrences_of_items" + "(playlist_id, tracks)` instead", + DeprecationWarning, + ) + plid = self._get_id("playlist", playlist_id) + ftracks = [] + for tr in tracks: + ftracks.append( + { + "uri": self._get_uri("track", tr["uri"]), + "positions": tr["positions"], + } + ) + payload = {"tracks": ftracks} + if snapshot_id: + payload["snapshot_id"] = snapshot_id + return self._delete( + "users/%s/playlists/%s/tracks" % (user, plid), payload=payload + ) + + def user_playlist_follow_playlist(self, playlist_owner_id, playlist_id): + """ + Add the current authenticated user as a follower of a playlist. + + Parameters: + - playlist_owner_id - the user id of the playlist owner + - playlist_id - the id of the playlist + + """ + warnings.warn( + "You should use `current_user_follow_playlist(playlist_id)` instead", + DeprecationWarning, + ) + return self.current_user_follow_playlist(playlist_id) + + def user_playlist_is_following( + self, playlist_owner_id, playlist_id, user_ids + ): + """ + Check to see if the given users are following the given playlist + + Parameters: + - playlist_owner_id - the user id of the playlist owner + - playlist_id - the id of the playlist + - user_ids - the ids of the users that you want to check to see + if they follow the playlist. Maximum: 5 ids. 
+ + """ + warnings.warn( + "You should use `playlist_is_following(playlist_id, user_ids)` instead", + DeprecationWarning, + ) + return self.playlist_is_following(playlist_id, user_ids) + + def playlist_change_details( + self, + playlist_id, + name=None, + public=None, + collaborative=None, + description=None, + ): + """ Changes a playlist's name and/or public/private state, + collaborative state, and/or description + + Parameters: + - playlist_id - the id of the playlist + - name - optional name of the playlist + - public - optional is the playlist public + - collaborative - optional is the playlist collaborative + - description - optional description of the playlist + """ + + data = {} + if isinstance(name, six.string_types): + data["name"] = name + if isinstance(public, bool): + data["public"] = public + if isinstance(collaborative, bool): + data["collaborative"] = collaborative + if isinstance(description, six.string_types): + data["description"] = description + return self._put( + "playlists/%s" % (self._get_id("playlist", playlist_id)), payload=data + ) + + def current_user_unfollow_playlist(self, playlist_id): + """ Unfollows (deletes) a playlist for the current authenticated + user + + Parameters: + - name - the name of the playlist + """ + return self._delete( + "playlists/%s/followers" % (playlist_id) + ) + + def playlist_add_items( + self, playlist_id, items, position=None + ): + """ Adds tracks/episodes to a playlist + + Parameters: + - playlist_id - the id of the playlist + - items - a list of track/episode URIs or URLs + - position - the position to add the tracks + """ + plid = self._get_id("playlist", playlist_id) + ftracks = [self._get_uri("track", tid) for tid in items] + return self._post( + "playlists/%s/tracks" % (plid), + payload=ftracks, + position=position, + ) + + def playlist_replace_items(self, playlist_id, items): + """ Replace all tracks/episodes in a playlist + + Parameters: + - playlist_id - the id of the playlist + - items - list of track/episode ids to comprise playlist + """ + plid = self._get_id("playlist", playlist_id) + ftracks = [self._get_uri("track", tid) for tid in items] + payload = {"uris": ftracks} + return self._put( + "playlists/%s/tracks" % (plid), payload=payload + ) + + def playlist_reorder_items( + self, + playlist_id, + range_start, + insert_before, + range_length=1, + snapshot_id=None, + ): + """ Reorder tracks in a playlist + + Parameters: + - playlist_id - the id of the playlist + - range_start - the position of the first track to be reordered + - range_length - optional the number of tracks to be reordered + (default: 1) + - insert_before - the position where the tracks should be + inserted + - snapshot_id - optional playlist's snapshot ID + """ + plid = self._get_id("playlist", playlist_id) + payload = { + "range_start": range_start, + "range_length": range_length, + "insert_before": insert_before, + } + if snapshot_id: + payload["snapshot_id"] = snapshot_id + return self._put( + "playlists/%s/tracks" % (plid), payload=payload + ) + + def playlist_remove_all_occurrences_of_items( + self, playlist_id, items, snapshot_id=None + ): + """ Removes all occurrences of the given tracks/episodes from the given playlist + + Parameters: + - playlist_id - the id of the playlist + - items - list of track/episode ids to remove from the playlist + - snapshot_id - optional id of the playlist snapshot + + """ + + plid = self._get_id("playlist", playlist_id) + ftracks = [self._get_uri("track", tid) for tid in items] + payload = {"tracks": [{"uri": 
track} for track in ftracks]} + if snapshot_id: + payload["snapshot_id"] = snapshot_id + return self._delete( + "playlists/%s/tracks" % (plid), payload=payload + ) + + def playlist_remove_specific_occurrences_of_items( + self, playlist_id, items, snapshot_id=None + ): + """ Removes specific occurrences of the given tracks from the given playlist + + Parameters: + - playlist_id - the id of the playlist + - items - an array of objects containing Spotify URIs of the + tracks/episodes to remove with their current positions in + the playlist. For example: + [ { "uri":"4iV5W9uYEdYUVa79Axb7Rh", "positions":[2] }, + { "uri":"1301WleyT98MSxVHPZCA6M", "positions":[7] } ] + - snapshot_id - optional id of the playlist snapshot + """ + + plid = self._get_id("playlist", playlist_id) + ftracks = [] + for tr in items: + ftracks.append( + { + "uri": self._get_uri("track", tr["uri"]), + "positions": tr["positions"], + } + ) + payload = {"tracks": ftracks} + if snapshot_id: + payload["snapshot_id"] = snapshot_id + return self._delete( + "playlists/%s/tracks" % (plid), payload=payload + ) + + def current_user_follow_playlist(self, playlist_id): + """ + Add the current authenticated user as a follower of a playlist. + + Parameters: + - playlist_id - the id of the playlist + + """ + return self._put( + "playlists/{}/followers".format(playlist_id) + ) + + def playlist_is_following( + self, playlist_id, user_ids + ): + """ + Check to see if the given users are following the given playlist + + Parameters: + - playlist_id - the id of the playlist + - user_ids - the ids of the users that you want to check to see + if they follow the playlist. Maximum: 5 ids. + + """ + endpoint = "playlists/{}/followers/contains?ids={}" + return self._get( + endpoint.format(playlist_id, ",".join(user_ids)) + ) + + def me(self): + """ Get detailed profile information about the current user. + An alias for the 'current_user' method. + """ + return self._get("me/") + + def current_user(self): + """ Get detailed profile information about the current user. + An alias for the 'me' method. + """ + return self.me() + + def current_user_playing_track(self): + """ Get information about the current user's currently playing track. + """ + return self._get("me/player/currently-playing") + + def current_user_saved_albums(self, limit=20, offset=0, market=None): + """ Gets a list of the albums saved in the current authorized user's + "Your Music" library + + Parameters: + - limit - the number of albums to return (MAX_LIMIT=50) + - offset - the index of the first album to return + - market - an ISO 3166-1 alpha-2 country code. + + """ + return self._get("me/albums", limit=limit, offset=offset, market=market) + + def current_user_saved_albums_add(self, albums=[]): + """ Add one or more albums to the current user's + "Your Music" library. + Parameters: + - albums - a list of album URIs, URLs or IDs + """ + + alist = [self._get_id("album", a) for a in albums] + return self._put("me/albums?ids=" + ",".join(alist)) + + def current_user_saved_albums_delete(self, albums=[]): + """ Remove one or more albums from the current user's + "Your Music" library. + + Parameters: + - albums - a list of album URIs, URLs or IDs + """ + alist = [self._get_id("album", a) for a in albums] + return self._delete("me/albums/?ids=" + ",".join(alist)) + + def current_user_saved_albums_contains(self, albums=[]): + """ Check if one or more albums is already saved in + the current Spotify user’s “Your Music” library.
+ + Parameters: + - albums - a list of album URIs, URLs or IDs + """ + alist = [self._get_id("album", a) for a in albums] + return self._get("me/albums/contains?ids=" + ",".join(alist)) + + def current_user_saved_tracks(self, limit=20, offset=0, market=None): + """ Gets a list of the tracks saved in the current authorized user's + "Your Music" library + + Parameters: + - limit - the number of tracks to return + - offset - the index of the first track to return + - market - an ISO 3166-1 alpha-2 country code + + """ + return self._get("me/tracks", limit=limit, offset=offset, market=market) + + def current_user_saved_tracks_add(self, tracks=None): + """ Add one or more tracks to the current user's + "Your Music" library. + + Parameters: + - tracks - a list of track URIs, URLs or IDs + """ + tlist = [] + if tracks is not None: + tlist = [self._get_id("track", t) for t in tracks] + return self._put("me/tracks/?ids=" + ",".join(tlist)) + + def current_user_saved_tracks_delete(self, tracks=None): + """ Remove one or more tracks from the current user's + "Your Music" library. + + Parameters: + - tracks - a list of track URIs, URLs or IDs + """ + tlist = [] + if tracks is not None: + tlist = [self._get_id("track", t) for t in tracks] + return self._delete("me/tracks/?ids=" + ",".join(tlist)) + + def current_user_saved_tracks_contains(self, tracks=None): + """ Check if one or more tracks is already saved in + the current Spotify user’s “Your Music” library. + + Parameters: + - tracks - a list of track URIs, URLs or IDs + """ + tlist = [] + if tracks is not None: + tlist = [self._get_id("track", t) for t in tracks] + return self._get("me/tracks/contains?ids=" + ",".join(tlist)) + + def current_user_saved_episodes(self, limit=20, offset=0, market=None): + """ Gets a list of the episodes saved in the current authorized user's + "Your Music" library + + Parameters: + - limit - the number of episodes to return + - offset - the index of the first episode to return + - market - an ISO 3166-1 alpha-2 country code + + """ + return self._get("me/episodes", limit=limit, offset=offset, market=market) + + def current_user_saved_episodes_add(self, episodes=None): + """ Add one or more episodes to the current user's + "Your Music" library. + + Parameters: + - episodes - a list of episode URIs, URLs or IDs + """ + elist = [] + if episodes is not None: + elist = [self._get_id("episode", e) for e in episodes] + return self._put("me/episodes/?ids=" + ",".join(elist)) + + def current_user_saved_episodes_delete(self, episodes=None): + """ Remove one or more episodes from the current user's + "Your Music" library. + + Parameters: + - episodes - a list of episode URIs, URLs or IDs + """ + elist = [] + if episodes is not None: + elist = [self._get_id("episode", e) for e in episodes] + return self._delete("me/episodes/?ids=" + ",".join(elist)) + + def current_user_saved_episodes_contains(self, episodes=None): + """ Check if one or more episodes is already saved in + the current Spotify user’s “Your Music” library. 
+ + Parameters: + - episodes - a list of episode URIs, URLs or IDs + """ + elist = [] + if episodes is not None: + elist = [self._get_id("episode", e) for e in episodes] + return self._get("me/episodes/contains?ids=" + ",".join(elist)) + + def current_user_saved_shows(self, limit=20, offset=0, market=None): + """ Gets a list of the shows saved in the current authorized user's + "Your Music" library + + Parameters: + - limit - the number of shows to return + - offset - the index of the first show to return + - market - an ISO 3166-1 alpha-2 country code + + """ + return self._get("me/shows", limit=limit, offset=offset, market=market) + + def current_user_saved_shows_add(self, shows=[]): + """ Add one or more shows to the current user's + "Your Music" library. + Parameters: + - shows - a list of show URIs, URLs or IDs + """ + slist = [self._get_id("show", s) for s in shows] + return self._put("me/shows?ids=" + ",".join(slist)) + + def current_user_saved_shows_delete(self, shows=[]): + """ Remove one or more shows from the current user's + "Your Music" library. + + Parameters: + - shows - a list of show URIs, URLs or IDs + """ + slist = [self._get_id("show", s) for s in shows] + return self._delete("me/shows/?ids=" + ",".join(slist)) + + def current_user_saved_shows_contains(self, shows=[]): + """ Check if one or more shows is already saved in + the current Spotify user’s “Your Music” library. + + Parameters: + - shows - a list of show URIs, URLs or IDs + """ + slist = [self._get_id("show", s) for s in shows] + return self._get("me/shows/contains?ids=" + ",".join(slist)) + + def current_user_followed_artists(self, limit=20, after=None): + """ Gets a list of the artists followed by the current authorized user + + Parameters: + - limit - the number of artists to return + - after - the last artist ID retrieved from the previous + request + + """ + return self._get( + "me/following", type="artist", limit=limit, after=after + ) + + def current_user_following_artists(self, ids=None): + """ Check if the current user is following certain artists + + Returns list of booleans respective to ids + + Parameters: + - ids - a list of artist URIs, URLs or IDs + """ + idlist = [] + if ids is not None: + idlist = [self._get_id("artist", i) for i in ids] + return self._get( + "me/following/contains", ids=",".join(idlist), type="artist" + ) + + def current_user_following_users(self, ids=None): + """ Check if the current user is following certain users + + Returns list of booleans respective to ids + + Parameters: + - ids - a list of user URIs, URLs or IDs + """ + idlist = [] + if ids is not None: + idlist = [self._get_id("user", i) for i in ids] + return self._get( + "me/following/contains", ids=",".join(idlist), type="user" + ) + + def current_user_top_artists( + self, limit=20, offset=0, time_range="medium_term" + ): + """ Get the current user's top artists + + Parameters: + - limit - the number of entities to return + - offset - the index of the first entity to return + - time_range - Over what time frame are the affinities computed + Valid-values: short_term, medium_term, long_term + """ + return self._get( + "me/top/artists", time_range=time_range, limit=limit, offset=offset + ) + + def current_user_top_tracks( + self, limit=20, offset=0, time_range="medium_term" + ): + """ Get the current user's top tracks + + Parameters: + - limit - the number of entities to return + - offset - the index of the first entity to return + - time_range - Over what time frame are the affinities computed + Valid-values: 
short_term, medium_term, long_term + """ + return self._get( + "me/top/tracks", time_range=time_range, limit=limit, offset=offset + ) + + def current_user_recently_played(self, limit=50, after=None, before=None): + """ Get the current user's recently played tracks + + Parameters: + - limit - the number of entities to return + - after - unix timestamp in milliseconds. Returns all items + after (but not including) this cursor position. + Cannot be used if before is specified. + - before - unix timestamp in milliseconds. Returns all items + before (but not including) this cursor position. + Cannot be used if after is specified + """ + return self._get( + "me/player/recently-played", + limit=limit, + after=after, + before=before, + ) + + def user_follow_artists(self, ids=[]): + """ Follow one or more artists + Parameters: + - ids - a list of artist IDs + """ + return self._put("me/following?type=artist&ids=" + ",".join(ids)) + + def user_follow_users(self, ids=[]): + """ Follow one or more users + Parameters: + - ids - a list of user IDs + """ + return self._put("me/following?type=user&ids=" + ",".join(ids)) + + def user_unfollow_artists(self, ids=[]): + """ Unfollow one or more artists + Parameters: + - ids - a list of artist IDs + """ + return self._delete("me/following?type=artist&ids=" + ",".join(ids)) + + def user_unfollow_users(self, ids=[]): + """ Unfollow one or more users + Parameters: + - ids - a list of user IDs + """ + return self._delete("me/following?type=user&ids=" + ",".join(ids)) + + def featured_playlists( + self, locale=None, country=None, timestamp=None, limit=20, offset=0 + ): + """ Get a list of Spotify featured playlists + + Parameters: + - locale - The desired language, consisting of a lowercase ISO + 639-1 alpha-2 language code and an uppercase ISO 3166-1 alpha-2 + country code, joined by an underscore. + + - country - An ISO 3166-1 alpha-2 country code. + + - timestamp - A timestamp in ISO 8601 format: + yyyy-MM-ddTHH:mm:ss. Use this parameter to specify the user's + local time to get results tailored for that specific date and + time in the day + + - limit - The maximum number of items to return. Default: 20. + Minimum: 1. Maximum: 50 + + - offset - The index of the first item to return. Default: 0 + (the first object). Use with limit to get the next set of + items. + """ + return self._get( + "browse/featured-playlists", + locale=locale, + country=country, + timestamp=timestamp, + limit=limit, + offset=offset, + ) + + def new_releases(self, country=None, limit=20, offset=0): + """ Get a list of new album releases featured in Spotify + + Parameters: + - country - An ISO 3166-1 alpha-2 country code. + + - limit - The maximum number of items to return. Default: 20. + Minimum: 1. Maximum: 50 + + - offset - The index of the first item to return. Default: 0 + (the first object). Use with limit to get the next set of + items. + """ + return self._get( + "browse/new-releases", country=country, limit=limit, offset=offset + ) + + def category(self, category_id, country=None, locale=None): + """ Get info about a category + + Parameters: + - category_id - The Spotify category ID for the category. + + - country - An ISO 3166-1 alpha-2 country code. + - locale - The desired language, consisting of an ISO 639-1 alpha-2 + language code and an ISO 3166-1 alpha-2 country code, joined + by an underscore. 
+ """ + return self._get( + "browse/categories/" + category_id, + country=country, + locale=locale, + ) + + def categories(self, country=None, locale=None, limit=20, offset=0): + """ Get a list of categories + + Parameters: + - country - An ISO 3166-1 alpha-2 country code. + - locale - The desired language, consisting of an ISO 639-1 alpha-2 + language code and an ISO 3166-1 alpha-2 country code, joined + by an underscore. + + - limit - The maximum number of items to return. Default: 20. + Minimum: 1. Maximum: 50 + + - offset - The index of the first item to return. Default: 0 + (the first object). Use with limit to get the next set of + items. + """ + return self._get( + "browse/categories", + country=country, + locale=locale, + limit=limit, + offset=offset, + ) + + def category_playlists( + self, category_id=None, country=None, limit=20, offset=0 + ): + """ Get a list of playlists for a specific Spotify category + + Parameters: + - category_id - The Spotify category ID for the category. + + - country - An ISO 3166-1 alpha-2 country code. + + - limit - The maximum number of items to return. Default: 20. + Minimum: 1. Maximum: 50 + + - offset - The index of the first item to return. Default: 0 + (the first object). Use with limit to get the next set of + items. + """ + return self._get( + "browse/categories/" + category_id + "/playlists", + country=country, + limit=limit, + offset=offset, + ) + + def recommendations( + self, + seed_artists=None, + seed_genres=None, + seed_tracks=None, + limit=20, + country=None, + **kwargs + ): + """ Get a list of recommended tracks for one to five seeds. + (at least one of `seed_artists`, `seed_tracks` and `seed_genres` + are needed) + + Parameters: + - seed_artists - a list of artist IDs, URIs or URLs + - seed_tracks - a list of track IDs, URIs or URLs + - seed_genres - a list of genre names. Available genres for + recommendations can be found by calling + recommendation_genre_seeds + + - country - An ISO 3166-1 alpha-2 country code. If provided, + all results will be playable in this country. + + - limit - The maximum number of items to return. Default: 20. + Minimum: 1. Maximum: 100 + + - min/max/target_ - For the tuneable track + attributes listed in the documentation, these values + provide filters and targeting on results. + """ + params = dict(limit=limit) + if seed_artists: + params["seed_artists"] = ",".join( + [self._get_id("artist", a) for a in seed_artists] + ) + if seed_genres: + params["seed_genres"] = ",".join(seed_genres) + if seed_tracks: + params["seed_tracks"] = ",".join( + [self._get_id("track", t) for t in seed_tracks] + ) + if country: + params["market"] = country + + for attribute in [ + "acousticness", + "danceability", + "duration_ms", + "energy", + "instrumentalness", + "key", + "liveness", + "loudness", + "mode", + "popularity", + "speechiness", + "tempo", + "time_signature", + "valence", + ]: + for prefix in ["min_", "max_", "target_"]: + param = prefix + attribute + if param in kwargs: + params[param] = kwargs[param] + return self._get("recommendations", **params) + + def recommendation_genre_seeds(self): + """ Get a list of genres available for the recommendations function. 
+ """ + return self._get("recommendations/available-genre-seeds") + + def audio_analysis(self, track_id): + """ Get audio analysis for a track based upon its Spotify ID + Parameters: + - track_id - a track URI, URL or ID + """ + trid = self._get_id("track", track_id) + return self._get("audio-analysis/" + trid) + + def audio_features(self, tracks=[]): + """ Get audio features for one or multiple tracks based upon their Spotify IDs + Parameters: + - tracks - a list of track URIs, URLs or IDs, maximum: 100 ids + """ + if isinstance(tracks, str): + trackid = self._get_id("track", tracks) + results = self._get("audio-features/?ids=" + trackid) + else: + tlist = [self._get_id("track", t) for t in tracks] + results = self._get("audio-features/?ids=" + ",".join(tlist)) + # the response has changed, look for the new style first, and if + # its not there, fallback on the old style + if "audio_features" in results: + return results["audio_features"] + else: + return results + + def devices(self): + """ Get a list of user's available devices. + """ + return self._get("me/player/devices") + + def current_playback(self, market=None, additional_types=None): + """ Get information about user's current playback. + + Parameters: + - market - an ISO 3166-1 alpha-2 country code. + - additional_types - `episode` to get podcast track information + """ + return self._get("me/player", market=market, additional_types=additional_types) + + def currently_playing(self, market=None, additional_types=None): + """ Get user's currently playing track. + + Parameters: + - market - an ISO 3166-1 alpha-2 country code. + - additional_types - `episode` to get podcast track information + """ + return self._get("me/player/currently-playing", market=market, + additional_types=additional_types) + + def transfer_playback(self, device_id, force_play=True): + """ Transfer playback to another device. + Note that the API accepts a list of device ids, but only + actually supports one. + + Parameters: + - device_id - transfer playback to this device + - force_play - true: after transfer, play. false: + keep current state. + """ + data = {"device_ids": [device_id], "play": force_play} + return self._put("me/player", payload=data) + + def start_playback( + self, device_id=None, context_uri=None, uris=None, offset=None, position_ms=None + ): + """ Start or resume user's playback. + + Provide a `context_uri` to start playback of an album, + artist, or playlist. + + Provide a `uris` list to start playback of one or more + tracks. + + Provide `offset` as {"position": } or {"uri": ""} + to start playback at a particular offset. + + Parameters: + - device_id - device target for playback + - context_uri - spotify context uri to play + - uris - spotify track uris + - offset - offset into context by index or track + - position_ms - (optional) indicates from what position to start playback. + Must be a positive number. Passing in a position that is + greater than the length of the track will cause the player to + start playing the next song. 
+ """ + if context_uri is not None and uris is not None: + logger.warning("Specify either context uri or uris, not both") + return + if uris is not None and not isinstance(uris, list): + logger.warning("URIs must be a list") + return + data = {} + if context_uri is not None: + data["context_uri"] = context_uri + if uris is not None: + data["uris"] = uris + if offset is not None: + data["offset"] = offset + if position_ms is not None: + data["position_ms"] = position_ms + return self._put( + self._append_device_id("me/player/play", device_id), payload=data + ) + + def pause_playback(self, device_id=None): + """ Pause user's playback. + + Parameters: + - device_id - device target for playback + """ + return self._put(self._append_device_id("me/player/pause", device_id)) + + def next_track(self, device_id=None): + """ Skip user's playback to next track. + + Parameters: + - device_id - device target for playback + """ + return self._post(self._append_device_id("me/player/next", device_id)) + + def previous_track(self, device_id=None): + """ Skip user's playback to previous track. + + Parameters: + - device_id - device target for playback + """ + return self._post( + self._append_device_id("me/player/previous", device_id) + ) + + def seek_track(self, position_ms, device_id=None): + """ Seek to position in current track. + + Parameters: + - position_ms - position in milliseconds to seek to + - device_id - device target for playback + """ + if not isinstance(position_ms, int): + logger.warning("Position_ms must be an integer") + return + return self._put( + self._append_device_id( + "me/player/seek?position_ms=%s" % position_ms, device_id + ) + ) + + def repeat(self, state, device_id=None): + """ Set repeat mode for playback. + + Parameters: + - state - `track`, `context`, or `off` + - device_id - device target for playback + """ + if state not in ["track", "context", "off"]: + logger.warning("Invalid state") + return + self._put( + self._append_device_id( + "me/player/repeat?state=%s" % state, device_id + ) + ) + + def volume(self, volume_percent, device_id=None): + """ Set playback volume. + + Parameters: + - volume_percent - volume between 0 and 100 + - device_id - device target for playback + """ + if not isinstance(volume_percent, int): + logger.warning("Volume must be an integer") + return + if volume_percent < 0 or volume_percent > 100: + logger.warning("Volume must be between 0 and 100, inclusive") + return + self._put( + self._append_device_id( + "me/player/volume?volume_percent=%s" % volume_percent, + device_id, + ) + ) + + def shuffle(self, state, device_id=None): + """ Toggle playback shuffling. + + Parameters: + - state - true or false + - device_id - device target for playback + """ + if not isinstance(state, bool): + logger.warning("state must be a boolean") + return + state = str(state).lower() + self._put( + self._append_device_id( + "me/player/shuffle?state=%s" % state, device_id + ) + ) + + def queue(self): + """ Gets the current user's queue """ + return self._get("me/player/queue") + + def add_to_queue(self, uri, device_id=None): + """ Adds a song to the end of a user's queue + + If device A is currently playing music and you try to add to the queue + and pass in the id for device B, you will get a + 'Player command failed: Restriction violated' error + I therefore recommend leaving device_id as None so that the active device is targeted + + :param uri: song uri, id, or url + :param device_id: + the id of a Spotify device. + If None, then the active device is used. 
+ + """ + + uri = self._get_uri("track", uri) + + endpoint = "me/player/queue?uri=%s" % uri + + if device_id is not None: + endpoint += "&device_id=%s" % device_id + + return self._post(endpoint) + + def available_markets(self): + """ Get the list of markets where Spotify is available. + Returns a list of the countries in which Spotify is available, identified by their + ISO 3166-1 alpha-2 country code with additional country codes for special territories. + """ + return self._get("markets") + + def _append_device_id(self, path, device_id): + """ Append device ID to API path. + + Parameters: + - device_id - device id to append + """ + if device_id: + if "?" in path: + path += "&device_id=%s" % device_id + else: + path += "?device_id=%s" % device_id + return path + + def _get_id(self, type, id): + uri_match = re.search(Spotify._regex_spotify_uri, id) + if uri_match is not None: + uri_match_groups = uri_match.groupdict() + if uri_match_groups['type'] != type: + # TODO change to a ValueError in v3 + raise SpotifyException(400, -1, "Unexpected Spotify URI type.") + return uri_match_groups['id'] + + url_match = re.search(Spotify._regex_spotify_url, id) + if url_match is not None: + url_match_groups = url_match.groupdict() + if url_match_groups['type'] != type: + raise SpotifyException(400, -1, "Unexpected Spotify URL type.") + # TODO change to a ValueError in v3 + return url_match_groups['id'] + + # Raw identifiers might be passed, ensure they are also base-62 + if re.search(Spotify._regex_base62, id) is not None: + return id + + # TODO change to a ValueError in v3 + raise SpotifyException(400, -1, "Unsupported URL / URI.") + + def _get_uri(self, type, id): + if self._is_uri(id): + return id + else: + return "spotify:" + type + ":" + self._get_id(type, id) + + def _is_uri(self, uri): + return re.search(Spotify._regex_spotify_uri, uri) is not None + + def _search_multiple_markets(self, q, limit, offset, type, markets, total): + if total and limit > total: + limit = total + warnings.warn( + "limit was auto-adjusted to equal {} as it must not be higher than total".format( + total), + UserWarning, + ) + + results = defaultdict(dict) + item_types = [item_type + "s" for item_type in type.split(",")] + count = 0 + + for country in markets: + result = self._get( + "search", q=q, limit=limit, offset=offset, type=type, market=country + ) + for item_type in item_types: + results[country][item_type] = result[item_type] + + # Truncate the items list to the current limit + if len(results[country][item_type]['items']) > limit: + results[country][item_type]['items'] = \ + results[country][item_type]['items'][:limit] + + count += len(results[country][item_type]['items']) + if total and limit > total - count: + # when approaching `total` results, adjust `limit` to not request more + # items than needed + limit = total - count + + if total and count >= total: + return results + + return results diff --git a/templates/skills/spotify/dependencies/spotipy/exceptions.py b/templates/skills/spotify/dependencies/spotipy/exceptions.py new file mode 100644 index 00000000..df503f10 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy/exceptions.py @@ -0,0 +1,16 @@ +class SpotifyException(Exception): + + def __init__(self, http_status, code, msg, reason=None, headers=None): + self.http_status = http_status + self.code = code + self.msg = msg + self.reason = reason + # `headers` is used to support `Retry-After` in the event of a + # 429 status code. 
+ if headers is None: + headers = {} + self.headers = headers + + def __str__(self): + return 'http status: {0}, code:{1} - {2}, reason: {3}'.format( + self.http_status, self.code, self.msg, self.reason) diff --git a/templates/skills/spotify/dependencies/spotipy/oauth2.py b/templates/skills/spotify/dependencies/spotipy/oauth2.py new file mode 100644 index 00000000..125c87c9 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy/oauth2.py @@ -0,0 +1,1308 @@ +# -*- coding: utf-8 -*- + +__all__ = [ + "SpotifyClientCredentials", + "SpotifyOAuth", + "SpotifyOauthError", + "SpotifyStateError", + "SpotifyImplicitGrant", + "SpotifyPKCE" +] + +import base64 +import logging +import os +import time +import warnings +import webbrowser + +import requests +# Workaround to support both python 2 & 3 +import six +import six.moves.urllib.parse as urllibparse +from six.moves.BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer +from six.moves.urllib_parse import parse_qsl, urlparse + +from spotipy.cache_handler import CacheFileHandler, CacheHandler +from spotipy.util import CLIENT_CREDS_ENV_VARS, get_host_port, normalize_scope + +logger = logging.getLogger(__name__) + + +class SpotifyOauthError(Exception): + """ Error during Auth Code or Implicit Grant flow """ + + def __init__(self, message, error=None, error_description=None, *args, **kwargs): + self.error = error + self.error_description = error_description + self.__dict__.update(kwargs) + super(SpotifyOauthError, self).__init__(message, *args, **kwargs) + + +class SpotifyStateError(SpotifyOauthError): + """ The state sent and state received were different """ + + def __init__(self, local_state=None, remote_state=None, message=None, + error=None, error_description=None, *args, **kwargs): + if not message: + message = ("Expected " + local_state + " but received " + + remote_state) + super(SpotifyOauthError, self).__init__(message, error, + error_description, *args, + **kwargs) + + +def _make_authorization_headers(client_id, client_secret): + auth_header = base64.b64encode( + six.text_type(client_id + ":" + client_secret).encode("ascii") + ) + return {"Authorization": "Basic %s" % auth_header.decode("ascii")} + + +def _ensure_value(value, env_key): + env_val = CLIENT_CREDS_ENV_VARS[env_key] + _val = value or os.getenv(env_val) + if _val is None: + msg = "No %s. Pass it or set a %s environment variable." % ( + env_key, + env_val, + ) + raise SpotifyOauthError(msg) + return _val + + +class SpotifyAuthBase(object): + def __init__(self, requests_session): + if isinstance(requests_session, requests.Session): + self._session = requests_session + else: + if requests_session: # Build a new session. + self._session = requests.Session() + else: # Use the Requests API module as a "session". 
+ from requests import api + self._session = api + + def _normalize_scope(self, scope): + return normalize_scope(scope) + + @property + def client_id(self): + return self._client_id + + @client_id.setter + def client_id(self, val): + self._client_id = _ensure_value(val, "client_id") + + @property + def client_secret(self): + return self._client_secret + + @client_secret.setter + def client_secret(self, val): + self._client_secret = _ensure_value(val, "client_secret") + + @property + def redirect_uri(self): + return self._redirect_uri + + @redirect_uri.setter + def redirect_uri(self, val): + self._redirect_uri = _ensure_value(val, "redirect_uri") + + @staticmethod + def _get_user_input(prompt): + try: + return raw_input(prompt) + except NameError: + return input(prompt) + + @staticmethod + def is_token_expired(token_info): + now = int(time.time()) + return token_info["expires_at"] - now < 60 + + @staticmethod + def _is_scope_subset(needle_scope, haystack_scope): + needle_scope = set(needle_scope.split()) if needle_scope else set() + haystack_scope = ( + set(haystack_scope.split()) if haystack_scope else set() + ) + return needle_scope <= haystack_scope + + def _handle_oauth_error(self, http_error): + response = http_error.response + try: + error_payload = response.json() + error = error_payload.get('error') + error_description = error_payload.get('error_description') + except ValueError: + # if the response cannot be decoded into JSON (which raises a ValueError), + # then try to decode it into text + + # if we receive an empty string (which is falsy), then replace it with `None` + error = response.text or None + error_description = None + + raise SpotifyOauthError( + 'error: {0}, error_description: {1}'.format( + error, error_description + ), + error=error, + error_description=error_description + ) + + def __del__(self): + """Make sure the connection (pool) gets closed""" + if isinstance(self._session, requests.Session): + self._session.close() + + +class SpotifyClientCredentials(SpotifyAuthBase): + OAUTH_TOKEN_URL = "https://accounts.spotify.com/api/token" + + def __init__( + self, + client_id=None, + client_secret=None, + proxies=None, + requests_session=True, + requests_timeout=None, + cache_handler=None + ): + """ + Creates a Client Credentials Flow Manager. + + The Client Credentials flow is used in server-to-server authentication. + Only endpoints that do not access user information can be accessed. + This means that endpoints that require authorization scopes cannot be accessed. + The advantage, however, of this authorization flow is that it does not require any + user interaction + + You can either provide a client_id and client_secret to the + constructor or set SPOTIPY_CLIENT_ID and SPOTIPY_CLIENT_SECRET + environment variables + + Parameters: + * client_id: Must be supplied or set as environment variable + * client_secret: Must be supplied or set as environment variable + * proxies: Optional, proxy for the requests library to route through + * requests_session: A Requests session + * requests_timeout: Optional, tell Requests to stop waiting for a response after + a given number of seconds + * cache_handler: An instance of the `CacheHandler` class to handle + getting and saving cached authorization tokens. + Optional, will otherwise use `CacheFileHandler`. 
+ (takes precedence over `cache_path` and `username`) + + """ + + super(SpotifyClientCredentials, self).__init__(requests_session) + + self.client_id = client_id + self.client_secret = client_secret + self.proxies = proxies + self.requests_timeout = requests_timeout + if cache_handler: + assert issubclass(cache_handler.__class__, CacheHandler), \ + "cache_handler must be a subclass of CacheHandler: " + str(type(cache_handler)) \ + + " != " + str(CacheHandler) + self.cache_handler = cache_handler + else: + self.cache_handler = CacheFileHandler() + + def get_access_token(self, as_dict=True, check_cache=True): + """ + If a valid access token is in memory, returns it + Else fetches a new token and returns it + + Parameters: + - as_dict - a boolean indicating if returning the access token + as a token_info dictionary, otherwise it will be returned + as a string. + """ + if as_dict: + warnings.warn( + "You're using 'as_dict = True'." + "get_access_token will return the token string directly in future " + "versions. Please adjust your code accordingly, or use " + "get_cached_token instead.", + DeprecationWarning, + stacklevel=2, + ) + + if check_cache: + token_info = self.cache_handler.get_cached_token() + if token_info and not self.is_token_expired(token_info): + return token_info if as_dict else token_info["access_token"] + + token_info = self._request_access_token() + token_info = self._add_custom_values_to_token_info(token_info) + self.cache_handler.save_token_to_cache(token_info) + return token_info if as_dict else token_info["access_token"] + + def _request_access_token(self): + """Gets client credentials access token """ + payload = {"grant_type": "client_credentials"} + + headers = _make_authorization_headers( + self.client_id, self.client_secret + ) + + logger.debug( + "sending POST request to %s with Headers: %s and Body: %r", + self.OAUTH_TOKEN_URL, headers, payload + ) + + try: + response = self._session.post( + self.OAUTH_TOKEN_URL, + data=payload, + headers=headers, + verify=True, + proxies=self.proxies, + timeout=self.requests_timeout, + ) + response.raise_for_status() + token_info = response.json() + return token_info + except requests.exceptions.HTTPError as http_error: + self._handle_oauth_error(http_error) + + def _add_custom_values_to_token_info(self, token_info): + """ + Store some values that aren't directly provided by a Web API + response. + """ + token_info["expires_at"] = int(time.time()) + token_info["expires_in"] + return token_info + + +class SpotifyOAuth(SpotifyAuthBase): + """ + Implements Authorization Code Flow for Spotify's OAuth implementation. + """ + OAUTH_AUTHORIZE_URL = "https://accounts.spotify.com/authorize" + OAUTH_TOKEN_URL = "https://accounts.spotify.com/api/token" + + def __init__( + self, + client_id=None, + client_secret=None, + redirect_uri=None, + state=None, + scope=None, + cache_path=None, + username=None, + proxies=None, + show_dialog=False, + requests_session=True, + requests_timeout=None, + open_browser=True, + cache_handler=None + ): + """ + Creates a SpotifyOAuth object + + Parameters: + * client_id: Must be supplied or set as environment variable + * client_secret: Must be supplied or set as environment variable + * redirect_uri: Must be supplied or set as environment variable + * state: Optional, no verification is performed + * scope: Optional, either a list of scopes or comma separated string of scopes. 
+ e.g, "playlist-read-private,playlist-read-collaborative" + * cache_path: (deprecated) Optional, will otherwise be generated + (takes precedence over `username`) + * username: (deprecated) Optional or set as environment variable + (will set `cache_path` to `.cache-{username}`) + * proxies: Optional, proxy for the requests library to route through + * show_dialog: Optional, interpreted as boolean + * requests_session: A Requests session + * requests_timeout: Optional, tell Requests to stop waiting for a response after + a given number of seconds + * open_browser: Optional, whether or not the web browser should be opened to + authorize a user + * cache_handler: An instance of the `CacheHandler` class to handle + getting and saving cached authorization tokens. + Optional, will otherwise use `CacheFileHandler`. + (takes precedence over `cache_path` and `username`) + """ + + super(SpotifyOAuth, self).__init__(requests_session) + + self.client_id = client_id + self.client_secret = client_secret + self.redirect_uri = redirect_uri + self.state = state + self.scope = self._normalize_scope(scope) + if username or cache_path: + warnings.warn("Specifying cache_path or username as arguments to SpotifyOAuth " + + "will be deprecated. Instead, please create a CacheFileHandler " + + "instance with the desired cache_path and username and pass it " + + "to SpotifyOAuth as the cache_handler. For example:\n\n" + + "\tfrom spotipy.oauth2 import CacheFileHandler\n" + + "\thandler = CacheFileHandler(cache_path=cache_path, " + + "username=username)\n" + + "\tsp = spotipy.SpotifyOAuth(client_id, client_secret, " + + "redirect_uri," + + " cache_handler=handler)", + DeprecationWarning + ) + if cache_handler: + warnings.warn("A cache_handler has been specified along with a cache_path or " + + "username. 
The cache_path and username arguments will be ignored.") + if cache_handler: + assert issubclass(cache_handler.__class__, CacheHandler), \ + "cache_handler must be a subclass of CacheHandler: " + str(type(cache_handler)) \ + + " != " + str(CacheHandler) + self.cache_handler = cache_handler + else: + username = (username or os.getenv(CLIENT_CREDS_ENV_VARS["client_username"])) + self.cache_handler = CacheFileHandler( + username=username, + cache_path=cache_path + ) + self.proxies = proxies + self.requests_timeout = requests_timeout + self.show_dialog = show_dialog + self.open_browser = open_browser + + def validate_token(self, token_info): + if token_info is None: + return None + + # if scopes don't match, then bail + if "scope" not in token_info or not self._is_scope_subset( + self.scope, token_info["scope"] + ): + return None + + if self.is_token_expired(token_info): + token_info = self.refresh_access_token( + token_info["refresh_token"] + ) + + return token_info + + def get_authorize_url(self, state=None): + """ Gets the URL to use to authorize this app + """ + payload = { + "client_id": self.client_id, + "response_type": "code", + "redirect_uri": self.redirect_uri, + } + if self.scope: + payload["scope"] = self.scope + if state is None: + state = self.state + if state is not None: + payload["state"] = state + if self.show_dialog: + payload["show_dialog"] = True + + urlparams = urllibparse.urlencode(payload) + + return "%s?%s" % (self.OAUTH_AUTHORIZE_URL, urlparams) + + def parse_response_code(self, url): + """ Parse the response code in the given response url + + Parameters: + - url - the response url + """ + _, code = self.parse_auth_response_url(url) + if code is None: + return url + else: + return code + + @staticmethod + def parse_auth_response_url(url): + query_s = urlparse(url).query + form = dict(parse_qsl(query_s)) + if "error" in form: + raise SpotifyOauthError("Received error from auth server: " + "{}".format(form["error"]), + error=form["error"]) + return tuple(form.get(param) for param in ["state", "code"]) + + def _make_authorization_headers(self): + return _make_authorization_headers(self.client_id, self.client_secret) + + def _open_auth_url(self): + auth_url = self.get_authorize_url() + try: + webbrowser.open(auth_url) + logger.info("Opened %s in your browser", auth_url) + except webbrowser.Error: + logger.error("Please navigate here: %s", auth_url) + + def _get_auth_response_interactive(self, open_browser=False): + if open_browser: + self._open_auth_url() + prompt = "Enter the URL you were redirected to: " + else: + url = self.get_authorize_url() + prompt = ( + "Go to the following URL: {}\n" + "Enter the URL you were redirected to: ".format(url) + ) + response = self._get_user_input(prompt) + state, code = SpotifyOAuth.parse_auth_response_url(response) + if self.state is not None and self.state != state: + raise SpotifyStateError(self.state, state) + return code + + def _get_auth_response_local_server(self, redirect_port): + server = start_local_http_server(redirect_port) + self._open_auth_url() + server.handle_request() + + if server.error is not None: + raise server.error + elif self.state is not None and server.state != self.state: + raise SpotifyStateError(self.state, server.state) + elif server.auth_code is not None: + return server.auth_code + else: + raise SpotifyOauthError("Server listening on localhost has not been accessed") + + def get_auth_response(self, open_browser=None): + logger.info('User authentication requires interaction with your ' + 'web browser. 
Once you enter your credentials and ' + 'give authorization, you will be redirected to ' + 'a url. Paste that url you were directed to to ' + 'complete the authorization.') + + redirect_info = urlparse(self.redirect_uri) + redirect_host, redirect_port = get_host_port(redirect_info.netloc) + + if open_browser is None: + open_browser = self.open_browser + + if ( + open_browser + and redirect_host in ("127.0.0.1", "localhost") + and redirect_info.scheme == "http" + ): + # Only start a local http server if a port is specified + if redirect_port: + return self._get_auth_response_local_server(redirect_port) + else: + logger.warning('Using `%s` as redirect URI without a port. ' + 'Specify a port (e.g. `%s:8080`) to allow ' + 'automatic retrieval of authentication code ' + 'instead of having to copy and paste ' + 'the URL your browser is redirected to.', + redirect_host, redirect_host) + + return self._get_auth_response_interactive(open_browser=open_browser) + + def get_authorization_code(self, response=None): + if response: + return self.parse_response_code(response) + return self.get_auth_response() + + def get_access_token(self, code=None, as_dict=True, check_cache=True): + """ Gets the access token for the app given the code + + Parameters: + - code - the response code + - as_dict - a boolean indicating if returning the access token + as a token_info dictionary, otherwise it will be returned + as a string. + """ + if as_dict: + warnings.warn( + "You're using 'as_dict = True'." + "get_access_token will return the token string directly in future " + "versions. Please adjust your code accordingly, or use " + "get_cached_token instead.", + DeprecationWarning, + stacklevel=2, + ) + if check_cache: + token_info = self.validate_token(self.cache_handler.get_cached_token()) + if token_info is not None: + if self.is_token_expired(token_info): + token_info = self.refresh_access_token( + token_info["refresh_token"] + ) + return token_info if as_dict else token_info["access_token"] + + payload = { + "redirect_uri": self.redirect_uri, + "code": code or self.get_auth_response(), + "grant_type": "authorization_code", + } + if self.scope: + payload["scope"] = self.scope + if self.state: + payload["state"] = self.state + + headers = self._make_authorization_headers() + + logger.debug( + "sending POST request to %s with Headers: %s and Body: %r", + self.OAUTH_TOKEN_URL, headers, payload + ) + + try: + response = self._session.post( + self.OAUTH_TOKEN_URL, + data=payload, + headers=headers, + verify=True, + proxies=self.proxies, + timeout=self.requests_timeout, + ) + response.raise_for_status() + token_info = response.json() + token_info = self._add_custom_values_to_token_info(token_info) + self.cache_handler.save_token_to_cache(token_info) + return token_info if as_dict else token_info["access_token"] + except requests.exceptions.HTTPError as http_error: + self._handle_oauth_error(http_error) + + def refresh_access_token(self, refresh_token): + payload = { + "refresh_token": refresh_token, + "grant_type": "refresh_token", + } + + headers = self._make_authorization_headers() + + logger.debug( + "sending POST request to %s with Headers: %s and Body: %r", + self.OAUTH_TOKEN_URL, headers, payload + ) + + try: + response = self._session.post( + self.OAUTH_TOKEN_URL, + data=payload, + headers=headers, + proxies=self.proxies, + timeout=self.requests_timeout, + ) + response.raise_for_status() + token_info = response.json() + token_info = self._add_custom_values_to_token_info(token_info) + if "refresh_token" not in 
token_info: + token_info["refresh_token"] = refresh_token + self.cache_handler.save_token_to_cache(token_info) + return token_info + except requests.exceptions.HTTPError as http_error: + self._handle_oauth_error(http_error) + + def _add_custom_values_to_token_info(self, token_info): + """ + Store some values that aren't directly provided by a Web API + response. + """ + token_info["expires_at"] = int(time.time()) + token_info["expires_in"] + token_info["scope"] = self.scope + return token_info + + def get_cached_token(self): + warnings.warn("Calling get_cached_token directly on the SpotifyOAuth object will be " + + "deprecated. Instead, please specify a CacheFileHandler instance as " + + "the cache_handler in SpotifyOAuth and use the CacheFileHandler's " + + "get_cached_token method. You can replace:\n\tsp.get_cached_token()" + + "\n\nWith:\n\tsp.validate_token(sp.cache_handler.get_cached_token())", + DeprecationWarning + ) + return self.validate_token(self.cache_handler.get_cached_token()) + + def _save_token_info(self, token_info): + warnings.warn("Calling _save_token_info directly on the SpotifyOAuth object will be " + + "deprecated. Instead, please specify a CacheFileHandler instance as " + + "the cache_handler in SpotifyOAuth and use the CacheFileHandler's " + + "save_token_to_cache method.", + DeprecationWarning + ) + self.cache_handler.save_token_to_cache(token_info) + return None + + +class SpotifyPKCE(SpotifyAuthBase): + """ Implements PKCE Authorization Flow for client apps + + This auth manager enables *user and non-user* endpoints with only + a client ID, redirect URI, and username. When the app requests + an access token for the first time, the user is prompted to + authorize the new client app. After authorizing the app, the client + app is then given both access and refresh tokens. This is the + preferred way of authorizing a mobile/desktop client. + + """ + + OAUTH_AUTHORIZE_URL = "https://accounts.spotify.com/authorize" + OAUTH_TOKEN_URL = "https://accounts.spotify.com/api/token" + + def __init__(self, + client_id=None, + redirect_uri=None, + state=None, + scope=None, + cache_path=None, + username=None, + proxies=None, + requests_timeout=None, + requests_session=True, + open_browser=True, + cache_handler=None): + """ + Creates Auth Manager with the PKCE Auth flow. + + Parameters: + * client_id: Must be supplied or set as environment variable + * redirect_uri: Must be supplied or set as environment variable + * state: Optional, no verification is performed + * scope: Optional, either a list of scopes or comma separated string of scopes. + e.g, "playlist-read-private,playlist-read-collaborative" + * cache_path: (deprecated) Optional, will otherwise be generated + (takes precedence over `username`) + * username: (deprecated) Optional or set as environment variable + (will set `cache_path` to `.cache-{username}`) + * proxies: Optional, proxy for the requests library to route through + * requests_timeout: Optional, tell Requests to stop waiting for a response after + a given number of seconds + * requests_session: A Requests session + * open_browser: Optional, whether or not the web browser should be opened to + authorize a user + * cache_handler: An instance of the `CacheHandler` class to handle + getting and saving cached authorization tokens. + Optional, will otherwise use `CacheFileHandler`. 
+ (takes precedence over `cache_path` and `username`) + """ + + super(SpotifyPKCE, self).__init__(requests_session) + self.client_id = client_id + self.redirect_uri = redirect_uri + self.state = state + self.scope = self._normalize_scope(scope) + if username or cache_path: + warnings.warn("Specifying cache_path or username as arguments to SpotifyPKCE " + + "will be deprecated. Instead, please create a CacheFileHandler " + + "instance with the desired cache_path and username and pass it " + + "to SpotifyPKCE as the cache_handler. For example:\n\n" + + "\tfrom spotipy.oauth2 import CacheFileHandler\n" + + "\thandler = CacheFileHandler(cache_path=cache_path, " + + "username=username)\n" + + "\tsp = spotipy.SpotifyPKCE(client_id, redirect_uri, " + + "cache_handler=handler)", + DeprecationWarning + ) + if cache_handler: + warnings.warn("A cache_handler has been specified along with a cache_path or " + + "username. The cache_path and username arguments will be ignored.") + if cache_handler: + assert issubclass(type(cache_handler), CacheHandler), \ + "type(cache_handler): " + str(type(cache_handler)) + " != " + str(CacheHandler) + self.cache_handler = cache_handler + else: + username = (username or os.getenv(CLIENT_CREDS_ENV_VARS["client_username"])) + self.cache_handler = CacheFileHandler( + username=username, + cache_path=cache_path + ) + self.proxies = proxies + self.requests_timeout = requests_timeout + + self._code_challenge_method = "S256" # Spotify requires SHA256 + self.code_verifier = None + self.code_challenge = None + self.authorization_code = None + self.open_browser = open_browser + + def _get_code_verifier(self): + """ Spotify PKCE code verifier - See step 1 of the reference guide below + Reference: + https://developer.spotify.com/documentation/general/guides/authorization-guide/#authorization-code-flow-with-proof-key-for-code-exchange-pkce + """ + # Range (33,96) is used to select between 44-128 base64 characters for the + # next operation: base64 encodes every 3 bytes as 4 characters (6 bits per + # character), so 33-96 random bytes yield 44-128 characters + import random + length = random.randint(33, 96) + + # The seeded length generates between a 44 and 128 base64 characters encoded string + try: + import secrets + verifier = secrets.token_urlsafe(length) + except ImportError: # For python 3.5 support + import base64 + import os + rand_bytes = os.urandom(length) + verifier = base64.urlsafe_b64encode(rand_bytes).decode('utf-8').replace('=', '') + return verifier + + def _get_code_challenge(self): + """ Spotify PKCE code challenge - See step 1 of the reference guide below + Reference: + https://developer.spotify.com/documentation/general/guides/authorization-guide/#authorization-code-flow-with-proof-key-for-code-exchange-pkce + """ + import base64 + import hashlib + code_challenge_digest = hashlib.sha256(self.code_verifier.encode('utf-8')).digest() + code_challenge = base64.urlsafe_b64encode(code_challenge_digest).decode('utf-8') + return code_challenge.replace('=', '') + + def get_authorize_url(self, state=None): + """ Gets the URL to use to authorize this app """ + if not self.code_challenge: + self.get_pkce_handshake_parameters() + payload = { + "client_id": self.client_id, + "response_type": "code", + "redirect_uri": self.redirect_uri, + "code_challenge_method": self._code_challenge_method, + "code_challenge": self.code_challenge + } + if self.scope: + payload["scope"] = self.scope + if state is None: + state = self.state + if state is not None: + payload["state"] = state + urlparams = urllibparse.urlencode(payload) + return "%s?%s" % (self.OAUTH_AUTHORIZE_URL, urlparams) + + def _open_auth_url(self, state=None): + auth_url = self.get_authorize_url(state) + try: + webbrowser.open(auth_url) + logger.info("Opened %s in your browser", auth_url) + except webbrowser.Error: + logger.error("Please navigate here: %s", auth_url) + + def _get_auth_response(self, open_browser=None): + logger.info('User authentication requires interaction with your ' + 'web browser. Once you enter your credentials and ' + 'give authorization, you will be redirected to ' + 'a url. Paste that url you were directed to to ' + 'complete the authorization.') + + redirect_info = urlparse(self.redirect_uri) + redirect_host, redirect_port = get_host_port(redirect_info.netloc) + + if open_browser is None: + open_browser = self.open_browser + + if ( + open_browser + and redirect_host in ("127.0.0.1", "localhost") + and redirect_info.scheme == "http" + ): + # Only start a local http server if a port is specified + if redirect_port: + return self._get_auth_response_local_server(redirect_port) + else: + logger.warning('Using `%s` as redirect URI without a port. ' + 'Specify a port (e.g. 
`%s:8080`) to allow ' + 'automatic retrieval of authentication code ' + 'instead of having to copy and paste ' + 'the URL your browser is redirected to.', + redirect_host, redirect_host) + return self._get_auth_response_interactive(open_browser=open_browser) + + def _get_auth_response_local_server(self, redirect_port): + server = start_local_http_server(redirect_port) + self._open_auth_url() + server.handle_request() + + if self.state is not None and server.state != self.state: + raise SpotifyStateError(self.state, server.state) + + if server.auth_code is not None: + return server.auth_code + elif server.error is not None: + raise SpotifyOauthError("Received error from OAuth server: {}".format(server.error)) + else: + raise SpotifyOauthError("Server listening on localhost has not been accessed") + + def _get_auth_response_interactive(self, open_browser=False): + if open_browser or self.open_browser: + self._open_auth_url() + prompt = "Enter the URL you were redirected to: " + else: + url = self.get_authorize_url() + prompt = ( + "Go to the following URL: {}\n" + "Enter the URL you were redirected to: ".format(url) + ) + response = self._get_user_input(prompt) + state, code = self.parse_auth_response_url(response) + if self.state is not None and self.state != state: + raise SpotifyStateError(self.state, state) + return code + + def get_authorization_code(self, response=None): + if response: + return self.parse_response_code(response) + return self._get_auth_response() + + def validate_token(self, token_info): + if token_info is None: + return None + + # if scopes don't match, then bail + if "scope" not in token_info or not self._is_scope_subset( + self.scope, token_info["scope"] + ): + return None + + if self.is_token_expired(token_info): + token_info = self.refresh_access_token( + token_info["refresh_token"] + ) + + return token_info + + def _add_custom_values_to_token_info(self, token_info): + """ + Store some values that aren't directly provided by a Web API + response. + """ + token_info["expires_at"] = int(time.time()) + token_info["expires_in"] + return token_info + + def get_pkce_handshake_parameters(self): + self.code_verifier = self._get_code_verifier() + self.code_challenge = self._get_code_challenge() + + def get_access_token(self, code=None, check_cache=True): + """ Gets the access token for the app + + If the code is not given and no cached token is used, an + authentication window will be shown to the user to get a new + code. 
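+
+        Example (illustrative; assumes the SPOTIPY_CLIENT_ID and
+        SPOTIPY_REDIRECT_URI environment variables are set):
+
+            import spotipy
+            from spotipy.oauth2 import SpotifyPKCE
+
+            # No client secret is involved in the PKCE flow.
+            auth_manager = SpotifyPKCE(scope="user-library-read")
+            sp = spotipy.Spotify(auth_manager=auth_manager)
+            shows = sp.current_user_saved_shows(limit=5)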
+ + Parameters: + - code - the response code from authentication + - check_cache - if true, checks for a locally stored token + before requesting a new token + """ + + if check_cache: + token_info = self.validate_token(self.cache_handler.get_cached_token()) + if token_info is not None: + if self.is_token_expired(token_info): + token_info = self.refresh_access_token( + token_info["refresh_token"] + ) + return token_info["access_token"] + + if self.code_verifier is None or self.code_challenge is None: + self.get_pkce_handshake_parameters() + + payload = { + "client_id": self.client_id, + "grant_type": "authorization_code", + "code": code or self.get_authorization_code(), + "redirect_uri": self.redirect_uri, + "code_verifier": self.code_verifier + } + + headers = {"Content-Type": "application/x-www-form-urlencoded"} + + logger.debug( + "sending POST request to %s with Headers: %s and Body: %r", + self.OAUTH_TOKEN_URL, headers, payload + ) + + try: + response = self._session.post( + self.OAUTH_TOKEN_URL, + data=payload, + headers=headers, + verify=True, + proxies=self.proxies, + timeout=self.requests_timeout, + ) + response.raise_for_status() + token_info = response.json() + token_info = self._add_custom_values_to_token_info(token_info) + self.cache_handler.save_token_to_cache(token_info) + return token_info["access_token"] + except requests.exceptions.HTTPError as http_error: + self._handle_oauth_error(http_error) + + def refresh_access_token(self, refresh_token): + payload = { + "refresh_token": refresh_token, + "grant_type": "refresh_token", + "client_id": self.client_id, + } + + headers = {"Content-Type": "application/x-www-form-urlencoded"} + + logger.debug( + "sending POST request to %s with Headers: %s and Body: %r", + self.OAUTH_TOKEN_URL, headers, payload + ) + + try: + response = self._session.post( + self.OAUTH_TOKEN_URL, + data=payload, + headers=headers, + proxies=self.proxies, + timeout=self.requests_timeout, + ) + response.raise_for_status() + token_info = response.json() + token_info = self._add_custom_values_to_token_info(token_info) + if "refresh_token" not in token_info: + token_info["refresh_token"] = refresh_token + self.cache_handler.save_token_to_cache(token_info) + return token_info + except requests.exceptions.HTTPError as http_error: + self._handle_oauth_error(http_error) + + def parse_response_code(self, url): + """ Parse the response code in the given response url + + Parameters: + - url - the response url + """ + _, code = self.parse_auth_response_url(url) + if code is None: + return url + else: + return code + + @staticmethod + def parse_auth_response_url(url): + return SpotifyOAuth.parse_auth_response_url(url) + + def get_cached_token(self): + warnings.warn("Calling get_cached_token directly on the SpotifyPKCE object will be " + + "deprecated. Instead, please specify a CacheFileHandler instance as " + + "the cache_handler in SpotifyOAuth and use the CacheFileHandler's " + + "get_cached_token method. You can replace:\n\tsp.get_cached_token()" + + "\n\nWith:\n\tsp.validate_token(sp.cache_handler.get_cached_token())", + DeprecationWarning + ) + return self.validate_token(self.cache_handler.get_cached_token()) + + def _save_token_info(self, token_info): + warnings.warn("Calling _save_token_info directly on the SpotifyOAuth object will be " + + "deprecated. 
Instead, please specify a CacheFileHandler instance as " + + "the cache_handler in SpotifyOAuth and use the CacheFileHandler's " + + "save_token_to_cache method.", + DeprecationWarning + ) + self.cache_handler.save_token_to_cache(token_info) + return None + + +class SpotifyImplicitGrant(SpotifyAuthBase): + """ Implements Implicit Grant Flow for client apps + + This auth manager enables *user and non-user* endpoints with only + a client secret, redirect uri, and username. The user will need to + copy and paste a URI from the browser every hour. + + Security Warning + ----------------- + The OAuth standard no longer recommends the Implicit Grant Flow for + client-side code. Spotify has implemented the OAuth-suggested PKCE + extension that removes the need for a client secret in the + Authentication Code flow. Use the SpotifyPKCE auth manager instead + of SpotifyImplicitGrant. + + SpotifyPKCE contains all of the functionality of + SpotifyImplicitGrant, plus automatic response retrieval and + refreshable tokens. Only a few replacements need to be made: + + * get_auth_response()['access_token'] -> + get_access_token(get_authorization_code()) + * get_auth_response() -> + get_access_token(get_authorization_code()); get_cached_token() + * parse_response_token(url)['access_token'] -> + get_access_token(parse_response_code(url)) + * parse_response_token(url) -> + get_access_token(parse_response_code(url)); get_cached_token() + + The security concern in the Implicit Grant flow is that the token is + returned in the URL and can be intercepted through the browser. A + request with an authorization code and proof of origin could not be + easily intercepted without a compromised network. + """ + OAUTH_AUTHORIZE_URL = "https://accounts.spotify.com/authorize" + + def __init__(self, + client_id=None, + redirect_uri=None, + state=None, + scope=None, + cache_path=None, + username=None, + show_dialog=False, + cache_handler=None): + """ Creates Auth Manager using the Implicit Grant flow + + **See help(SpotifyImplicitGrant) for full Security Warning** + + Parameters + ---------- + * client_id: Must be supplied or set as environment variable + * redirect_uri: Must be supplied or set as environment variable + * state: May be supplied, no verification is performed + * scope: Optional, either a list of scopes or comma separated string of scopes. + e.g, "playlist-read-private,playlist-read-collaborative" + * cache_handler: An instance of the `CacheHandler` class to handle + getting and saving cached authorization tokens. + May be supplied, will otherwise use `CacheFileHandler`. + (takes precedence over `cache_path` and `username`) + * cache_path: (deprecated) May be supplied, will otherwise be generated + (takes precedence over `username`) + * username: (deprecated) May be supplied or set as environment variable + (will set `cache_path` to `.cache-{username}`) + * show_dialog: Interpreted as boolean + """ + logger.warning("The OAuth standard no longer recommends the Implicit " + "Grant Flow for client-side code. Use the SpotifyPKCE " + "auth manager instead of SpotifyImplicitGrant. For " + "more details and a guide to switching, see " + "help(SpotifyImplicitGrant).") + + self.client_id = client_id + self.redirect_uri = redirect_uri + self.state = state + if username or cache_path: + warnings.warn("Specifying cache_path or username as arguments to " + + "SpotifyImplicitGrant will be deprecated. 
Instead, please create " + + "a CacheFileHandler instance with the desired cache_path and " + + "username and pass it to SpotifyImplicitGrant as the " + + "cache_handler. For example:\n\n" + + "\tfrom spotipy.oauth2 import CacheFileHandler\n" + + "\thandler = CacheFileHandler(cache_path=cache_path, " + + "username=username)\n" + + "\tsp = spotipy.SpotifyImplicitGrant(client_id, client_secret, " + + "redirect_uri, cache_handler=handler)", + DeprecationWarning + ) + if cache_handler: + warnings.warn("A cache_handler has been specified along with a cache_path or " + + "username. The cache_path and username arguments will be ignored.") + if cache_handler: + assert issubclass(type(cache_handler), CacheHandler), \ + "type(cache_handler): " + str(type(cache_handler)) + " != " + str(CacheHandler) + self.cache_handler = cache_handler + else: + username = (username or os.getenv(CLIENT_CREDS_ENV_VARS["client_username"])) + self.cache_handler = CacheFileHandler( + username=username, + cache_path=cache_path + ) + self.scope = self._normalize_scope(scope) + self.show_dialog = show_dialog + self._session = None # As to not break inherited __del__ + + def validate_token(self, token_info): + if token_info is None: + return None + + # if scopes don't match, then bail + if "scope" not in token_info or not self._is_scope_subset( + self.scope, token_info["scope"] + ): + return None + + if self.is_token_expired(token_info): + return None + + return token_info + + def get_access_token(self, + state=None, + response=None, + check_cache=True): + """ Gets Auth Token from cache (preferred) or user interaction + + Parameters + ---------- + * state: May be given, overrides (without changing) self.state + * response: URI with token, can break expiration checks + * check_cache: Interpreted as boolean + """ + if check_cache: + token_info = self.validate_token(self.cache_handler.get_cached_token()) + if not (token_info is None or self.is_token_expired(token_info)): + return token_info["access_token"] + + if response: + token_info = self.parse_response_token(response) + else: + token_info = self.get_auth_response(state) + token_info = self._add_custom_values_to_token_info(token_info) + self.cache_handler.save_token_to_cache(token_info) + + return token_info["access_token"] + + def get_authorize_url(self, state=None): + """ Gets the URL to use to authorize this app """ + payload = { + "client_id": self.client_id, + "response_type": "token", + "redirect_uri": self.redirect_uri, + } + if self.scope: + payload["scope"] = self.scope + if state is None: + state = self.state + if state is not None: + payload["state"] = state + if self.show_dialog: + payload["show_dialog"] = True + + urlparams = urllibparse.urlencode(payload) + + return "%s?%s" % (self.OAUTH_AUTHORIZE_URL, urlparams) + + def parse_response_token(self, url, state=None): + """ Parse the response code in the given response url """ + remote_state, token, t_type, exp_in = self.parse_auth_response_url(url) + if state is None: + state = self.state + if state is not None and remote_state != state: + raise SpotifyStateError(state, remote_state) + return {"access_token": token, "token_type": t_type, + "expires_in": exp_in, "state": state} + + @staticmethod + def parse_auth_response_url(url): + url_components = urlparse(url) + fragment_s = url_components.fragment + query_s = url_components.query + form = dict(i.split('=') for i + in (fragment_s or query_s or url).split('&')) + if "error" in form: + raise SpotifyOauthError("Received error from auth server: " + 
"{}".format(form["error"]), + state=form["state"]) + if "expires_in" in form: + form["expires_in"] = int(form["expires_in"]) + return tuple(form.get(param) for param in ["state", "access_token", + "token_type", "expires_in"]) + + def _open_auth_url(self, state=None): + auth_url = self.get_authorize_url(state) + try: + webbrowser.open(auth_url) + logger.info("Opened %s in your browser", auth_url) + except webbrowser.Error: + logger.error("Please navigate here: %s", auth_url) + + def get_auth_response(self, state=None): + """ Gets a new auth **token** with user interaction """ + logger.info('User authentication requires interaction with your ' + 'web browser. Once you enter your credentials and ' + 'give authorization, you will be redirected to ' + 'a url. Paste that url you were directed to to ' + 'complete the authorization.') + + redirect_info = urlparse(self.redirect_uri) + redirect_host, redirect_port = get_host_port(redirect_info.netloc) + # Implicit Grant tokens are returned in a hash fragment + # which is only available to the browser. Therefore, interactive + # URL retrieval is required. + if (redirect_host in ("127.0.0.1", "localhost") + and redirect_info.scheme == "http" and redirect_port): + logger.warning('Using a local redirect URI with a ' + 'port, likely expecting automatic ' + 'retrieval. Due to technical limitations, ' + 'the authentication token cannot be ' + 'automatically retrieved and must be ' + 'copied and pasted.') + + self._open_auth_url(state) + logger.info('Paste that url you were directed to in order to ' + 'complete the authorization') + response = SpotifyImplicitGrant._get_user_input("Enter the URL you " + "were redirected to: ") + return self.parse_response_token(response, state) + + def _add_custom_values_to_token_info(self, token_info): + """ + Store some values that aren't directly provided by a Web API + response. + """ + token_info["expires_at"] = int(time.time()) + token_info["expires_in"] + token_info["scope"] = self.scope + return token_info + + def get_cached_token(self): + warnings.warn("Calling get_cached_token directly on the SpotifyImplicitGrant " + + "object will be deprecated. Instead, please specify a " + + "CacheFileHandler instance as the cache_handler in SpotifyOAuth " + + "and use the CacheFileHandler's get_cached_token method. " + + "You can replace:\n\tsp.get_cached_token()" + + "\n\nWith:\n\tsp.validate_token(sp.cache_handler.get_cached_token())", + DeprecationWarning + ) + return self.validate_token(self.cache_handler.get_cached_token()) + + def _save_token_info(self, token_info): + warnings.warn("Calling _save_token_info directly on the SpotifyImplicitGrant " + + "object will be deprecated. Instead, please specify a " + + "CacheFileHandler instance as the cache_handler in SpotifyOAuth " + + "and use the CacheFileHandler's save_token_to_cache method.", + DeprecationWarning + ) + self.cache_handler.save_token_to_cache(token_info) + return None + + +class RequestHandler(BaseHTTPRequestHandler): + def do_GET(self): + self.server.auth_code = self.server.error = None + try: + state, auth_code = SpotifyOAuth.parse_auth_response_url(self.path) + self.server.state = state + self.server.auth_code = auth_code + except SpotifyOauthError as error: + self.server.error = error + + self.send_response(200) + self.send_header("Content-Type", "text/html") + self.end_headers() + + if self.server.auth_code: + status = "successful" + elif self.server.error: + status = "failed ({})".format(self.server.error) + else: + self._write("
<html><body><h1>Invalid request</h1></body></html>") + return + + self._write("""<html> +<body> +<h1>Authentication status: {}</h1>
+This window can be closed. + + + +""".format(status)) + + def _write(self, text): + return self.wfile.write(text.encode("utf-8")) + + def log_message(self, format, *args): + return + + +def start_local_http_server(port, handler=RequestHandler): + server = HTTPServer(("127.0.0.1", port), handler) + server.allow_reuse_address = True + server.auth_code = None + server.auth_token_form = None + server.error = None + return server diff --git a/templates/skills/spotify/dependencies/spotipy/util.py b/templates/skills/spotify/dependencies/spotipy/util.py new file mode 100644 index 00000000..b949a618 --- /dev/null +++ b/templates/skills/spotify/dependencies/spotipy/util.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- + +""" Shows a user's playlists (need to be authenticated via oauth) """ + +__all__ = ["CLIENT_CREDS_ENV_VARS", "prompt_for_user_token"] + +import logging +import os +import warnings + +import spotipy + +LOGGER = logging.getLogger(__name__) + +CLIENT_CREDS_ENV_VARS = { + "client_id": "SPOTIPY_CLIENT_ID", + "client_secret": "SPOTIPY_CLIENT_SECRET", + "client_username": "SPOTIPY_CLIENT_USERNAME", + "redirect_uri": "SPOTIPY_REDIRECT_URI", +} + + +def prompt_for_user_token( + username=None, + scope=None, + client_id=None, + client_secret=None, + redirect_uri=None, + cache_path=None, + oauth_manager=None, + show_dialog=False +): + warnings.warn( + "'prompt_for_user_token' is deprecated." + "Use the following instead: " + " auth_manager=SpotifyOAuth(scope=scope)" + " spotipy.Spotify(auth_manager=auth_manager)", + DeprecationWarning + ) + """ prompts the user to login if necessary and returns + the user token suitable for use with the spotipy.Spotify + constructor + + Parameters: + + - username - the Spotify username (optional) + - scope - the desired scope of the request (optional) + - client_id - the client id of your app (required) + - client_secret - the client secret of your app (required) + - redirect_uri - the redirect URI of your app (required) + - cache_path - path to location to save tokens (optional) + - oauth_manager - Oauth manager object (optional) + - show_dialog - If true, a login prompt always shows (optional, defaults to False) + + """ + if not oauth_manager: + if not client_id: + client_id = os.getenv("SPOTIPY_CLIENT_ID") + + if not client_secret: + client_secret = os.getenv("SPOTIPY_CLIENT_SECRET") + + if not redirect_uri: + redirect_uri = os.getenv("SPOTIPY_REDIRECT_URI") + + if not client_id: + LOGGER.warning( + """ + You need to set your Spotify API credentials. 
+ You can do this by setting environment variables like so: + + export SPOTIPY_CLIENT_ID='your-spotify-client-id' + export SPOTIPY_CLIENT_SECRET='your-spotify-client-secret' + export SPOTIPY_REDIRECT_URI='your-app-redirect-url' + + Get your credentials at + https://developer.spotify.com/my-applications + """ + ) + raise spotipy.SpotifyException(550, -1, "no credentials set") + + sp_oauth = oauth_manager or spotipy.SpotifyOAuth( + client_id, + client_secret, + redirect_uri, + scope=scope, + cache_path=cache_path, + username=username, + show_dialog=show_dialog + ) + + # try to get a valid token for this user, from the cache, + # if not in the cache, then create a new (this will send + # the user to a web page where they can authorize this app) + + token_info = sp_oauth.validate_token(sp_oauth.cache_handler.get_cached_token()) + + if not token_info: + code = sp_oauth.get_auth_response() + token = sp_oauth.get_access_token(code, as_dict=False) + else: + return token_info["access_token"] + + # Auth'ed API request + if token: + return token + else: + return None + + +def get_host_port(netloc): + if ":" in netloc: + host, port = netloc.split(":", 1) + port = int(port) + else: + host = netloc + port = None + + return host, port + + +def normalize_scope(scope): + if scope: + if isinstance(scope, str): + scopes = scope.split(',') + elif isinstance(scope, list) or isinstance(scope, tuple): + scopes = scope + else: + raise Exception( + "Unsupported scope value, please either provide a list of scopes, " + "or a string of scopes separated by commas" + ) + return " ".join(sorted(scopes)) + else: + return None diff --git a/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/INSTALLER b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/METADATA b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/METADATA new file mode 100644 index 00000000..9da6d09f --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/METADATA @@ -0,0 +1,154 @@ +Metadata-Version: 2.1 +Name: urllib3 +Version: 2.2.1 +Summary: HTTP library with thread-safe connection pooling, file post, and more. 
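The deprecation notices in the vendored spotipy modules above (`get_cached_token`, `_save_token_info`, `prompt_for_user_token`) all point at the same replacement pattern: construct a `SpotifyOAuth` auth manager and hand it to the client. A minimal sketch of that pattern, assuming the `SPOTIPY_*` environment variables from `CLIENT_CREDS_ENV_VARS` are set; the scope is only an example:

```python
import spotipy
from spotipy.oauth2 import SpotifyOAuth

# SpotifyOAuth reads SPOTIPY_CLIENT_ID, SPOTIPY_CLIENT_SECRET and
# SPOTIPY_REDIRECT_URI from the environment when not passed explicitly.
auth_manager = SpotifyOAuth(scope="user-read-playback-state")
sp = spotipy.Spotify(auth_manager=auth_manager)

# Token caching and refresh happen inside the auth manager's cache_handler,
# which is exactly what the get_cached_token() DeprecationWarning recommends.
print(sp.me()["display_name"])
```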
+Project-URL: Changelog, https://github.com/urllib3/urllib3/blob/main/CHANGES.rst +Project-URL: Documentation, https://urllib3.readthedocs.io +Project-URL: Code, https://github.com/urllib3/urllib3 +Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues +Author-email: Andrey Petrov <andrey.petrov@shazow.net> +Maintainer-email: Seth Michael Larson , Quentin Pradet , Illia Volochii +License-File: LICENSE.txt +Keywords: filepost,http,httplib,https,pooling,ssl,threadsafe,urllib +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Python: >=3.8 +Provides-Extra: brotli +Requires-Dist: brotli>=1.0.9; (platform_python_implementation == 'CPython') and extra == 'brotli' +Requires-Dist: brotlicffi>=0.8.0; (platform_python_implementation != 'CPython') and extra == 'brotli' +Provides-Extra: h2 +Requires-Dist: h2<5,>=4; extra == 'h2' +Provides-Extra: socks +Requires-Dist: pysocks!=1.5.7,<2.0,>=1.5.6; extra == 'socks' +Provides-Extra: zstd +Requires-Dist: zstandard>=0.18.0; extra == 'zstd' +Description-Content-Type: text/markdown + +

+ +![urllib3](https://github.com/urllib3/urllib3/raw/main/docs/_static/banner_github.svg) + + +[Badges: PyPI Version, Python Versions, Join our Discord, Coverage Status, Build Status on GitHub, Documentation Status, OpenSSF Scorecard, SLSA 3, CII Best Practices] +

+ +urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the +Python ecosystem already uses urllib3 and you should too. +urllib3 brings many critical features that are missing from the Python +standard libraries: + +- Thread safety. +- Connection pooling. +- Client-side SSL/TLS verification. +- File uploads with multipart encoding. +- Helpers for retrying requests and dealing with HTTP redirects. +- Support for gzip, deflate, brotli, and zstd encoding. +- Proxy support for HTTP and SOCKS. +- 100% test coverage. + +urllib3 is powerful and easy to use: + +```python3 +>>> import urllib3 +>>> resp = urllib3.request("GET", "http://httpbin.org/robots.txt") +>>> resp.status +200 +>>> resp.data +b"User-agent: *\nDisallow: /deny\n" +``` + +## Installing + +urllib3 can be installed with [pip](https://pip.pypa.io): + +```bash +$ python -m pip install urllib3 +``` + +Alternatively, you can grab the latest source code from [GitHub](https://github.com/urllib3/urllib3): + +```bash +$ git clone https://github.com/urllib3/urllib3.git +$ cd urllib3 +$ pip install . +``` + + +## Documentation + +urllib3 has usage and reference documentation at [urllib3.readthedocs.io](https://urllib3.readthedocs.io). + + +## Community + +urllib3 has a [community Discord channel](https://discord.gg/urllib3) for asking questions and +collaborating with other contributors. Drop by and say hello 👋 + + +## Contributing + +urllib3 happily accepts contributions. Please see our +[contributing documentation](https://urllib3.readthedocs.io/en/latest/contributing.html) +for some tips on getting started. + + +## Security Disclosures + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure with maintainers. + + +## Maintainers + +- [@sethmlarson](https://github.com/sethmlarson) (Seth M. Larson) +- [@pquentin](https://github.com/pquentin) (Quentin Pradet) +- [@illia-v](https://github.com/illia-v) (Illia Volochii) +- [@theacodes](https://github.com/theacodes) (Thea Flowers) +- [@haikuginger](https://github.com/haikuginger) (Jess Shapiro) +- [@lukasa](https://github.com/lukasa) (Cory Benfield) +- [@sigmavirus24](https://github.com/sigmavirus24) (Ian Stapleton Cordasco) +- [@shazow](https://github.com/shazow) (Andrey Petrov) + +👋 + + +## Sponsorship + +If your company benefits from this library, please consider [sponsoring its +development](https://urllib3.readthedocs.io/en/latest/sponsors.html). + + +## For Enterprise + +Professional support for urllib3 is available as part of the [Tidelift +Subscription][1]. Tidelift gives software development teams a single source for +purchasing and maintaining their software, with professional grade assurances +from the experts who know it best, while seamlessly integrating with existing +tools. 
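The feature list above (pooling, retries, timeouts, thread safety) is mostly exercised through `PoolManager` keyword arguments rather than separate APIs. A small sketch using documented urllib3 2.x parameters, with httpbin.org standing in for any endpoint:

```python
import urllib3
from urllib3.util import Retry, Timeout

# A single shared PoolManager provides the thread-safe connection pooling
# the README advertises; Retry/Timeout configure the helper behaviors.
http = urllib3.PoolManager(
    retries=Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503]),
    timeout=Timeout(connect=2.0, read=5.0),
)

resp = http.request("GET", "https://httpbin.org/robots.txt")
print(resp.status, len(resp.data))
```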
+ +[1]: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme diff --git a/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/RECORD b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/RECORD new file mode 100644 index 00000000..9d1acea8 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/RECORD @@ -0,0 +1,75 @@ +urllib3-2.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +urllib3-2.2.1.dist-info/METADATA,sha256=uROmjQwfAbwRYjV9PMdc5JF5NA3kRkpoKafPkNzybfc,6434 +urllib3-2.2.1.dist-info/RECORD,, +urllib3-2.2.1.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87 +urllib3-2.2.1.dist-info/licenses/LICENSE.txt,sha256=Ew46ZNX91dCWp1JpRjSn2d8oRGnehuVzIQAmgEHj1oY,1093 +urllib3/__init__.py,sha256=JMo1tg1nIV1AeJ2vENC_Txfl0e5h6Gzl9DGVk1rWRbo,6979 +urllib3/__pycache__/__init__.cpython-311.pyc,, +urllib3/__pycache__/_base_connection.cpython-311.pyc,, +urllib3/__pycache__/_collections.cpython-311.pyc,, +urllib3/__pycache__/_request_methods.cpython-311.pyc,, +urllib3/__pycache__/_version.cpython-311.pyc,, +urllib3/__pycache__/connection.cpython-311.pyc,, +urllib3/__pycache__/connectionpool.cpython-311.pyc,, +urllib3/__pycache__/exceptions.cpython-311.pyc,, +urllib3/__pycache__/fields.cpython-311.pyc,, +urllib3/__pycache__/filepost.cpython-311.pyc,, +urllib3/__pycache__/http2.cpython-311.pyc,, +urllib3/__pycache__/poolmanager.cpython-311.pyc,, +urllib3/__pycache__/response.cpython-311.pyc,, +urllib3/_base_connection.py,sha256=p-DOG_Me7-sJXO1R9VgDpNmdVU_kIS8VtaC7ptEllA0,5640 +urllib3/_collections.py,sha256=vzKA-7X-9resOamEWq52uV1nHshChjbYDvz47H0mMjw,17400 +urllib3/_request_methods.py,sha256=ucEpHQyQf06b9o1RxKLkCpzGH0ct-v7X2xGpU6rmmlo,9984 +urllib3/_version.py,sha256=12idLAcGmrAURPX52rGioBo33oQ__-ENJEdeqHvUUZg,98 +urllib3/connection.py,sha256=zFgaaoqrICsl7-kBp-_4va9m82sYhioAuy4-4iDpK0I,34704 +urllib3/connectionpool.py,sha256=XjTfYowLwN5ZzRMO41_OTbGNX4ANifgYVpWsVMRuC00,43556 +urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +urllib3/contrib/__pycache__/__init__.cpython-311.pyc,, +urllib3/contrib/__pycache__/pyopenssl.cpython-311.pyc,, +urllib3/contrib/__pycache__/socks.cpython-311.pyc,, +urllib3/contrib/emscripten/__init__.py,sha256=u6KNgzjlFZbuAAXa_ybCR7gQ71VJESnF-IIdDA73brw,733 +urllib3/contrib/emscripten/__pycache__/__init__.cpython-311.pyc,, +urllib3/contrib/emscripten/__pycache__/connection.cpython-311.pyc,, +urllib3/contrib/emscripten/__pycache__/fetch.cpython-311.pyc,, +urllib3/contrib/emscripten/__pycache__/request.cpython-311.pyc,, +urllib3/contrib/emscripten/__pycache__/response.cpython-311.pyc,, +urllib3/contrib/emscripten/connection.py,sha256=kaBe2tWt7Yy9vNUFRBV7CSyDnfhCYILGxju9KTZj8Sw,8755 +urllib3/contrib/emscripten/emscripten_fetch_worker.js,sha256=CDfYF_9CDobtx2lGidyJ1zjDEvwNT5F-dchmVWXDh0E,3655 +urllib3/contrib/emscripten/fetch.py,sha256=ymwJlHBBuw6WTpKgPHpdmmrNBxlsr75HqoD4Rn27YXk,14131 +urllib3/contrib/emscripten/request.py,sha256=mL28szy1KvE3NJhWor5jNmarp8gwplDU-7gwGZY5g0Q,566 +urllib3/contrib/emscripten/response.py,sha256=wIDmdJ4doFWqLl5s86l9n0V70gFjQ2HWaPgz69jM52E,9546 +urllib3/contrib/pyopenssl.py,sha256=X31eCYGwB09EkAHX8RhDKC0X0Ki7d0cCVWoMJZUM5bQ,19161 +urllib3/contrib/socks.py,sha256=gFS2-zOw4_vLGpUvExOf3fNVT8liz6vhM2t6lBPn3CY,7572 +urllib3/exceptions.py,sha256=RDaiudtR7rqbVKTKpLSgZBBtwaIqV7eZtervZV_mZag,9393 +urllib3/fields.py,sha256=8vi0PeRo_pE5chPmJA07LZtMkVls4UrBS1k2xM506jM,10843 
+urllib3/filepost.py,sha256=-9qJT11cNGjO9dqnI20-oErZuTvNaM18xZZPCjZSbOE,2395 +urllib3/http2.py,sha256=4QQcjTM9UYOQZe0r8KnA8anU9ST4p_s3SB3gRTueyPc,7480 +urllib3/poolmanager.py,sha256=fcC3OwjFKxha06NsOORwbZOzrVt1pyY-bNCbKiqC0l8,22935 +urllib3/py.typed,sha256=UaCuPFa3H8UAakbt-5G8SPacldTOGvJv18pPjUJ5gDY,93 +urllib3/response.py,sha256=lmvseToQbkLXuFyA3jcSyCPjTgSfa6YPA4xUhVqq8QI,43874 +urllib3/util/__init__.py,sha256=-qeS0QceivazvBEKDNFCAI-6ACcdDOE4TMvo7SLNlAQ,1001 +urllib3/util/__pycache__/__init__.cpython-311.pyc,, +urllib3/util/__pycache__/connection.cpython-311.pyc,, +urllib3/util/__pycache__/proxy.cpython-311.pyc,, +urllib3/util/__pycache__/request.cpython-311.pyc,, +urllib3/util/__pycache__/response.cpython-311.pyc,, +urllib3/util/__pycache__/retry.cpython-311.pyc,, +urllib3/util/__pycache__/ssl_.cpython-311.pyc,, +urllib3/util/__pycache__/ssl_match_hostname.cpython-311.pyc,, +urllib3/util/__pycache__/ssltransport.cpython-311.pyc,, +urllib3/util/__pycache__/timeout.cpython-311.pyc,, +urllib3/util/__pycache__/url.cpython-311.pyc,, +urllib3/util/__pycache__/util.cpython-311.pyc,, +urllib3/util/__pycache__/wait.cpython-311.pyc,, +urllib3/util/connection.py,sha256=QeUUEuNmhznpuKNPL-B0IVOkMdMCu8oJX62OC0Vpzug,4462 +urllib3/util/proxy.py,sha256=seP8-Q5B6bB0dMtwPj-YcZZQ30vHuLqRu-tI0JZ2fzs,1148 +urllib3/util/request.py,sha256=PQnBmKUHMQ0hQQ41uhbLNAeA24ke60m6zeiwfwocpGo,8102 +urllib3/util/response.py,sha256=vQE639uoEhj1vpjEdxu5lNIhJCSUZkd7pqllUI0BZOA,3374 +urllib3/util/retry.py,sha256=WB-7x1m7fQH_-Qqtrk2OGvz93GvBTxc-pRn8Vf3p4mg,18384 +urllib3/util/ssl_.py,sha256=FeymdS68RggEROwMB9VLGSqLHq2hRUKnIbQC_bCpGJI,19109 +urllib3/util/ssl_match_hostname.py,sha256=gaWqixoYtQ_GKO8fcRGFj3VXeMoqyxQQuUTPgWeiL_M,5812 +urllib3/util/ssltransport.py,sha256=SF__JQXVcHBQniFJZp3P9q-UeHM310WVwcBwqT9dCLE,9034 +urllib3/util/timeout.py,sha256=4eT1FVeZZU7h7mYD1Jq2OXNe4fxekdNvhoWUkZusRpA,10346 +urllib3/util/url.py,sha256=wHORhp80RAXyTlAIkTqLFzSrkU7J34ZDxX-tN65MBZk,15213 +urllib3/util/util.py,sha256=j3lbZK1jPyiwD34T8IgJzdWEZVT-4E-0vYIJi9UjeNA,1146 +urllib3/util/wait.py,sha256=_ph8IrUR3sqPqi0OopQgJUlH4wzkGeM5CiyA7XGGtmI,4423 diff --git a/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/WHEEL b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/WHEEL new file mode 100644 index 00000000..5998f3aa --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.21.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/licenses/LICENSE.txt b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/licenses/LICENSE.txt new file mode 100644 index 00000000..e6183d02 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3-2.2.1.dist-info/licenses/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/templates/skills/spotify/dependencies/urllib3/__init__.py b/templates/skills/spotify/dependencies/urllib3/__init__.py new file mode 100644 index 00000000..3fe782c8 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/__init__.py @@ -0,0 +1,211 @@ +""" +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more +""" + +from __future__ import annotations + +# Set default logging handler to avoid "No handler found" warnings. +import logging +import sys +import typing +import warnings +from logging import NullHandler + +from . import exceptions +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +from .filepost import _TYPE_FIELDS, encode_multipart_formdata +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import BaseHTTPResponse, HTTPResponse +from .util.request import make_headers +from .util.retry import Retry +from .util.timeout import Timeout + +# Ensure that Python is compiled with OpenSSL 1.1.1+ +# If the 'ssl' module isn't available at all that's +# fine, we only care if the module is available. +try: + import ssl +except ImportError: + pass +else: + if not ssl.OPENSSL_VERSION.startswith("OpenSSL "): # Defensive: + warnings.warn( + "urllib3 v2 only supports OpenSSL 1.1.1+, currently " + f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. " + "See: https://github.com/urllib3/urllib3/issues/3020", + exceptions.NotOpenSSLWarning, + ) + elif ssl.OPENSSL_VERSION_INFO < (1, 1, 1): # Defensive: + raise ImportError( + "urllib3 v2 only supports OpenSSL 1.1.1+, currently " + f"the 'ssl' module is compiled with {ssl.OPENSSL_VERSION!r}. " + "See: https://github.com/urllib3/urllib3/issues/2168" + ) + +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = __version__ + +__all__ = ( + "HTTPConnectionPool", + "HTTPHeaderDict", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "make_headers", + "proxy_from_url", + "request", + "BaseHTTPResponse", +) + +logging.getLogger(__name__).addHandler(NullHandler()) + + +def add_stderr_logger( + level: int = logging.DEBUG, +) -> logging.StreamHandler[typing.TextIO]: + """ + Helper for quickly adding a StreamHandler to the logger. Useful for + debugging. + + Returns the handler after adding it. + """ + # This method needs to be in this __init__.py to get the __name__ correct + # even if urllib3 is vendored within another package. + logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) + logger.addHandler(handler) + logger.setLevel(level) + logger.debug("Added a stderr logging handler to logger: %s", __name__) + return handler + + +# ... Clean up. 
+del NullHandler + + +# All warning filters *must* be appended unless you're really certain that they +# shouldn't be: otherwise, it's very hard for users to use most Python +# mechanisms to silence them. +# SecurityWarning's always go off by default. +warnings.simplefilter("always", exceptions.SecurityWarning, append=True) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) + + +def disable_warnings(category: type[Warning] = exceptions.HTTPWarning) -> None: + """ + Helper for quickly disabling all urllib3 warnings. + """ + warnings.simplefilter("ignore", category) + + +_DEFAULT_POOL = PoolManager() + + +def request( + method: str, + url: str, + *, + body: _TYPE_BODY | None = None, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + preload_content: bool | None = True, + decode_content: bool | None = True, + redirect: bool | None = True, + retries: Retry | bool | int | None = None, + timeout: Timeout | float | int | None = 3, + json: typing.Any | None = None, +) -> BaseHTTPResponse: + """ + A convenience, top-level request method. It uses a module-global ``PoolManager`` instance. + Therefore, its side effects could be shared across dependencies relying on it. + To avoid side effects create a new ``PoolManager`` instance and use it instead. + The method does not accept low-level ``**urlopen_kw`` keyword arguments. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param fields: + Data to encode and send in the request body. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. + + :param bool preload_content: + If True, the response's body will be preloaded into memory. + + :param bool decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param json: + Data to encode and send as JSON with UTF-encoded in the request body. + The ``"Content-Type"`` header will be set to ``"application/json"`` + unless specified otherwise. 
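A short usage sketch of this module-level helper, assuming network access and using httpbin.org as a placeholder endpoint:

```python
import urllib3

# One-shot convenience API: this goes through the module-global PoolManager
# described above, so long-lived applications may prefer their own instance.
resp = urllib3.request(
    "POST",
    "https://httpbin.org/post",
    json={"hello": "world"},  # sets Content-Type: application/json per the docstring
)
print(resp.status)
print(resp.json()["json"])  # HTTPResponse.json() decodes the response body
```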
+ """ + + return _DEFAULT_POOL.request( + method, + url, + body=body, + fields=fields, + headers=headers, + preload_content=preload_content, + decode_content=decode_content, + redirect=redirect, + retries=retries, + timeout=timeout, + json=json, + ) + + +if sys.platform == "emscripten": + from .contrib.emscripten import inject_into_urllib3 # noqa: 401 + + inject_into_urllib3() diff --git a/templates/skills/spotify/dependencies/urllib3/_base_connection.py b/templates/skills/spotify/dependencies/urllib3/_base_connection.py new file mode 100644 index 00000000..bb349c74 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/_base_connection.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +import typing + +from .util.connection import _TYPE_SOCKET_OPTIONS +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT +from .util.url import Url + +_TYPE_BODY = typing.Union[bytes, typing.IO[typing.Any], typing.Iterable[bytes], str] + + +class ProxyConfig(typing.NamedTuple): + ssl_context: ssl.SSLContext | None + use_forwarding_for_https: bool + assert_hostname: None | str | Literal[False] + assert_fingerprint: str | None + + +class _ResponseOptions(typing.NamedTuple): + # TODO: Remove this in favor of a better + # HTTP request/response lifecycle tracking. + request_method: str + request_url: str + preload_content: bool + decode_content: bool + enforce_content_length: bool + + +if typing.TYPE_CHECKING: + import ssl + from typing import Literal, Protocol + + from .response import BaseHTTPResponse + + class BaseHTTPConnection(Protocol): + default_port: typing.ClassVar[int] + default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] + + host: str + port: int + timeout: None | ( + float + ) # Instance doesn't store _DEFAULT_TIMEOUT, must be resolved. + blocksize: int + source_address: tuple[str, int] | None + socket_options: _TYPE_SOCKET_OPTIONS | None + + proxy: Url | None + proxy_config: ProxyConfig | None + + is_verified: bool + proxy_is_verified: bool | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, + socket_options: _TYPE_SOCKET_OPTIONS | None = ..., + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + ) -> None: + ... + + def set_tunnel( + self, + host: str, + port: int | None = None, + headers: typing.Mapping[str, str] | None = None, + scheme: str = "http", + ) -> None: + ... + + def connect(self) -> None: + ... + + def request( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + # We know *at least* botocore is depending on the order of the + # first 3 parameters so to be safe we only mark the later ones + # as keyword-only to ensure we have space to extend. + *, + chunked: bool = False, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> None: + ... + + def getresponse(self) -> BaseHTTPResponse: + ... + + def close(self) -> None: + ... + + @property + def is_closed(self) -> bool: + """Whether the connection either is brand new or has been previously closed. + If this property is True then both ``is_connected`` and ``has_connected_to_proxy`` + properties must be False. 
+ """ + + @property + def is_connected(self) -> bool: + """Whether the connection is actively connected to any origin (proxy or target)""" + + @property + def has_connected_to_proxy(self) -> bool: + """Whether the connection has successfully connected to its proxy. + This returns False if no proxy is in use. Used to determine whether + errors are coming from the proxy layer or from tunnelling to the target origin. + """ + + class BaseHTTPSConnection(BaseHTTPConnection, Protocol): + default_port: typing.ClassVar[int] + default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] + + # Certificate verification methods + cert_reqs: int | str | None + assert_hostname: None | str | Literal[False] + assert_fingerprint: str | None + ssl_context: ssl.SSLContext | None + + # Trusted CAs + ca_certs: str | None + ca_cert_dir: str | None + ca_cert_data: None | str | bytes + + # TLS version + ssl_minimum_version: int | None + ssl_maximum_version: int | None + ssl_version: int | str | None # Deprecated + + # Client certificates + cert_file: str | None + key_file: str | None + key_password: str | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: _TYPE_SOCKET_OPTIONS | None = ..., + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + cert_reqs: int | str | None = None, + assert_hostname: None | str | Literal[False] = None, + assert_fingerprint: str | None = None, + server_hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + ca_certs: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, + ssl_version: int | str | None = None, # Deprecated + cert_file: str | None = None, + key_file: str | None = None, + key_password: str | None = None, + ) -> None: + ... diff --git a/templates/skills/spotify/dependencies/urllib3/_collections.py b/templates/skills/spotify/dependencies/urllib3/_collections.py new file mode 100644 index 00000000..55b03247 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/_collections.py @@ -0,0 +1,483 @@ +from __future__ import annotations + +import typing +from collections import OrderedDict +from enum import Enum, auto +from threading import RLock + +if typing.TYPE_CHECKING: + # We can only import Protocol if TYPE_CHECKING because it's a development + # dependency, and is not available at runtime. + from typing import Protocol + + from typing_extensions import Self + + class HasGettableStringKeys(Protocol): + def keys(self) -> typing.Iterator[str]: + ... + + def __getitem__(self, key: str) -> str: + ... 
+ + +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] + + +# Key type +_KT = typing.TypeVar("_KT") +# Value type +_VT = typing.TypeVar("_VT") +# Default type +_DT = typing.TypeVar("_DT") + +ValidHTTPHeaderSource = typing.Union[ + "HTTPHeaderDict", + typing.Mapping[str, str], + typing.Iterable[typing.Tuple[str, str]], + "HasGettableStringKeys", +] + + +class _Sentinel(Enum): + not_passed = auto() + + +def ensure_can_construct_http_header_dict( + potential: object, +) -> ValidHTTPHeaderSource | None: + if isinstance(potential, HTTPHeaderDict): + return potential + elif isinstance(potential, typing.Mapping): + # Full runtime checking of the contents of a Mapping is expensive, so for the + # purposes of typechecking, we assume that any Mapping is the right shape. + return typing.cast(typing.Mapping[str, str], potential) + elif isinstance(potential, typing.Iterable): + # Similarly to Mapping, full runtime checking of the contents of an Iterable is + # expensive, so for the purposes of typechecking, we assume that any Iterable + # is the right shape. + return typing.cast(typing.Iterable[typing.Tuple[str, str]], potential) + elif hasattr(potential, "keys") and hasattr(potential, "__getitem__"): + return typing.cast("HasGettableStringKeys", potential) + else: + return None + + +class RecentlyUsedContainer(typing.Generic[_KT, _VT], typing.MutableMapping[_KT, _VT]): + """ + Provides a thread-safe dict-like container which maintains up to + ``maxsize`` keys while throwing away the least-recently-used keys beyond + ``maxsize``. + + :param maxsize: + Maximum number of recent elements to retain. + + :param dispose_func: + Every time an item is evicted from the container, + ``dispose_func(value)`` is called. Callback which will get called + """ + + _container: typing.OrderedDict[_KT, _VT] + _maxsize: int + dispose_func: typing.Callable[[_VT], None] | None + lock: RLock + + def __init__( + self, + maxsize: int = 10, + dispose_func: typing.Callable[[_VT], None] | None = None, + ) -> None: + super().__init__() + self._maxsize = maxsize + self.dispose_func = dispose_func + self._container = OrderedDict() + self.lock = RLock() + + def __getitem__(self, key: _KT) -> _VT: + # Re-insert the item, moving it to the end of the eviction line. + with self.lock: + item = self._container.pop(key) + self._container[key] = item + return item + + def __setitem__(self, key: _KT, value: _VT) -> None: + evicted_item = None + with self.lock: + # Possibly evict the existing value of 'key' + try: + # If the key exists, we'll overwrite it, which won't change the + # size of the pool. Because accessing a key should move it to + # the end of the eviction line, we pop it out first. + evicted_item = key, self._container.pop(key) + self._container[key] = value + except KeyError: + # When the key does not exist, we insert the value first so that + # evicting works in all cases, including when self._maxsize is 0 + self._container[key] = value + if len(self._container) > self._maxsize: + # If we didn't evict an existing value, and we've hit our maximum + # size, then we have to evict the least recently used item from + # the beginning of the container. + evicted_item = self._container.popitem(last=False) + + # After releasing the lock on the pool, dispose of any evicted value. 
+ if evicted_item is not None and self.dispose_func: + _, evicted_value = evicted_item + self.dispose_func(evicted_value) + + def __delitem__(self, key: _KT) -> None: + with self.lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self) -> int: + with self.lock: + return len(self._container) + + def __iter__(self) -> typing.NoReturn: + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) + + def clear(self) -> None: + with self.lock: + # Copy pointers to all values, then wipe the mapping + values = list(self._container.values()) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self) -> set[_KT]: # type: ignore[override] + with self.lock: + return set(self._container.keys()) + + +class HTTPHeaderDictItemView(typing.Set[typing.Tuple[str, str]]): + """ + HTTPHeaderDict is unusual for a Mapping[str, str] in that it has two modes of + address. + + If we directly try to get an item with a particular name, we will get a string + back that is the concatenated version of all the values: + + >>> d['X-Header-Name'] + 'Value1, Value2, Value3' + + However, if we iterate over an HTTPHeaderDict's items, we will optionally combine + these values based on whether combine=True was called when building up the dictionary + + >>> d = HTTPHeaderDict({"A": "1", "B": "foo"}) + >>> d.add("A", "2", combine=True) + >>> d.add("B", "bar") + >>> list(d.items()) + [ + ('A', '1, 2'), + ('B', 'foo'), + ('B', 'bar'), + ] + + This class conforms to the interface required by the MutableMapping ABC while + also giving us the nonstandard iteration behavior we want; items with duplicate + keys, ordered by time of first insertion. + """ + + _headers: HTTPHeaderDict + + def __init__(self, headers: HTTPHeaderDict) -> None: + self._headers = headers + + def __len__(self) -> int: + return len(list(self._headers.iteritems())) + + def __iter__(self) -> typing.Iterator[tuple[str, str]]: + return self._headers.iteritems() + + def __contains__(self, item: object) -> bool: + if isinstance(item, tuple) and len(item) == 2: + passed_key, passed_val = item + if isinstance(passed_key, str) and isinstance(passed_val, str): + return self._headers._has_value_for_header(passed_key, passed_val) + return False + + +class HTTPHeaderDict(typing.MutableMapping[str, str]): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. + + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. 
+ + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + """ + + _container: typing.MutableMapping[str, list[str]] + + def __init__(self, headers: ValidHTTPHeaderSource | None = None, **kwargs: str): + super().__init__() + self._container = {} # 'dict' is insert-ordered + if headers is not None: + if isinstance(headers, HTTPHeaderDict): + self._copy_from(headers) + else: + self.extend(headers) + if kwargs: + self.extend(kwargs) + + def __setitem__(self, key: str, val: str) -> None: + # avoid a bytes/str comparison by decoding before httplib + if isinstance(key, bytes): + key = key.decode("latin-1") + self._container[key.lower()] = [key, val] + + def __getitem__(self, key: str) -> str: + val = self._container[key.lower()] + return ", ".join(val[1:]) + + def __delitem__(self, key: str) -> None: + del self._container[key.lower()] + + def __contains__(self, key: object) -> bool: + if isinstance(key, str): + return key.lower() in self._container + return False + + def setdefault(self, key: str, default: str = "") -> str: + return super().setdefault(key, default) + + def __eq__(self, other: object) -> bool: + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return False + else: + other_as_http_header_dict = type(self)(maybe_constructable) + + return {k.lower(): v for k, v in self.itermerged()} == { + k.lower(): v for k, v in other_as_http_header_dict.itermerged() + } + + def __ne__(self, other: object) -> bool: + return not self.__eq__(other) + + def __len__(self) -> int: + return len(self._container) + + def __iter__(self) -> typing.Iterator[str]: + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + + def discard(self, key: str) -> None: + try: + del self[key] + except KeyError: + pass + + def add(self, key: str, val: str, *, combine: bool = False) -> None: + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + If this is called with combine=True, instead of adding a new header value + as a distinct item during iteration, this will instead append the value to + any existing header value with a comma. If no existing header value exists + for the key, then the value will simply be added, ignoring the combine parameter. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + >>> list(headers.items()) + [('foo', 'bar'), ('foo', 'baz')] + >>> headers.add('foo', 'quz', combine=True) + >>> list(headers.items()) + [('foo', 'bar, baz, quz')] + """ + # avoid a bytes/str comparison by decoding before httplib + if isinstance(key, bytes): + key = key.decode("latin-1") + key_lower = key.lower() + new_vals = [key, val] + # Keep the common case aka no item present as fast as possible + vals = self._container.setdefault(key_lower, new_vals) + if new_vals is not vals: + # if there are values here, then there is at least the initial + # key/value pair + assert len(vals) >= 2 + if combine: + vals[-1] = vals[-1] + ", " + val + else: + vals.append(val) + + def extend(self, *args: ValidHTTPHeaderSource, **kwargs: str) -> None: + """Generic import function for any type of header-like object. 
+ Adapted version of MutableMapping.update in order to insert items + with self.add instead of self.__setitem__ + """ + if len(args) > 1: + raise TypeError( + f"extend() takes at most 1 positional arguments ({len(args)} given)" + ) + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, typing.Mapping): + for key, val in other.items(): + self.add(key, val) + elif isinstance(other, typing.Iterable): + other = typing.cast(typing.Iterable[typing.Tuple[str, str]], other) + for key, value in other: + self.add(key, value) + elif hasattr(other, "keys") and hasattr(other, "__getitem__"): + # THIS IS NOT A TYPESAFE BRANCH + # In this branch, the object has a `keys` attr but is not a Mapping or any of + # the other types indicated in the method signature. We do some stuff with + # it as though it partially implements the Mapping interface, but we're not + # doing that stuff safely AT ALL. + for key in other.keys(): + self.add(key, other[key]) + + for key, value in kwargs.items(): + self.add(key, value) + + @typing.overload + def getlist(self, key: str) -> list[str]: + ... + + @typing.overload + def getlist(self, key: str, default: _DT) -> list[str] | _DT: + ... + + def getlist( + self, key: str, default: _Sentinel | _DT = _Sentinel.not_passed + ) -> list[str] | _DT: + """Returns a list of all the values for the named field. Returns an + empty list if the key doesn't exist.""" + try: + vals = self._container[key.lower()] + except KeyError: + if default is _Sentinel.not_passed: + # _DT is unbound; empty list is instance of List[str] + return [] + # _DT is bound; default is instance of _DT + return default + else: + # _DT may or may not be bound; vals[1:] is instance of List[str], which + # meets our external interface requirement of `Union[List[str], _DT]`. + return vals[1:] + + def _prepare_for_method_change(self) -> Self: + """ + Remove content-specific header fields before changing the request + method to GET or HEAD according to RFC 9110, Section 15.4. 
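The multi-valued machinery above (`add`, `getlist`, and the case-insensitive views) in action; a short sketch using only methods defined in this file:

```python
from urllib3 import HTTPHeaderDict

h = HTTPHeaderDict()
h.add("Set-Cookie", "a=1")
h.add("set-cookie", "b=2")      # case-insensitive: same field, value appended
print(h["SET-COOKIE"])          # 'a=1, b=2' -- the joined __getitem__ view
print(h.getlist("set-cookie"))  # ['a=1', 'b=2'] -- the individual values
print(h.getlist("x-missing"))   # [] -- the documented empty-list default
```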
+ """ + content_specific_headers = [ + "Content-Encoding", + "Content-Language", + "Content-Location", + "Content-Type", + "Content-Length", + "Digest", + "Last-Modified", + ] + for header in content_specific_headers: + self.discard(header) + return self + + # Backwards compatibility for httplib + getheaders = getlist + getallmatchingheaders = getlist + iget = getlist + + # Backwards compatibility for http.cookiejar + get_all = getlist + + def __repr__(self) -> str: + return f"{type(self).__name__}({dict(self.itermerged())})" + + def _copy_from(self, other: HTTPHeaderDict) -> None: + for key in other: + val = other.getlist(key) + self._container[key.lower()] = [key, *val] + + def copy(self) -> HTTPHeaderDict: + clone = type(self)() + clone._copy_from(self) + return clone + + def iteritems(self) -> typing.Iterator[tuple[str, str]]: + """Iterate over all header lines, including duplicate ones.""" + for key in self: + vals = self._container[key.lower()] + for val in vals[1:]: + yield vals[0], val + + def itermerged(self) -> typing.Iterator[tuple[str, str]]: + """Iterate over all headers, merging duplicate ones together.""" + for key in self: + val = self._container[key.lower()] + yield val[0], ", ".join(val[1:]) + + def items(self) -> HTTPHeaderDictItemView: # type: ignore[override] + return HTTPHeaderDictItemView(self) + + def _has_value_for_header(self, header_name: str, potential_value: str) -> bool: + if header_name in self: + return potential_value in self._container[header_name.lower()][1:] + return False + + def __ior__(self, other: object) -> HTTPHeaderDict: + # Supports extending a header dict in-place using operator |= + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + self.extend(maybe_constructable) + return self + + def __or__(self, other: object) -> HTTPHeaderDict: + # Supports merging header dicts using operator | + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + result = self.copy() + result.extend(maybe_constructable) + return result + + def __ror__(self, other: object) -> HTTPHeaderDict: + # Supports merging header dicts using operator | when other is on left side + # combining items with add instead of __setitem__ + maybe_constructable = ensure_can_construct_http_header_dict(other) + if maybe_constructable is None: + return NotImplemented + result = type(self)(maybe_constructable) + result.extend(self) + return result diff --git a/templates/skills/spotify/dependencies/urllib3/_request_methods.py b/templates/skills/spotify/dependencies/urllib3/_request_methods.py new file mode 100644 index 00000000..632042f0 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/_request_methods.py @@ -0,0 +1,279 @@ +from __future__ import annotations + +import json as _json +import typing +from urllib.parse import urlencode + +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from .filepost import _TYPE_FIELDS, encode_multipart_formdata +from .response import BaseHTTPResponse + +__all__ = ["RequestMethods"] + +_TYPE_ENCODE_URL_FIELDS = typing.Union[ + typing.Sequence[typing.Tuple[str, typing.Union[str, bytes]]], + typing.Mapping[str, typing.Union[str, bytes]], +] + + +class RequestMethods: + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as 
:class:`urllib3.HTTPConnectionPool` and + :class:`urllib3.PoolManager`. + + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. + + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). + + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} + + def __init__(self, headers: typing.Mapping[str, str] | None = None) -> None: + self.headers = headers or {} + + def urlopen( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + encode_multipart: bool = True, + multipart_boundary: str | None = None, + **kw: typing.Any, + ) -> BaseHTTPResponse: # Abstract + raise NotImplementedError( + "Classes extending RequestMethods must implement " + "their own ``urlopen`` method." + ) + + def request( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + json: typing.Any | None = None, + **urlopen_kw: typing.Any, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param fields: + Data to encode and send in the request body. Values are processed + by :func:`urllib.parse.urlencode`. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param json: + Data to encode and send as JSON with UTF-encoded in the request body. + The ``"Content-Type"`` header will be set to ``"application/json"`` + unless specified otherwise. 
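How the dispatch described above plays out in practice; a sketch assuming network access, with httpbin.org as a placeholder:

```python
import urllib3

http = urllib3.PoolManager()

# GET is in _encode_url_methods, so fields are URL-encoded into the query
# string via request_encode_url.
r1 = http.request("GET", "https://httpbin.org/get", fields={"q": "urllib3"})

# POST goes through request_encode_body; json= is the mutually exclusive
# alternative to body= described in the docstring.
r2 = http.request("POST", "https://httpbin.org/post", json={"q": "urllib3"})

print(r1.status, r2.status)
```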
+ """ + method = method.upper() + + if json is not None and body is not None: + raise TypeError( + "request got values for both 'body' and 'json' parameters which are mutually exclusive" + ) + + if json is not None: + if headers is None: + headers = self.headers + + if not ("content-type" in map(str.lower, headers.keys())): + headers = HTTPHeaderDict(headers) + headers["Content-Type"] = "application/json" + + body = _json.dumps(json, separators=(",", ":"), ensure_ascii=False).encode( + "utf-8" + ) + + if body is not None: + urlopen_kw["body"] = body + + if method in self._encode_url_methods: + return self.request_encode_url( + method, + url, + fields=fields, # type: ignore[arg-type] + headers=headers, + **urlopen_kw, + ) + else: + return self.request_encode_body( + method, url, fields=fields, headers=headers, **urlopen_kw + ) + + def request_encode_url( + self, + method: str, + url: str, + fields: _TYPE_ENCODE_URL_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + **urlopen_kw: str, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. This is useful for request methods like GET, HEAD, DELETE, etc. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param fields: + Data to encode and send in the request body. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + """ + if headers is None: + headers = self.headers + + extra_kw: dict[str, typing.Any] = {"headers": headers} + extra_kw.update(urlopen_kw) + + if fields: + url += "?" + urlencode(fields) + + return self.urlopen(method, url, **extra_kw) + + def request_encode_body( + self, + method: str, + url: str, + fields: _TYPE_FIELDS | None = None, + headers: typing.Mapping[str, str] | None = None, + encode_multipart: bool = True, + multipart_boundary: str | None = None, + **urlopen_kw: str, + ) -> BaseHTTPResponse: + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :func:`urllib3.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise + :func:`urllib.parse.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request + signing, such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. 
The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param fields: + Data to encode and send in the request body. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param encode_multipart: + If True, encode the ``fields`` using the multipart/form-data MIME + format. + + :param multipart_boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. + """ + if headers is None: + headers = self.headers + + extra_kw: dict[str, typing.Any] = {"headers": HTTPHeaderDict(headers)} + body: bytes | str + + if fields: + if "body" in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one." + ) + + if encode_multipart: + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) + else: + body, content_type = ( + urlencode(fields), # type: ignore[arg-type] + "application/x-www-form-urlencoded", + ) + + extra_kw["body"] = body + extra_kw["headers"].setdefault("Content-Type", content_type) + + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/templates/skills/spotify/dependencies/urllib3/_version.py b/templates/skills/spotify/dependencies/urllib3/_version.py new file mode 100644 index 00000000..095cf3c1 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/_version.py @@ -0,0 +1,4 @@ +# This file is protected via CODEOWNERS +from __future__ import annotations + +__version__ = "2.2.1" diff --git a/templates/skills/spotify/dependencies/urllib3/connection.py b/templates/skills/spotify/dependencies/urllib3/connection.py new file mode 100644 index 00000000..aa5c547c --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/connection.py @@ -0,0 +1,930 @@ +from __future__ import annotations + +import datetime +import logging +import os +import re +import socket +import sys +import typing +import warnings +from http.client import HTTPConnection as _HTTPConnection +from http.client import HTTPException as HTTPException # noqa: F401 +from http.client import ResponseNotReady +from socket import timeout as SocketTimeout + +if typing.TYPE_CHECKING: + from typing import Literal + + from .response import HTTPResponse + from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT + from .util.ssltransport import SSLTransport + +from ._collections import HTTPHeaderDict +from .util.response import assert_header_parsing +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout +from .util.util import to_str +from .util.wait import wait_for_read + +try: # Compiled with SSL? 
+ import ssl + + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): + ssl = None # type: ignore[assignment] + + class BaseSSLError(BaseException): # type: ignore[no-redef] + pass + + +from ._base_connection import _TYPE_BODY +from ._base_connection import ProxyConfig as ProxyConfig +from ._base_connection import _ResponseOptions as _ResponseOptions +from ._version import __version__ +from .exceptions import ( + ConnectTimeoutError, + HeaderParsingError, + NameResolutionError, + NewConnectionError, + ProxyError, + SystemTimeWarning, +) +from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_ +from .util.request import body_to_chunks +from .util.ssl_ import assert_fingerprint as _assert_fingerprint +from .util.ssl_ import ( + create_urllib3_context, + is_ipaddress, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .util.ssl_match_hostname import CertificateError, match_hostname +from .util.url import Url + +# Not a no-op, we're adding this to the namespace so it can be imported. +ConnectionError = ConnectionError +BrokenPipeError = BrokenPipeError + + +log = logging.getLogger(__name__) + +port_by_scheme = {"http": 80, "https": 443} + +# When it comes time to update this value as a part of regular maintenance +# (ie test_recent_date is failing) update it to ~6 months before the current date. +RECENT_DATE = datetime.date(2023, 6, 1) + +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") + +_HAS_SYS_AUDIT = hasattr(sys, "audit") + + +class HTTPConnection(_HTTPConnection): + """ + Based on :class:`http.client.HTTPConnection` but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``source_address``: Set the source address for the current connection. + - ``socket_options``: Set specific options on the underlying socket. If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass: + + .. code-block:: python + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + """ + + default_port: typing.ClassVar[int] = port_by_scheme["http"] # type: ignore[misc] + + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options: typing.ClassVar[connection._TYPE_SOCKET_OPTIONS] = [ + (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + ] + + #: Whether this connection verifies the host's certificate. + is_verified: bool = False + + #: Whether this proxy connection verified the proxy host's certificate. + # If no proxy is currently connected to the value will be ``None``. 
+ proxy_is_verified: bool | None = None + + blocksize: int + source_address: tuple[str, int] | None + socket_options: connection._TYPE_SOCKET_OPTIONS | None + + _has_connected_to_proxy: bool + _response_options: _ResponseOptions | None + _tunnel_host: str | None + _tunnel_port: int | None + _tunnel_scheme: str | None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: None + | (connection._TYPE_SOCKET_OPTIONS) = default_socket_options, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + ) -> None: + super().__init__( + host=host, + port=port, + timeout=Timeout.resolve_default_timeout(timeout), + source_address=source_address, + blocksize=blocksize, + ) + self.socket_options = socket_options + self.proxy = proxy + self.proxy_config = proxy_config + + self._has_connected_to_proxy = False + self._response_options = None + self._tunnel_host: str | None = None + self._tunnel_port: int | None = None + self._tunnel_scheme: str | None = None + + @property + def host(self) -> str: + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip(".") + + @host.setter + def host(self, value: str) -> None: + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self) -> socket.socket: + """Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + try: + sock = connection.create_connection( + (self._dns_host, self.port), + self.timeout, + source_address=self.source_address, + socket_options=self.socket_options, + ) + except socket.gaierror as e: + raise NameResolutionError(self.host, self, e) from e + except SocketTimeout as e: + raise ConnectTimeoutError( + self, + f"Connection to {self.host} timed out. 
(connect timeout={self.timeout})", + ) from e + + except OSError as e: + raise NewConnectionError( + self, f"Failed to establish a new connection: {e}" + ) from e + + # Audit hooks are only available in Python 3.8+ + if _HAS_SYS_AUDIT: + sys.audit("http.client.connect", self, self.host, self.port) + + return sock + + def set_tunnel( + self, + host: str, + port: int | None = None, + headers: typing.Mapping[str, str] | None = None, + scheme: str = "http", + ) -> None: + if scheme not in ("http", "https"): + raise ValueError( + f"Invalid proxy scheme for tunneling: {scheme!r}, must be either 'http' or 'https'" + ) + super().set_tunnel(host, port=port, headers=headers) + self._tunnel_scheme = scheme + + def connect(self) -> None: + self.sock = self._new_conn() + if self._tunnel_host: + # If we're tunneling it means we're connected to our proxy. + self._has_connected_to_proxy = True + + # TODO: Fix tunnel so it doesn't depend on self.sock state. + self._tunnel() # type: ignore[attr-defined] + + # If there's a proxy to be connected to we are fully connected. + # This is set twice (once above and here) due to forwarding proxies + # not using tunnelling. + self._has_connected_to_proxy = bool(self.proxy) + + if self._has_connected_to_proxy: + self.proxy_is_verified = False + + @property + def is_closed(self) -> bool: + return self.sock is None + + @property + def is_connected(self) -> bool: + if self.sock is None: + return False + return not wait_for_read(self.sock, timeout=0.0) + + @property + def has_connected_to_proxy(self) -> bool: + return self._has_connected_to_proxy + + @property + def proxy_is_forwarding(self) -> bool: + """ + Return True if a forwarding proxy is configured, else return False + """ + return bool(self.proxy) and self._tunnel_host is None + + def close(self) -> None: + try: + super().close() + finally: + # Reset all stateful properties so connection + # can be re-used without leaking prior configs. + self.sock = None + self.is_verified = False + self.proxy_is_verified = None + self._has_connected_to_proxy = False + self._response_options = None + self._tunnel_host = None + self._tunnel_port = None + self._tunnel_scheme = None + + def putrequest( + self, + method: str, + url: str, + skip_host: bool = False, + skip_accept_encoding: bool = False, + ) -> None: + """""" + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + f"Method cannot contain non-token characters {method!r} (found at least {match.group()!r})" + ) + + return super().putrequest( + method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding + ) + + def putheader(self, header: str, *values: str) -> None: # type: ignore[override] + """""" + if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): + super().putheader(header, *values) + elif to_str(header.lower()) not in SKIPPABLE_HEADERS: + skippable_headers = "', '".join( + [str.title(header) for header in sorted(SKIPPABLE_HEADERS)] + ) + raise ValueError( + f"urllib3.util.SKIP_HEADER only supports '{skippable_headers}'" + ) + + # `request` method's signature intentionally violates LSP. + # urllib3's API is different from `http.client.HTTPConnection` and the subclassing is only incidental. 
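+ # (LSP is the Liskov Substitution Principle: the overriding signature is
+ # deliberately not compatible with `http.client.HTTPConnection.request`.)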
+ def request( # type: ignore[override]
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ *,
+ chunked: bool = False,
+ preload_content: bool = True,
+ decode_content: bool = True,
+ enforce_content_length: bool = True,
+ ) -> None:
+ # Update the inner socket's timeout value to send the request.
+ # This only triggers if the connection is re-used.
+ if self.sock is not None:
+ self.sock.settimeout(self.timeout)
+
+ # Store these values to be fed into the HTTPResponse
+ # object later. TODO: Remove this in favor of a real
+ # HTTP lifecycle mechanism.
+
+ # We have to store these before we call .request()
+ # because sometimes we can still salvage a response
+ # off the wire even if we aren't able to completely
+ # send the request body.
+ self._response_options = _ResponseOptions(
+ request_method=method,
+ request_url=url,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ enforce_content_length=enforce_content_length,
+ )
+
+ if headers is None:
+ headers = {}
+ header_keys = frozenset(to_str(k.lower()) for k in headers)
+ skip_accept_encoding = "accept-encoding" in header_keys
+ skip_host = "host" in header_keys
+ self.putrequest(
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+ )
+
+ # Transform the body into an iterable of sendall()-able chunks
+ # and detect if an explicit Content-Length is doable.
+ chunks_and_cl = body_to_chunks(body, method=method, blocksize=self.blocksize)
+ chunks = chunks_and_cl.chunks
+ content_length = chunks_and_cl.content_length
+
+ # When chunked is explicitly set to 'True' we respect that.
+ if chunked:
+ if "transfer-encoding" not in header_keys:
+ self.putheader("Transfer-Encoding", "chunked")
+ else:
+ # Detect whether a framing mechanism is already in use. If so,
+ # we respect that value, otherwise we pick chunked vs content-length
+ # depending on the type of 'body'.
+ if "content-length" in header_keys:
+ chunked = False
+ elif "transfer-encoding" in header_keys:
+ chunked = True
+
+ # Otherwise we go off the recommendation of 'body_to_chunks()'.
+ else:
+ chunked = False
+ if content_length is None:
+ if chunks is not None:
+ chunked = True
+ self.putheader("Transfer-Encoding", "chunked")
+ else:
+ self.putheader("Content-Length", str(content_length))
+
+ # Now that framing headers are out of the way we send all the other headers.
+ if "user-agent" not in header_keys:
+ self.putheader("User-Agent", _get_default_user_agent())
+ for header, value in headers.items():
+ self.putheader(header, value)
+ self.endheaders()
+
+ # If we're given a body we start sending that in chunks.
+ if chunks is not None:
+ for chunk in chunks:
+ # Sending empty chunks isn't allowed for TE: chunked
+ # as it indicates the end of the body.
+ if not chunk:
+ continue
+ if isinstance(chunk, str):
+ chunk = chunk.encode("utf-8")
+ if chunked:
+ self.send(b"%x\r\n%b\r\n" % (len(chunk), chunk))
+ else:
+ self.send(chunk)
+
+ # Regardless of whether we have a body or not, if we're in
+ # chunked mode we want to send an explicit empty chunk.
+ if chunked:
+ self.send(b"0\r\n\r\n")
+
+ def request_chunked(
+ self,
+ method: str,
+ url: str,
+ body: _TYPE_BODY | None = None,
+ headers: typing.Mapping[str, str] | None = None,
+ ) -> None:
+ """
+ Alternative to the common request method, which sends the
+ body with chunked encoding and not as one block.
+ """
+ warnings.warn(
+ "HTTPConnection.request_chunked() is deprecated and will be removed "
+ "in urllib3 v2.1.0. Instead use HTTPConnection.request(..., chunked=True).",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+ self.request(method, url, body=body, headers=headers, chunked=True)
+
+ def getresponse( # type: ignore[override]
+ self,
+ ) -> HTTPResponse:
+ """
+ Get the response from the server.
+
+ If the HTTPConnection is in the correct state, returns an instance of HTTPResponse or of whatever object is returned by the response_class variable.
+
+ If a request has not been sent or if a previous response has not been handled, ResponseNotReady is raised. If the HTTP response indicates that the connection should be closed, then it will be closed before the response is returned. When the connection is closed, the underlying socket is closed.
+ """
+ # Raise the same error as http.client.HTTPConnection
+ if self._response_options is None:
+ raise ResponseNotReady()
+
+ # Reset this attribute so the connection can be used again.
+ resp_options = self._response_options
+ self._response_options = None
+
+ # Since the connection's timeout value may have been updated
+ # we need to set the timeout on the socket.
+ self.sock.settimeout(self.timeout)
+
+ # This is needed here to avoid circular import errors
+ from .response import HTTPResponse
+
+ # Get the response from http.client.HTTPConnection
+ httplib_response = super().getresponse()
+
+ try:
+ assert_header_parsing(httplib_response.msg)
+ except (HeaderParsingError, TypeError) as hpe:
+ log.warning(
+ "Failed to parse headers (url=%s): %s",
+ _url_from_connection(self, resp_options.request_url),
+ hpe,
+ exc_info=True,
+ )
+
+ headers = HTTPHeaderDict(httplib_response.msg.items())
+
+ response = HTTPResponse(
+ body=httplib_response,
+ headers=headers,
+ status=httplib_response.status,
+ version=httplib_response.version,
+ reason=httplib_response.reason,
+ preload_content=resp_options.preload_content,
+ decode_content=resp_options.decode_content,
+ original_response=httplib_response,
+ enforce_content_length=resp_options.enforce_content_length,
+ request_method=resp_options.request_method,
+ request_url=resp_options.request_url,
+ )
+ return response
+
+
+class HTTPSConnection(HTTPConnection):
+ """
+ Many of the parameters to this constructor are passed to the underlying SSL
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
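+
+ For example, a minimal direct use (``example.com`` is a placeholder
+ host; most code should go through :class:`~urllib3.HTTPSConnectionPool`
+ instead):
+
+ .. code-block:: python
+
+ conn = HTTPSConnection("example.com", 443)
+ conn.request("GET", "/")
+ response = conn.getresponse()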
+ """ + + default_port = port_by_scheme["https"] # type: ignore[misc] + + cert_reqs: int | str | None = None + ca_certs: str | None = None + ca_cert_dir: str | None = None + ca_cert_data: None | str | bytes = None + ssl_version: int | str | None = None + ssl_minimum_version: int | None = None + ssl_maximum_version: int | None = None + assert_fingerprint: str | None = None + + def __init__( + self, + host: str, + port: int | None = None, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: None + | (connection._TYPE_SOCKET_OPTIONS) = HTTPConnection.default_socket_options, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + cert_reqs: int | str | None = None, + assert_hostname: None | str | Literal[False] = None, + assert_fingerprint: str | None = None, + server_hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + ca_certs: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, + ssl_version: int | str | None = None, # Deprecated + cert_file: str | None = None, + key_file: str | None = None, + key_password: str | None = None, + ) -> None: + super().__init__( + host, + port=port, + timeout=timeout, + source_address=source_address, + blocksize=blocksize, + socket_options=socket_options, + proxy=proxy, + proxy_config=proxy_config, + ) + + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + self.ssl_context = ssl_context + self.server_hostname = server_hostname + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ssl_version = ssl_version + self.ssl_minimum_version = ssl_minimum_version + self.ssl_maximum_version = ssl_maximum_version + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + # cert_reqs depends on ssl_context so calculate last. + if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + self.cert_reqs = cert_reqs + + def set_cert( + self, + key_file: str | None = None, + cert_file: str | None = None, + cert_reqs: int | str | None = None, + key_password: str | None = None, + ca_certs: str | None = None, + assert_hostname: None | str | Literal[False] = None, + assert_fingerprint: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ) -> None: + """ + This method should only be called once, before the connection is used. + """ + warnings.warn( + "HTTPSConnection.set_cert() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead provide the parameters to the " + "HTTPSConnection constructor.", + category=DeprecationWarning, + stacklevel=2, + ) + + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. 
+ if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + def connect(self) -> None: + sock: socket.socket | ssl.SSLSocket + self.sock = sock = self._new_conn() + server_hostname: str = self.host + tls_in_tls = False + + # Do we need to establish a tunnel? + if self._tunnel_host is not None: + # We're tunneling to an HTTPS origin so need to do TLS-in-TLS. + if self._tunnel_scheme == "https": + # _connect_tls_proxy will verify and assign proxy_is_verified + self.sock = sock = self._connect_tls_proxy(self.host, sock) + tls_in_tls = True + elif self._tunnel_scheme == "http": + self.proxy_is_verified = False + + # If we're tunneling it means we're connected to our proxy. + self._has_connected_to_proxy = True + + self._tunnel() # type: ignore[attr-defined] + # Override the host with the one we're requesting data from. + server_hostname = self._tunnel_host + + if self.server_hostname is not None: + server_hostname = self.server_hostname + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn( + ( + f"System time is way off (before {RECENT_DATE}). This will probably " + "lead to SSL verification errors" + ), + SystemTimeWarning, + ) + + # Remove trailing '.' from fqdn hostnames to allow certificate validation + server_hostname_rm_dot = server_hostname.rstrip(".") + + sock_and_verified = _ssl_wrap_socket_and_match_hostname( + sock=sock, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + server_hostname=server_hostname_rm_dot, + ssl_context=self.ssl_context, + tls_in_tls=tls_in_tls, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ) + self.sock = sock_and_verified.socket + + # Forwarding proxies can never have a verified target since + # the proxy is the one doing the verification. Should instead + # use a CONNECT tunnel in order to verify the target. + # See: https://github.com/urllib3/urllib3/issues/3267. + if self.proxy_is_forwarding: + self.is_verified = False + else: + self.is_verified = sock_and_verified.is_verified + + # If there's a proxy to be connected to we are fully connected. + # This is set twice (once above and here) due to forwarding proxies + # not using tunnelling. + self._has_connected_to_proxy = bool(self.proxy) + + # Set `self.proxy_is_verified` unless it's already set while + # establishing a tunnel. + if self._has_connected_to_proxy and self.proxy_is_verified is None: + self.proxy_is_verified = sock_and_verified.is_verified + + def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket: + """ + Establish a TLS connection to the proxy using the provided SSL context. + """ + # `_connect_tls_proxy` is called when self._tunnel_host is truthy. 
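+ # typing.cast() has no runtime effect here; it only informs static type
+ # checkers that proxy_config is not None at this point.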
+ proxy_config = typing.cast(ProxyConfig, self.proxy_config) + ssl_context = proxy_config.ssl_context + sock_and_verified = _ssl_wrap_socket_and_match_hostname( + sock, + cert_reqs=self.cert_reqs, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + assert_hostname=proxy_config.assert_hostname, + assert_fingerprint=proxy_config.assert_fingerprint, + # Features that aren't implemented for proxies yet: + cert_file=None, + key_file=None, + key_password=None, + tls_in_tls=False, + ) + self.proxy_is_verified = sock_and_verified.is_verified + return sock_and_verified.socket # type: ignore[return-value] + + +class _WrappedAndVerifiedSocket(typing.NamedTuple): + """ + Wrapped socket and whether the connection is + verified after the TLS handshake + """ + + socket: ssl.SSLSocket | SSLTransport + is_verified: bool + + +def _ssl_wrap_socket_and_match_hostname( + sock: socket.socket, + *, + cert_reqs: None | str | int, + ssl_version: None | str | int, + ssl_minimum_version: int | None, + ssl_maximum_version: int | None, + cert_file: str | None, + key_file: str | None, + key_password: str | None, + ca_certs: str | None, + ca_cert_dir: str | None, + ca_cert_data: None | str | bytes, + assert_hostname: None | str | Literal[False], + assert_fingerprint: str | None, + server_hostname: str | None, + ssl_context: ssl.SSLContext | None, + tls_in_tls: bool = False, +) -> _WrappedAndVerifiedSocket: + """Logic for constructing an SSLContext from all TLS parameters, passing + that down into ssl_wrap_socket, and then doing certificate verification + either via hostname or fingerprint. This function exists to guarantee + that both proxies and targets have the same behavior when connecting via TLS. + """ + default_ssl_context = False + if ssl_context is None: + default_ssl_context = True + context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + ssl_minimum_version=ssl_minimum_version, + ssl_maximum_version=ssl_maximum_version, + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + else: + context = ssl_context + + context.verify_mode = resolve_cert_reqs(cert_reqs) + + # In some cases, we want to verify hostnames ourselves + if ( + # `ssl` can't verify fingerprints or alternate hostnames + assert_fingerprint + or assert_hostname + # assert_hostname can be set to False to disable hostname checking + or assert_hostname is False + # We still support OpenSSL 1.0.2, which prevents us from verifying + # hostnames easily: https://github.com/pyca/pyopenssl/pull/933 + or ssl_.IS_PYOPENSSL + or not ssl_.HAS_NEVER_CHECK_COMMON_NAME + ): + context.check_hostname = False + + # Try to load OS default certs if none are given. We need to do the hasattr() check + # for custom pyOpenSSL SSLContext objects because they don't support + # load_default_certs(). + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + + # Ensure that IPv6 addresses are in the proper format and don't have a + # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses + # and interprets them as DNS hostnames. 
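+ # For example, a scoped IPv6 literal such as "[fe80::1%eth0]" (an
+ # illustrative value) is reduced to "fe80::1" before the handshake.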
+ if server_hostname is not None: + normalized = server_hostname.strip("[]") + if "%" in normalized: + normalized = normalized[: normalized.rfind("%")] + if is_ipaddress(normalized): + server_hostname = normalized + + ssl_sock = ssl_wrap_socket( + sock=sock, + keyfile=key_file, + certfile=cert_file, + key_password=key_password, + ca_certs=ca_certs, + ca_cert_dir=ca_cert_dir, + ca_cert_data=ca_cert_data, + server_hostname=server_hostname, + ssl_context=context, + tls_in_tls=tls_in_tls, + ) + + try: + if assert_fingerprint: + _assert_fingerprint( + ssl_sock.getpeercert(binary_form=True), assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not context.check_hostname + and assert_hostname is not False + ): + cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert() # type: ignore[assignment] + + # Need to signal to our match_hostname whether to use 'commonName' or not. + # If we're using our own constructed SSLContext we explicitly set 'False' + # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name. + if default_ssl_context: + hostname_checks_common_name = False + else: + hostname_checks_common_name = ( + getattr(context, "hostname_checks_common_name", False) or False + ) + + _match_hostname( + cert, + assert_hostname or server_hostname, # type: ignore[arg-type] + hostname_checks_common_name, + ) + + return _WrappedAndVerifiedSocket( + socket=ssl_sock, + is_verified=context.verify_mode == ssl.CERT_REQUIRED + or bool(assert_fingerprint), + ) + except BaseException: + ssl_sock.close() + raise + + +def _match_hostname( + cert: _TYPE_PEER_CERT_RET_DICT | None, + asserted_hostname: str, + hostname_checks_common_name: bool = False, +) -> None: + # Our upstream implementation of ssl.match_hostname() + # only applies this normalization to IP addresses so it doesn't + # match DNS SANs so we do the same thing! + stripped_hostname = asserted_hostname.strip("[]") + if is_ipaddress(stripped_hostname): + asserted_hostname = stripped_hostname + + try: + match_hostname(cert, asserted_hostname, hostname_checks_common_name) + except CertificateError as e: + log.warning( + "Certificate did not match expected hostname: %s. Certificate: %s", + asserted_hostname, + cert, + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert # type: ignore[attr-defined] + raise + + +def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError: + # Look for the phrase 'wrong version number', if found + # then we should warn the user that we're very sure that + # this proxy is HTTP-only and they have a configuration issue. + error_normalized = " ".join(re.split("[^a-z]", str(err).lower())) + is_likely_http_proxy = ( + "wrong version number" in error_normalized + or "unknown protocol" in error_normalized + or "record layer failure" in error_normalized + ) + http_proxy_warning = ( + ". Your proxy appears to only use HTTP and not HTTPS, " + "try changing your proxy URL to be HTTP. 
See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#https-proxy-error-http-proxy" + ) + new_err = ProxyError( + f"Unable to connect to proxy" + f"{http_proxy_warning if is_likely_http_proxy and proxy_scheme == 'https' else ''}", + err, + ) + new_err.__cause__ = err + return new_err + + +def _get_default_user_agent() -> str: + return f"python-urllib3/{__version__}" + + +class DummyConnection: + """Used to detect a failed ConnectionCls import.""" + + +if not ssl: + HTTPSConnection = DummyConnection # type: ignore[misc, assignment] # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection + + +def _url_from_connection( + conn: HTTPConnection | HTTPSConnection, path: str | None = None +) -> str: + """Returns the URL from a given connection. This is mainly used for testing and logging.""" + + scheme = "https" if isinstance(conn, HTTPSConnection) else "http" + + return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url diff --git a/templates/skills/spotify/dependencies/urllib3/connectionpool.py b/templates/skills/spotify/dependencies/urllib3/connectionpool.py new file mode 100644 index 00000000..bd58ff14 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/connectionpool.py @@ -0,0 +1,1186 @@ +from __future__ import annotations + +import errno +import logging +import queue +import sys +import typing +import warnings +import weakref +from socket import timeout as SocketTimeout +from types import TracebackType + +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from ._request_methods import RequestMethods +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + ProxyConfig, + _wrap_proxy_error, +) +from .connection import port_by_scheme as port_by_scheme +from .exceptions import ( + ClosedPoolError, + EmptyPoolError, + FullPoolError, + HostChangedError, + InsecureRequestWarning, + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, +) +from .response import BaseHTTPResponse +from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.request import _TYPE_BODY_POSITION, set_file_position +from .util.retry import Retry +from .util.ssl_match_hostname import CertificateError +from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import parse_url +from .util.util import to_str + +if typing.TYPE_CHECKING: + import ssl + from typing import Literal + + from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection + +log = logging.getLogger(__name__) + +_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None] + +_SelfT = typing.TypeVar("_SelfT") + + +# Pool objects +class ConnectionPool: + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. 
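+
+ For example, subclasses such as :class:`.HTTPConnectionPool` can be
+ used as context managers so the pool is closed on exit (``localhost``
+ is a placeholder host)::
+
+ with HTTPConnectionPool("localhost") as pool:
+ r = pool.request("GET", "/")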
+ """ + + scheme: str | None = None + QueueCls = queue.LifoQueue + + def __init__(self, host: str, port: int | None = None) -> None: + if not host: + raise LocationValueError("No host specified.") + + self.host = _normalize_host(host, scheme=self.scheme) + self.port = port + + # This property uses 'normalize_host()' (not '_normalize_host()') + # to avoid removing square braces around IPv6 addresses. + # This value is sent to `HTTPConnection.set_tunnel()` if called + # because square braces are required for HTTP CONNECT tunneling. + self._tunnel_host = normalize_host(host, scheme=self.scheme).lower() + + def __str__(self) -> str: + return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})" + + def __enter__(self: _SelfT) -> _SelfT: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> Literal[False]: + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self) -> None: + """ + Close all pooled connections and disable the pool. + """ + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`http.client.HTTPConnection`. + + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`http.client.HTTPConnection`. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.ProxyManager` + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.ProxyManager` + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. 
+ """ + + scheme = "http" + ConnectionCls: ( + type[BaseHTTPConnection] | type[BaseHTTPSConnection] + ) = HTTPConnection + + def __init__( + self, + host: str, + port: int | None = None, + timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT, + maxsize: int = 1, + block: bool = False, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + _proxy: Url | None = None, + _proxy_headers: typing.Mapping[str, str] | None = None, + _proxy_config: ProxyConfig | None = None, + **conn_kw: typing.Any, + ): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config + + # Fill the queue up so that doing get() on it will block properly + for _ in range(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault("socket_options", []) + + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + + # Do not pass 'self' as callback to 'finalize'. + # Then the 'finalize' would keep an endless living (leak) to self. + # By just passing a reference to the pool allows the garbage collector + # to free self if nobody else has a reference to it. + pool = self.pool + + # Close all the HTTPConnections in the pool before the + # HTTPConnectionPool object is garbage collected. + weakref.finalize(self, _close_pool_connections, pool) + + def _new_conn(self) -> BaseHTTPConnection: + """ + Return a fresh :class:`HTTPConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + **self.conn_kw, + ) + return conn + + def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection: + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. 
+ """ + conn = None + + if self.pool is None: + raise ClosedPoolError(self, "Pool is closed.") + + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") from None # Defensive: + + except queue.Empty: + if self.block: + raise EmptyPoolError( + self, + "Pool is empty and a new connection can't be opened due to blocking mode.", + ) from None + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + + return conn or self._new_conn() + + def _put_conn(self, conn: BaseHTTPConnection | None) -> None: + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + if self.pool is not None: + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + if self.block: + # This should never happen if you got the conn from self._get_conn + raise FullPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) from None + + log.warning( + "Connection pool is full, discarding connection: %s. Connection pool size: %s", + self.host, + self.pool.qsize(), + ) + + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn: BaseHTTPConnection) -> None: + """ + Called right before a request is made, after the socket is created. + """ + + def _prepare_proxy(self, conn: BaseHTTPConnection) -> None: + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout: + """Helper that always returns a :class:`urllib3.util.Timeout`""" + if timeout is _DEFAULT_TIMEOUT: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout( + self, + err: BaseSSLError | OSError | SocketTimeout, + url: str, + timeout_value: _TYPE_TIMEOUT | None, + ) -> None: + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError( + self, url, f"Read timed out. (read timeout={timeout_value})" + ) from err + + # See the above comment about EAGAIN in Python 3. + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, f"Read timed out. 
(read timeout={timeout_value})" + ) from err + + def _make_request( + self, + conn: BaseHTTPConnection, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | None = None, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + chunked: bool = False, + response_conn: BaseHTTPConnection | None = None, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> BaseHTTPResponse: + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param response_conn: + Set this to ``None`` if you will handle releasing the connection or + set the connection to have the response release it. + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) + + try: + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # _validate_conn() starts the connection to an HTTPS proxy + # so we need to wrap errors with 'ProxyError' here too. 
+ except (
+ OSError,
+ NewConnectionError,
+ TimeoutError,
+ BaseSSLError,
+ CertificateError,
+ SSLError,
+ ) as e:
+ new_e: Exception = e
+ if isinstance(e, (BaseSSLError, CertificateError)):
+ new_e = SSLError(e)
+ # If the connection didn't successfully connect to its proxy,
+ # wrap the error so the caller can tell the failure was proxy-related.
+ if isinstance(
+ new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
+ ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
+ new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
+ raise new_e
+
+ # conn.request() calls http.client.*.request, not the method in
+ # urllib3.request. It also calls makefile (recv) on the socket.
+ try:
+ conn.request(
+ method,
+ url,
+ body=body,
+ headers=headers,
+ chunked=chunked,
+ preload_content=preload_content,
+ decode_content=decode_content,
+ enforce_content_length=enforce_content_length,
+ )
+
+ # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
+ # legitimately able to close the connection after sending a valid response.
+ # With this behaviour, the received response is still readable.
+ except BrokenPipeError:
+ pass
+ except OSError as e:
+ # MacOS/Linux
+ # EPROTOTYPE and ECONNRESET are needed on macOS
+ # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+ # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
+ if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
+ raise
+
+ # Reset the timeout for the recv() on the socket
+ read_timeout = timeout_obj.read_timeout
+
+ if not conn.is_closed:
+ # In Python 3 socket.py will catch EAGAIN and return None when you
+ # try and read into the file pointer created by http.client, which
+ # instead raises a BadStatusLine exception. Instead of catching
+ # the exception and assuming all BadStatusLine exceptions are read
+ # timeouts, check for a zero timeout before making the request.
+ if read_timeout == 0:
+ raise ReadTimeoutError(
+ self, url, f"Read timed out. (read timeout={read_timeout})"
+ )
+ conn.timeout = read_timeout
+
+ # Receive the response from the server
+ try:
+ response = conn.getresponse()
+ except (BaseSSLError, OSError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
+ raise
+
+ # Set properties that are used by the pooling layer.
+ response.retries = retries
+ response._connection = response_conn # type: ignore[attr-defined]
+ response._pool = self # type: ignore[attr-defined]
+
+ # emscripten connection doesn't have _http_vsn_str
+ http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
+ log.debug(
+ '%s://%s:%s "%s %s %s" %s %s',
+ self.scheme,
+ self.host,
+ self.port,
+ method,
+ url,
+ # HTTP version
+ http_version,
+ response.status,
+ response.length_remaining,
+ )
+
+ return response
+
+ def close(self) -> None:
+ """
+ Close all pooled connections and disable the pool.
+ """
+ if self.pool is None:
+ return
+ # Disable access to the pool
+ old_pool, self.pool = self.pool, None
+
+ # Close all the HTTPConnections in the pool.
+ _close_pool_connections(old_pool)
+
+ def is_same_host(self, url: str) -> bool:
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ if url.startswith("/"):
+ return True
+
+ # TODO: Add optional support for socket.gethostbyname checking.
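+ # Defaulting the port below lets, for example, "http://host" and
+ # "http://host:80" compare as the same host.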
+ scheme, _, host, port, *_ = parse_url(url) + scheme = scheme or "http" + if host is not None: + host = _normalize_host(host, scheme=scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen( # type: ignore[override] + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + redirect: bool = True, + assert_same_host: bool = True, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + pool_timeout: int | None = None, + release_conn: bool | None = None, + chunked: bool = False, + body_pos: _TYPE_BODY_POSITION | None = None, + preload_content: bool = True, + decode_content: bool = True, + **response_kw: typing.Any, + ) -> BaseHTTPResponse: + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method + such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When ``False``, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. 
+
+ :param bool preload_content:
+ If True, the response's body will be preloaded into memory.
+
+ :param bool decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of ``preload_content``
+ which defaults to ``True``.
+
+ :param bool chunked:
+ If True, urllib3 will send the body using chunked transfer
+ encoding. Otherwise, urllib3 will send the body using the standard
+ content-length form. Defaults to False.
+
+ :param int body_pos:
+ Position to seek to in file-like body in the event of a retry or
+ redirect. Typically this won't need to be set because urllib3 will
+ auto-populate the value when needed.
+ """
+ parsed_url = parse_url(url)
+ destination_scheme = parsed_url.scheme
+
+ if headers is None:
+ headers = self.headers
+
+ if not isinstance(retries, Retry):
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+
+ if release_conn is None:
+ release_conn = preload_content
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ raise HostChangedError(self, url, retries)
+
+ # Ensure that the URL we're connecting to is properly encoded
+ if url.startswith("/"):
+ url = to_str(_encode_target(url))
+ else:
+ url = to_str(parsed_url.url)
+
+ conn = None
+
+ # Track whether `conn` needs to be released before
+ # returning/raising/recursing. Update this variable if necessary, and
+ # leave `release_conn` constant throughout the function. That way, if
+ # the function recurses, the original value of `release_conn` will be
+ # passed down into the recursive call, and its value will be respected.
+ #
+ # See issue #651 [1] for details.
+ #
+ # [1] <https://github.com/urllib3/urllib3/issues/651>
+ release_this_conn = release_conn
+
+ http_tunnel_required = connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, destination_scheme
+ )
+
+ # Merge the proxy headers. Only done when not using HTTP CONNECT. We
+ # have to copy the headers dict so we can safely change it without those
+ # changes being reflected in anyone else's copy.
+ if not http_tunnel_required:
+ headers = headers.copy() # type: ignore[attr-defined]
+ headers.update(self.proxy_headers) # type: ignore[union-attr]
+
+ # Must keep the exception bound to a separate variable or else Python 3
+ # complains about UnboundLocalError.
+ err = None
+
+ # Keep track of whether we cleanly exited the except block. This
+ # ensures we do proper cleanup in finally.
+ clean_exit = False
+
+ # Rewind body position, if needed. Record current position
+ # for future rewinds in the event of a redirect/retry.
+ body_pos = set_file_position(body, body_pos)
+
+ try:
+ # Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ conn.timeout = timeout_obj.connect_timeout # type: ignore[assignment]
+
+ # Is this a closed/new connection that requires CONNECT tunnelling?
+ if self.proxy is not None and http_tunnel_required and conn.is_closed: + try: + self._prepare_proxy(conn) + except (BaseSSLError, OSError, SocketTimeout) as e: + self._raise_timeout( + err=e, url=self.proxy.url, timeout_value=conn.timeout + ) + raise + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Make the request on the HTTPConnection object + response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + retries=retries, + response_conn=response_conn, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Everything went great! + clean_exit = True + + except EmptyPoolError: + # Didn't get a connection from the pool, no need to clean up + clean_exit = True + release_this_conn = False + raise + + except ( + TimeoutError, + HTTPException, + OSError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ProxyError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. + clean_exit = False + new_e: Exception = e + if isinstance(e, (BaseSSLError, CertificateError)): + new_e = SSLError(e) + if isinstance( + new_e, + ( + OSError, + NewConnectionError, + TimeoutError, + SSLError, + HTTPException, + ), + ) and (conn and conn.proxy and not conn.has_connected_to_proxy): + new_e = _wrap_proxy_error(new_e, conn.proxy.scheme) + elif isinstance(new_e, (OSError, HTTPException)): + new_e = ProtocolError("Connection aborted.", new_e) + + retries = retries.increment( + method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + if conn: + conn.close() + conn = None + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Handle redirect? + redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. + method = "GET" + # And lose the body not to transfer anything sensitive. 
+ body = None + headers = HTTPHeaderDict(headers)._prepare_for_method_change() + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.headers.get("Retry-After")) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, + url, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + preload_content=preload_content, + decode_content=decode_content, + **response_kw, + ) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. 
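+
+ For example, to verify certificates against a specific CA bundle
+ (the host and bundle path are placeholders)::
+
+ pool = HTTPSConnectionPool(
+ "example.com",
+ cert_reqs="CERT_REQUIRED",
+ ca_certs="/path/to/ca_bundle.pem",
+ )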
+ """ + + scheme = "https" + ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection + + def __init__( + self, + host: str, + port: int | None = None, + timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT, + maxsize: int = 1, + block: bool = False, + headers: typing.Mapping[str, str] | None = None, + retries: Retry | bool | int | None = None, + _proxy: Url | None = None, + _proxy_headers: typing.Mapping[str, str] | None = None, + key_file: str | None = None, + cert_file: str | None = None, + cert_reqs: int | str | None = None, + key_password: str | None = None, + ca_certs: str | None = None, + ssl_version: int | str | None = None, + ssl_minimum_version: ssl.TLSVersion | None = None, + ssl_maximum_version: ssl.TLSVersion | None = None, + assert_hostname: str | Literal[False] | None = None, + assert_fingerprint: str | None = None, + ca_cert_dir: str | None = None, + **conn_kw: typing.Any, + ) -> None: + super().__init__( + host, + port, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw, + ) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.ssl_minimum_version = ssl_minimum_version + self.ssl_maximum_version = ssl_maximum_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_proxy(self, conn: HTTPSConnection) -> None: # type: ignore[override] + """Establishes a tunnel connection through HTTP CONNECT.""" + if self.proxy and self.proxy.scheme == "https": + tunnel_scheme = "https" + else: + tunnel_scheme = "http" + + conn.set_tunnel( + scheme=tunnel_scheme, + host=self._tunnel_host, + port=self.port, + headers=self.proxy_headers, + ) + conn.connect() + + def _new_conn(self) -> BaseHTTPSConnection: + """ + Return a fresh :class:`urllib3.connection.HTTPConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: # type: ignore[comparison-overlap] + raise ImportError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) + + actual_host: str = self.host + actual_port = self.port + if self.proxy is not None and self.proxy.host is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + return self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ssl_version=self.ssl_version, + ssl_minimum_version=self.ssl_minimum_version, + ssl_maximum_version=self.ssl_maximum_version, + **self.conn_kw, + ) + + def _validate_conn(self, conn: BaseHTTPConnection) -> None: + """ + Called right before a request is made, after the socket is created. + """ + super()._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if conn.is_closed: + conn.connect() + + # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791 + if not conn.is_verified and not conn.proxy_is_verified: + warnings.warn( + ( + f"Unverified HTTPS request is being made to host '{conn.host}'. 
" + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" + "#tls-warnings" + ), + InsecureRequestWarning, + ) + + +def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool: + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, _, host, port, *_ = parse_url(url) + scheme = scheme or "http" + port = port or port_by_scheme.get(scheme, 80) + if scheme == "https": + return HTTPSConnectionPool(host, port=port, **kw) # type: ignore[arg-type] + else: + return HTTPConnectionPool(host, port=port, **kw) # type: ignore[arg-type] + + +@typing.overload +def _normalize_host(host: None, scheme: str | None) -> None: + ... + + +@typing.overload +def _normalize_host(host: str, scheme: str | None) -> str: + ... + + +def _normalize_host(host: str | None, scheme: str | None) -> str | None: + """ + Normalize hosts for comparisons and use with sockets. + """ + + host = normalize_host(host, scheme) + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. + # However, for backward compatibility reasons we can't actually + # *assert* that. See http://bugs.python.org/issue28539 + if host and host.startswith("[") and host.endswith("]"): + host = host[1:-1] + return host + + +def _url_from_pool( + pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None +) -> str: + """Returns the URL from a given connection pool. This is mainly used for testing and logging.""" + return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url + + +def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None: + """Drains a queue of connections and closes each one.""" + try: + while True: + conn = pool.get(block=False) + if conn: + conn.close() + except queue.Empty: + pass # Done. diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/__init__.py b/templates/skills/spotify/dependencies/urllib3/contrib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/__init__.py b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/__init__.py new file mode 100644 index 00000000..8a3c5beb --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/__init__.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +import urllib3.connection + +from ...connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from .connection import EmscriptenHTTPConnection, EmscriptenHTTPSConnection + + +def inject_into_urllib3() -> None: + # override connection classes to use emscripten specific classes + # n.b. 
mypy complains about the overriding of classes below + # if it isn't ignored + HTTPConnectionPool.ConnectionCls = EmscriptenHTTPConnection + HTTPSConnectionPool.ConnectionCls = EmscriptenHTTPSConnection + urllib3.connection.HTTPConnection = EmscriptenHTTPConnection # type: ignore[misc,assignment] + urllib3.connection.HTTPSConnection = EmscriptenHTTPSConnection # type: ignore[misc,assignment] diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/connection.py b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/connection.py new file mode 100644 index 00000000..2ceb4579 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/connection.py @@ -0,0 +1,254 @@ +from __future__ import annotations + +import os +import typing + +# use http.client.HTTPException for consistency with non-emscripten +from http.client import HTTPException as HTTPException # noqa: F401 +from http.client import ResponseNotReady + +from ..._base_connection import _TYPE_BODY +from ...connection import HTTPConnection, ProxyConfig, port_by_scheme +from ...exceptions import TimeoutError +from ...response import BaseHTTPResponse +from ...util.connection import _TYPE_SOCKET_OPTIONS +from ...util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT +from ...util.url import Url +from .fetch import _RequestError, _TimeoutError, send_request, send_streaming_request +from .request import EmscriptenRequest +from .response import EmscriptenHttpResponseWrapper, EmscriptenResponse + +if typing.TYPE_CHECKING: + from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection + + +class EmscriptenHTTPConnection: + default_port: typing.ClassVar[int] = port_by_scheme["http"] + default_socket_options: typing.ClassVar[_TYPE_SOCKET_OPTIONS] + + timeout: None | (float) + + host: str + port: int + blocksize: int + source_address: tuple[str, int] | None + socket_options: _TYPE_SOCKET_OPTIONS | None + + proxy: Url | None + proxy_config: ProxyConfig | None + + is_verified: bool = False + proxy_is_verified: bool | None = None + + _response: EmscriptenResponse | None + + def __init__( + self, + host: str, + port: int = 0, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 8192, + socket_options: _TYPE_SOCKET_OPTIONS | None = None, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + ) -> None: + self.host = host + self.port = port + self.timeout = timeout if isinstance(timeout, float) else 0.0 + self.scheme = "http" + self._closed = True + self._response = None + # ignore these things because we don't + # have control over that stuff + self.proxy = None + self.proxy_config = None + self.blocksize = blocksize + self.source_address = None + self.socket_options = None + self.is_verified = False + + def set_tunnel( + self, + host: str, + port: int | None = 0, + headers: typing.Mapping[str, str] | None = None, + scheme: str = "http", + ) -> None: + pass + + def connect(self) -> None: + pass + + def request( + self, + method: str, + url: str, + body: _TYPE_BODY | None = None, + headers: typing.Mapping[str, str] | None = None, + # We know *at least* botocore is depending on the order of the + # first 3 parameters so to be safe we only mark the later ones + # as keyword-only to ensure we have space to extend. 
+ *, + chunked: bool = False, + preload_content: bool = True, + decode_content: bool = True, + enforce_content_length: bool = True, + ) -> None: + self._closed = False + if url.startswith("/"): + # no scheme / host / port included, make a full url + url = f"{self.scheme}://{self.host}:{self.port}" + url + request = EmscriptenRequest( + url=url, + method=method, + timeout=self.timeout if self.timeout else 0, + decode_content=decode_content, + ) + request.set_body(body) + if headers: + for k, v in headers.items(): + request.set_header(k, v) + self._response = None + try: + if not preload_content: + self._response = send_streaming_request(request) + if self._response is None: + self._response = send_request(request) + except _TimeoutError as e: + raise TimeoutError(e.message) from e + except _RequestError as e: + raise HTTPException(e.message) from e + + def getresponse(self) -> BaseHTTPResponse: + if self._response is not None: + return EmscriptenHttpResponseWrapper( + internal_response=self._response, + url=self._response.request.url, + connection=self, + ) + else: + raise ResponseNotReady() + + def close(self) -> None: + self._closed = True + self._response = None + + @property + def is_closed(self) -> bool: + """Whether the connection either is brand new or has been previously closed. + If this property is True then both ``is_connected`` and ``has_connected_to_proxy`` + properties must be False. + """ + return self._closed + + @property + def is_connected(self) -> bool: + """Whether the connection is actively connected to any origin (proxy or target)""" + return True + + @property + def has_connected_to_proxy(self) -> bool: + """Whether the connection has successfully connected to its proxy. + This returns False if no proxy is in use. Used to determine whether + errors are coming from the proxy layer or from tunnelling to the target origin. 
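+
+        In this Emscripten backend the browser owns any proxy handling,
+        so this always returns ``False``.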
+ """ + return False + + +class EmscriptenHTTPSConnection(EmscriptenHTTPConnection): + default_port = port_by_scheme["https"] + # all this is basically ignored, as browser handles https + cert_reqs: int | str | None = None + ca_certs: str | None = None + ca_cert_dir: str | None = None + ca_cert_data: None | str | bytes = None + cert_file: str | None + key_file: str | None + key_password: str | None + ssl_context: typing.Any | None + ssl_version: int | str | None = None + ssl_minimum_version: int | None = None + ssl_maximum_version: int | None = None + assert_hostname: None | str | typing.Literal[False] + assert_fingerprint: str | None = None + + def __init__( + self, + host: str, + port: int = 0, + *, + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + blocksize: int = 16384, + socket_options: None + | _TYPE_SOCKET_OPTIONS = HTTPConnection.default_socket_options, + proxy: Url | None = None, + proxy_config: ProxyConfig | None = None, + cert_reqs: int | str | None = None, + assert_hostname: None | str | typing.Literal[False] = None, + assert_fingerprint: str | None = None, + server_hostname: str | None = None, + ssl_context: typing.Any | None = None, + ca_certs: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, + ssl_version: int | str | None = None, # Deprecated + cert_file: str | None = None, + key_file: str | None = None, + key_password: str | None = None, + ) -> None: + super().__init__( + host, + port=port, + timeout=timeout, + source_address=source_address, + blocksize=blocksize, + socket_options=socket_options, + proxy=proxy, + proxy_config=proxy_config, + ) + self.scheme = "https" + + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + self.ssl_context = ssl_context + self.server_hostname = server_hostname + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ssl_version = ssl_version + self.ssl_minimum_version = ssl_minimum_version + self.ssl_maximum_version = ssl_maximum_version + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + self.cert_reqs = None + + # The browser will automatically verify all requests. + # We have no control over that setting. 
+ self.is_verified = True + + def set_cert( + self, + key_file: str | None = None, + cert_file: str | None = None, + cert_reqs: int | str | None = None, + key_password: str | None = None, + ca_certs: str | None = None, + assert_hostname: None | str | typing.Literal[False] = None, + assert_fingerprint: str | None = None, + ca_cert_dir: str | None = None, + ca_cert_data: None | str | bytes = None, + ) -> None: + pass + + +# verify that this class implements BaseHTTP(s) connection correctly +if typing.TYPE_CHECKING: + _supports_http_protocol: BaseHTTPConnection = EmscriptenHTTPConnection("", 0) + _supports_https_protocol: BaseHTTPSConnection = EmscriptenHTTPSConnection("", 0) diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/emscripten_fetch_worker.js b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/emscripten_fetch_worker.js new file mode 100644 index 00000000..243b8622 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/emscripten_fetch_worker.js @@ -0,0 +1,110 @@ +let Status = { + SUCCESS_HEADER: -1, + SUCCESS_EOF: -2, + ERROR_TIMEOUT: -3, + ERROR_EXCEPTION: -4, +}; + +let connections = {}; +let nextConnectionID = 1; +const encoder = new TextEncoder(); + +self.addEventListener("message", async function (event) { + if (event.data.close) { + let connectionID = event.data.close; + delete connections[connectionID]; + return; + } else if (event.data.getMore) { + let connectionID = event.data.getMore; + let { curOffset, value, reader, intBuffer, byteBuffer } = + connections[connectionID]; + // if we still have some in buffer, then just send it back straight away + if (!value || curOffset >= value.length) { + // read another buffer if required + try { + let readResponse = await reader.read(); + + if (readResponse.done) { + // read everything - clear connection and return + delete connections[connectionID]; + Atomics.store(intBuffer, 0, Status.SUCCESS_EOF); + Atomics.notify(intBuffer, 0); + // finished reading successfully + // return from event handler + return; + } + curOffset = 0; + connections[connectionID].value = readResponse.value; + value = readResponse.value; + } catch (error) { + console.log("Request exception:", error); + let errorBytes = encoder.encode(error.message); + let written = errorBytes.length; + byteBuffer.set(errorBytes); + intBuffer[1] = written; + Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION); + Atomics.notify(intBuffer, 0); + } + } + + // send as much buffer as we can + let curLen = value.length - curOffset; + if (curLen > byteBuffer.length) { + curLen = byteBuffer.length; + } + byteBuffer.set(value.subarray(curOffset, curOffset + curLen), 0); + + Atomics.store(intBuffer, 0, curLen); // store current length in bytes + Atomics.notify(intBuffer, 0); + curOffset += curLen; + connections[connectionID].curOffset = curOffset; + + return; + } else { + // start fetch + let connectionID = nextConnectionID; + nextConnectionID += 1; + const intBuffer = new Int32Array(event.data.buffer); + const byteBuffer = new Uint8Array(event.data.buffer, 8); + try { + const response = await fetch(event.data.url, event.data.fetchParams); + // return the headers first via textencoder + var headers = []; + for (const pair of response.headers.entries()) { + headers.push([pair[0], pair[1]]); + } + let headerObj = { + headers: headers, + status: response.status, + connectionID, + }; + const headerText = JSON.stringify(headerObj); + let headerBytes = encoder.encode(headerText); + let written = headerBytes.length; + 
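      // Shared-buffer layout: intBuffer[0] carries the status or byte count
+      // (set via Atomics so the Python side can wait on it), intBuffer[1]
+      // the payload length, and byteBuffer views the same SharedArrayBuffer
+      // from byte offset 8.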
+      byteBuffer.set(headerBytes);
+      intBuffer[1] = written;
+      // make a connection
+      connections[connectionID] = {
+        reader: response.body.getReader(),
+        intBuffer: intBuffer,
+        byteBuffer: byteBuffer,
+        value: undefined,
+        curOffset: 0,
+      };
+      // set header ready
+      Atomics.store(intBuffer, 0, Status.SUCCESS_HEADER);
+      Atomics.notify(intBuffer, 0);
+      // all fetching after this goes through a new postmessage call with getMore
+      // this allows for parallel requests
+    } catch (error) {
+      console.log("Request exception:", error);
+      let errorBytes = encoder.encode(error.message);
+      let written = errorBytes.length;
+      byteBuffer.set(errorBytes);
+      intBuffer[1] = written;
+      Atomics.store(intBuffer, 0, Status.ERROR_EXCEPTION);
+      Atomics.notify(intBuffer, 0);
+    }
+  }
+});
+self.postMessage({ inited: true });
diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/fetch.py b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/fetch.py
new file mode 100644
index 00000000..8d197ea1
--- /dev/null
+++ b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/fetch.py
@@ -0,0 +1,418 @@
+"""
+Support for streaming http requests in emscripten.
+
+A few caveats -
+
+Firstly, you can't do streaming http in the main UI thread, because atomics.wait isn't allowed.
+Streaming only works if you're running pyodide in a web worker.
+
+Secondly, this uses an extra web worker and SharedArrayBuffer to do the asynchronous fetch
+operation, so it requires that you have crossOriginIsolation enabled by serving over https
+(or from localhost) with the two headers below set:
+
+    Cross-Origin-Opener-Policy: same-origin
+    Cross-Origin-Embedder-Policy: require-corp
+
+You can tell if cross-origin isolation is successfully enabled by looking at the global crossOriginIsolated variable in
+the javascript console. If it isn't, streaming requests will fall back to XMLHttpRequest, i.e. getting the whole
+request into a buffer and then returning it. It shows a warning in the javascript console in this case.
+
+Finally, the webworker which does the streaming fetch is created on initial import, but will only be started once
+control is returned to javascript. Call `await wait_for_streaming_ready()` to wait for streaming fetch.
+
+NB: in this code, there are a lot of javascript objects. They are named js_*
+to make it clear what type of object they are.
+"""
+from __future__ import annotations
+
+import io
+import json
+from email.parser import Parser
+from importlib.resources import files
+from typing import TYPE_CHECKING, Any
+
+import js  # type: ignore[import-not-found]
+from pyodide.ffi import (  # type: ignore[import-not-found]
+    JsArray,
+    JsException,
+    JsProxy,
+    to_js,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Buffer
+
+from .request import EmscriptenRequest
+from .response import EmscriptenResponse
+
+"""
+There are some headers that trigger unintended CORS preflight requests.
+See also https://github.com/koenvo/pyodide-http/issues/22 +""" +HEADERS_TO_IGNORE = ("user-agent",) + +SUCCESS_HEADER = -1 +SUCCESS_EOF = -2 +ERROR_TIMEOUT = -3 +ERROR_EXCEPTION = -4 + +_STREAMING_WORKER_CODE = ( + files(__package__) + .joinpath("emscripten_fetch_worker.js") + .read_text(encoding="utf-8") +) + + +class _RequestError(Exception): + def __init__( + self, + message: str | None = None, + *, + request: EmscriptenRequest | None = None, + response: EmscriptenResponse | None = None, + ): + self.request = request + self.response = response + self.message = message + super().__init__(self.message) + + +class _StreamingError(_RequestError): + pass + + +class _TimeoutError(_RequestError): + pass + + +def _obj_from_dict(dict_val: dict[str, Any]) -> JsProxy: + return to_js(dict_val, dict_converter=js.Object.fromEntries) + + +class _ReadStream(io.RawIOBase): + def __init__( + self, + int_buffer: JsArray, + byte_buffer: JsArray, + timeout: float, + worker: JsProxy, + connection_id: int, + request: EmscriptenRequest, + ): + self.int_buffer = int_buffer + self.byte_buffer = byte_buffer + self.read_pos = 0 + self.read_len = 0 + self.connection_id = connection_id + self.worker = worker + self.timeout = int(1000 * timeout) if timeout > 0 else None + self.is_live = True + self._is_closed = False + self.request: EmscriptenRequest | None = request + + def __del__(self) -> None: + self.close() + + # this is compatible with _base_connection + def is_closed(self) -> bool: + return self._is_closed + + # for compatibility with RawIOBase + @property + def closed(self) -> bool: + return self.is_closed() + + def close(self) -> None: + if not self.is_closed(): + self.read_len = 0 + self.read_pos = 0 + self.int_buffer = None + self.byte_buffer = None + self._is_closed = True + self.request = None + if self.is_live: + self.worker.postMessage(_obj_from_dict({"close": self.connection_id})) + self.is_live = False + super().close() + + def readable(self) -> bool: + return True + + def writable(self) -> bool: + return False + + def seekable(self) -> bool: + return False + + def readinto(self, byte_obj: Buffer) -> int: + if not self.int_buffer: + raise _StreamingError( + "No buffer for stream in _ReadStream.readinto", + request=self.request, + response=None, + ) + if self.read_len == 0: + # wait for the worker to send something + js.Atomics.store(self.int_buffer, 0, ERROR_TIMEOUT) + self.worker.postMessage(_obj_from_dict({"getMore": self.connection_id})) + if ( + js.Atomics.wait(self.int_buffer, 0, ERROR_TIMEOUT, self.timeout) + == "timed-out" + ): + raise _TimeoutError + data_len = self.int_buffer[0] + if data_len > 0: + self.read_len = data_len + self.read_pos = 0 + elif data_len == ERROR_EXCEPTION: + string_len = self.int_buffer[1] + # decode the error string + js_decoder = js.TextDecoder.new() + json_str = js_decoder.decode(self.byte_buffer.slice(0, string_len)) + raise _StreamingError( + f"Exception thrown in fetch: {json_str}", + request=self.request, + response=None, + ) + else: + # EOF, free the buffers and return zero + # and free the request + self.is_live = False + self.close() + return 0 + # copy from int32array to python bytes + ret_length = min(self.read_len, len(memoryview(byte_obj))) + subarray = self.byte_buffer.subarray( + self.read_pos, self.read_pos + ret_length + ).to_py() + memoryview(byte_obj)[0:ret_length] = subarray + self.read_len -= ret_length + self.read_pos += ret_length + return ret_length + + +class _StreamingFetcher: + def __init__(self) -> None: + # make web-worker and data buffer 
on startup + self.streaming_ready = False + + js_data_blob = js.Blob.new( + [_STREAMING_WORKER_CODE], _obj_from_dict({"type": "application/javascript"}) + ) + + def promise_resolver(js_resolve_fn: JsProxy, js_reject_fn: JsProxy) -> None: + def onMsg(e: JsProxy) -> None: + self.streaming_ready = True + js_resolve_fn(e) + + def onErr(e: JsProxy) -> None: + js_reject_fn(e) # Defensive: never happens in ci + + self.js_worker.onmessage = onMsg + self.js_worker.onerror = onErr + + js_data_url = js.URL.createObjectURL(js_data_blob) + self.js_worker = js.globalThis.Worker.new(js_data_url) + self.js_worker_ready_promise = js.globalThis.Promise.new(promise_resolver) + + def send(self, request: EmscriptenRequest) -> EmscriptenResponse: + headers = { + k: v for k, v in request.headers.items() if k not in HEADERS_TO_IGNORE + } + + body = request.body + fetch_data = {"headers": headers, "body": to_js(body), "method": request.method} + # start the request off in the worker + timeout = int(1000 * request.timeout) if request.timeout > 0 else None + js_shared_buffer = js.SharedArrayBuffer.new(1048576) + js_int_buffer = js.Int32Array.new(js_shared_buffer) + js_byte_buffer = js.Uint8Array.new(js_shared_buffer, 8) + + js.Atomics.store(js_int_buffer, 0, ERROR_TIMEOUT) + js.Atomics.notify(js_int_buffer, 0) + js_absolute_url = js.URL.new(request.url, js.location).href + self.js_worker.postMessage( + _obj_from_dict( + { + "buffer": js_shared_buffer, + "url": js_absolute_url, + "fetchParams": fetch_data, + } + ) + ) + # wait for the worker to send something + js.Atomics.wait(js_int_buffer, 0, ERROR_TIMEOUT, timeout) + if js_int_buffer[0] == ERROR_TIMEOUT: + raise _TimeoutError( + "Timeout connecting to streaming request", + request=request, + response=None, + ) + elif js_int_buffer[0] == SUCCESS_HEADER: + # got response + # header length is in second int of intBuffer + string_len = js_int_buffer[1] + # decode the rest to a JSON string + js_decoder = js.TextDecoder.new() + # this does a copy (the slice) because decode can't work on shared array + # for some silly reason + json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len)) + # get it as an object + response_obj = json.loads(json_str) + return EmscriptenResponse( + request=request, + status_code=response_obj["status"], + headers=response_obj["headers"], + body=_ReadStream( + js_int_buffer, + js_byte_buffer, + request.timeout, + self.js_worker, + response_obj["connectionID"], + request, + ), + ) + elif js_int_buffer[0] == ERROR_EXCEPTION: + string_len = js_int_buffer[1] + # decode the error string + js_decoder = js.TextDecoder.new() + json_str = js_decoder.decode(js_byte_buffer.slice(0, string_len)) + raise _StreamingError( + f"Exception thrown in fetch: {json_str}", request=request, response=None + ) + else: + raise _StreamingError( + f"Unknown status from worker in fetch: {js_int_buffer[0]}", + request=request, + response=None, + ) + + +# check if we are in a worker or not +def is_in_browser_main_thread() -> bool: + return hasattr(js, "window") and hasattr(js, "self") and js.self == js.window + + +def is_cross_origin_isolated() -> bool: + return hasattr(js, "crossOriginIsolated") and js.crossOriginIsolated + + +def is_in_node() -> bool: + return ( + hasattr(js, "process") + and hasattr(js.process, "release") + and hasattr(js.process.release, "name") + and js.process.release.name == "node" + ) + + +def is_worker_available() -> bool: + return hasattr(js, "Worker") and hasattr(js, "Blob") + + +_fetcher: _StreamingFetcher | None = None + +if 
is_worker_available() and ( + (is_cross_origin_isolated() and not is_in_browser_main_thread()) + and (not is_in_node()) +): + _fetcher = _StreamingFetcher() +else: + _fetcher = None + + +def send_streaming_request(request: EmscriptenRequest) -> EmscriptenResponse | None: + if _fetcher and streaming_ready(): + return _fetcher.send(request) + else: + _show_streaming_warning() + return None + + +_SHOWN_TIMEOUT_WARNING = False + + +def _show_timeout_warning() -> None: + global _SHOWN_TIMEOUT_WARNING + if not _SHOWN_TIMEOUT_WARNING: + _SHOWN_TIMEOUT_WARNING = True + message = "Warning: Timeout is not available on main browser thread" + js.console.warn(message) + + +_SHOWN_STREAMING_WARNING = False + + +def _show_streaming_warning() -> None: + global _SHOWN_STREAMING_WARNING + if not _SHOWN_STREAMING_WARNING: + _SHOWN_STREAMING_WARNING = True + message = "Can't stream HTTP requests because: \n" + if not is_cross_origin_isolated(): + message += " Page is not cross-origin isolated\n" + if is_in_browser_main_thread(): + message += " Python is running in main browser thread\n" + if not is_worker_available(): + message += " Worker or Blob classes are not available in this environment." # Defensive: this is always False in browsers that we test in + if streaming_ready() is False: + message += """ Streaming fetch worker isn't ready. If you want to be sure that streaming fetch +is working, you need to call: 'await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready()`""" + from js import console + + console.warn(message) + + +def send_request(request: EmscriptenRequest) -> EmscriptenResponse: + try: + js_xhr = js.XMLHttpRequest.new() + + if not is_in_browser_main_thread(): + js_xhr.responseType = "arraybuffer" + if request.timeout: + js_xhr.timeout = int(request.timeout * 1000) + else: + js_xhr.overrideMimeType("text/plain; charset=ISO-8859-15") + if request.timeout: + # timeout isn't available on the main thread - show a warning in console + # if it is set + _show_timeout_warning() + + js_xhr.open(request.method, request.url, False) + for name, value in request.headers.items(): + if name.lower() not in HEADERS_TO_IGNORE: + js_xhr.setRequestHeader(name, value) + + js_xhr.send(to_js(request.body)) + + headers = dict(Parser().parsestr(js_xhr.getAllResponseHeaders())) + + if not is_in_browser_main_thread(): + body = js_xhr.response.to_py().tobytes() + else: + body = js_xhr.response.encode("ISO-8859-15") + return EmscriptenResponse( + status_code=js_xhr.status, headers=headers, body=body, request=request + ) + except JsException as err: + if err.name == "TimeoutError": + raise _TimeoutError(err.message, request=request) + elif err.name == "NetworkError": + raise _RequestError(err.message, request=request) + else: + # general http error + raise _RequestError(err.message, request=request) + + +def streaming_ready() -> bool | None: + if _fetcher: + return _fetcher.streaming_ready + else: + return None # no fetcher, return None to signify that + + +async def wait_for_streaming_ready() -> bool: + if _fetcher: + await _fetcher.js_worker_ready_promise + return True + else: + return False diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/request.py b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/request.py new file mode 100644 index 00000000..e692e692 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/request.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from dataclasses import dataclass, field + +from 
..._base_connection import _TYPE_BODY + + +@dataclass +class EmscriptenRequest: + method: str + url: str + params: dict[str, str] | None = None + body: _TYPE_BODY | None = None + headers: dict[str, str] = field(default_factory=dict) + timeout: float = 0 + decode_content: bool = True + + def set_header(self, name: str, value: str) -> None: + self.headers[name.capitalize()] = value + + def set_body(self, body: _TYPE_BODY | None) -> None: + self.body = body diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/response.py b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/response.py new file mode 100644 index 00000000..303b4ee0 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/contrib/emscripten/response.py @@ -0,0 +1,276 @@ +from __future__ import annotations + +import json as _json +import logging +import typing +from contextlib import contextmanager +from dataclasses import dataclass +from http.client import HTTPException as HTTPException +from io import BytesIO, IOBase + +from ...exceptions import InvalidHeader, TimeoutError +from ...response import BaseHTTPResponse +from ...util.retry import Retry +from .request import EmscriptenRequest + +if typing.TYPE_CHECKING: + from ..._base_connection import BaseHTTPConnection, BaseHTTPSConnection + +log = logging.getLogger(__name__) + + +@dataclass +class EmscriptenResponse: + status_code: int + headers: dict[str, str] + body: IOBase | bytes + request: EmscriptenRequest + + +class EmscriptenHttpResponseWrapper(BaseHTTPResponse): + def __init__( + self, + internal_response: EmscriptenResponse, + url: str | None = None, + connection: BaseHTTPConnection | BaseHTTPSConnection | None = None, + ): + self._pool = None # set by pool class + self._body = None + self._response = internal_response + self._url = url + self._connection = connection + self._closed = False + super().__init__( + headers=internal_response.headers, + status=internal_response.status_code, + request_url=url, + version=0, + reason="", + decode_content=True, + ) + self.length_remaining = self._init_length(self._response.request.method) + self.length_is_certain = False + + @property + def url(self) -> str | None: + return self._url + + @url.setter + def url(self, url: str | None) -> None: + self._url = url + + @property + def connection(self) -> BaseHTTPConnection | BaseHTTPSConnection | None: + return self._connection + + @property + def retries(self) -> Retry | None: + return self._retries + + @retries.setter + def retries(self, retries: Retry | None) -> None: + # Override the request_url if retries has a redirect location. + self._retries = retries + + def stream( + self, amt: int | None = 2**16, decode_content: bool | None = None + ) -> typing.Generator[bytes, None, None]: + """ + A generator wrapper for the read() method. A call will block until + ``amt`` bytes have been read from the connection or until the + connection is closed. + + :param amt: + How much of the content to read. The generator will return up to + much data per iteration, but may return less. This is particularly + likely when using compressed data. However, the empty string will + never be returned. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. 
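+
+        Illustrative sketch (``handle`` is a placeholder for caller code)::
+
+            for chunk in response.stream(2**16):
+                handle(chunk)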
+ """ + while True: + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + else: + break + + def _init_length(self, request_method: str | None) -> int | None: + length: int | None + content_length: str | None = self.headers.get("content-length") + + if content_length is not None: + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = {int(val) for val in content_length.split(",")} + if len(lengths) > 1: + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % content_length + ) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + else: # if content_length is None + length = None + + # Check for responses that shouldn't include a body + if ( + self.status in (204, 304) + or 100 <= self.status < 200 + or request_method == "HEAD" + ): + length = 0 + + return length + + def read( + self, + amt: int | None = None, + decode_content: bool | None = None, # ignored because browser decodes always + cache_content: bool = False, + ) -> bytes: + if ( + self._closed + or self._response is None + or (isinstance(self._response.body, IOBase) and self._response.body.closed) + ): + return b"" + + with self._error_catcher(): + # body has been preloaded as a string by XmlHttpRequest + if not isinstance(self._response.body, IOBase): + self.length_remaining = len(self._response.body) + self.length_is_certain = True + # wrap body in IOStream + self._response.body = BytesIO(self._response.body) + if amt is not None: + # don't cache partial content + cache_content = False + data = self._response.body.read(amt) + if self.length_remaining is not None: + self.length_remaining = max(self.length_remaining - len(data), 0) + if (self.length_is_certain and self.length_remaining == 0) or len( + data + ) < amt: + # definitely finished reading, close response stream + self._response.body.close() + return typing.cast(bytes, data) + else: # read all we can (and cache it) + data = self._response.body.read() + if cache_content: + self._body = data + if self.length_remaining is not None: + self.length_remaining = max(self.length_remaining - len(data), 0) + if len(data) == 0 or ( + self.length_is_certain and self.length_remaining == 0 + ): + # definitely finished reading, close response stream + self._response.body.close() + return typing.cast(bytes, data) + + def read_chunked( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> typing.Generator[bytes, None, None]: + # chunked is handled by browser + while True: + bytes = self.read(amt, decode_content) + if not bytes: + break + yield bytes + + def release_conn(self) -> None: + if not self._pool or not self._connection: + return None + + self._pool._put_conn(self._connection) + self._connection = None + + def drain_conn(self) -> None: + self.close() + + @property + def data(self) -> bytes: + if self._body: + return self._body + else: + return self.read(cache_content=True) + + def json(self) -> typing.Any: + """ + Parses the body of the HTTP response as JSON. + + To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. + + This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. + + Read more :ref:`here `. 
+ """ + data = self.data.decode("utf-8") + return _json.loads(data) + + def close(self) -> None: + if not self._closed: + if isinstance(self._response.body, IOBase): + self._response.body.close() + if self._connection: + self._connection.close() + self._connection = None + self._closed = True + + @contextmanager + def _error_catcher(self) -> typing.Generator[None, None, None]: + """ + Catch Emscripten specific exceptions thrown by fetch.py, + instead re-raising urllib3 variants, so that low-level exceptions + are not leaked in the high-level api. + + On exit, release the connection back to the pool. + """ + from .fetch import _RequestError, _TimeoutError # avoid circular import + + clean_exit = False + + try: + yield + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + except _TimeoutError as e: + raise TimeoutError(str(e)) + except _RequestError as e: + raise HTTPException(str(e)) + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now + if ( + isinstance(self._response.body, IOBase) + and not self._response.body.closed + ): + self._response.body.close() + # release the connection back to the pool + self.release_conn() + else: + # If we have read everything from the response stream, + # return the connection back to the pool. + if ( + isinstance(self._response.body, IOBase) + and self._response.body.closed + ): + self.release_conn() diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/pyopenssl.py b/templates/skills/spotify/dependencies/urllib3/contrib/pyopenssl.py new file mode 100644 index 00000000..b89a6dab --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/contrib/pyopenssl.py @@ -0,0 +1,548 @@ +""" +Module for using pyOpenSSL as a TLS backend. This module was relevant before +the standard library ``ssl`` module supported SNI, but now that we've dropped +support for Python 2.7 all relevant Python versions support SNI so +**this module is no longer recommended**. + +This needs the following packages installed: + +* `pyOpenSSL`_ (tested with 16.0.0) +* `cryptography`_ (minimum 1.3.4, from pyopenssl) +* `idna`_ (minimum 2.0) + +However, pyOpenSSL depends on cryptography, so while we use all three directly here we +end up having relatively few packages required. + +You can install them with the following command: + +.. code-block:: bash + + $ python -m pip install pyopenssl cryptography idna + +To activate certificate checking, call +:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code +before you begin making HTTP requests. This can be done in a ``sitecustomize`` +module, or at any other time before your application begins using ``urllib3``, +like this: + +.. code-block:: python + + try: + import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() + except ImportError: + pass + +.. _pyopenssl: https://www.pyopenssl.org +.. _cryptography: https://cryptography.io +.. 
_idna: https://github.com/kjd/idna +""" + +from __future__ import annotations + +import OpenSSL.SSL # type: ignore[import-untyped] +from cryptography import x509 + +try: + from cryptography.x509 import UnsupportedExtension # type: ignore[attr-defined] +except ImportError: + # UnsupportedExtension is gone in cryptography >= 2.1.0 + class UnsupportedExtension(Exception): # type: ignore[no-redef] + pass + + +import logging +import ssl +import typing +from io import BytesIO +from socket import socket as socket_cls +from socket import timeout + +from .. import util + +if typing.TYPE_CHECKING: + from OpenSSL.crypto import X509 # type: ignore[import-untyped] + + +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] + +# Map from urllib3 to PyOpenSSL compatible parameter-values. +_openssl_versions: dict[int, int] = { + util.ssl_.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, # type: ignore[attr-defined] + util.ssl_.PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD, # type: ignore[attr-defined] + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, +} + +if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"): + _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD + +if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"): + _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD + + +_stdlib_to_openssl_verify = { + ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, + ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} +_openssl_to_stdlib_verify = {v: k for k, v in _stdlib_to_openssl_verify.items()} + +# The SSLvX values are the most likely to be missing in the future +# but we check them all just to be sure. +_OP_NO_SSLv2_OR_SSLv3: int = getattr(OpenSSL.SSL, "OP_NO_SSLv2", 0) | getattr( + OpenSSL.SSL, "OP_NO_SSLv3", 0 +) +_OP_NO_TLSv1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1", 0) +_OP_NO_TLSv1_1: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_1", 0) +_OP_NO_TLSv1_2: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_2", 0) +_OP_NO_TLSv1_3: int = getattr(OpenSSL.SSL, "OP_NO_TLSv1_3", 0) + +_openssl_to_ssl_minimum_version: dict[int, int] = { + ssl.TLSVersion.MINIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3, + ssl.TLSVersion.TLSv1: _OP_NO_SSLv2_OR_SSLv3, + ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1, + ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1, + ssl.TLSVersion.TLSv1_3: ( + _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 + ), + ssl.TLSVersion.MAXIMUM_SUPPORTED: ( + _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 + ), +} +_openssl_to_ssl_maximum_version: dict[int, int] = { + ssl.TLSVersion.MINIMUM_SUPPORTED: ( + _OP_NO_SSLv2_OR_SSLv3 + | _OP_NO_TLSv1 + | _OP_NO_TLSv1_1 + | _OP_NO_TLSv1_2 + | _OP_NO_TLSv1_3 + ), + ssl.TLSVersion.TLSv1: ( + _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_1 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3 + ), + ssl.TLSVersion.TLSv1_1: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_2 | _OP_NO_TLSv1_3, + ssl.TLSVersion.TLSv1_2: _OP_NO_SSLv2_OR_SSLv3 | _OP_NO_TLSv1_3, + ssl.TLSVersion.TLSv1_3: _OP_NO_SSLv2_OR_SSLv3, + ssl.TLSVersion.MAXIMUM_SUPPORTED: _OP_NO_SSLv2_OR_SSLv3, +} + +# OpenSSL will only write 16K at a time +SSL_WRITE_BLOCKSIZE = 16384 + +orig_util_SSLContext = util.ssl_.SSLContext + + +log = logging.getLogger(__name__) + + +def inject_into_urllib3() -> None: + "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support." 
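+    # Both aliases below must be swapped together: util.SSLContext and
+    # util.ssl_.SSLContext name the same class, and callers may hold
+    # either reference.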
+ + _validate_dependencies_met() + + util.SSLContext = PyOpenSSLContext # type: ignore[assignment] + util.ssl_.SSLContext = PyOpenSSLContext # type: ignore[assignment] + util.IS_PYOPENSSL = True + util.ssl_.IS_PYOPENSSL = True + + +def extract_from_urllib3() -> None: + "Undo monkey-patching by :func:`inject_into_urllib3`." + + util.SSLContext = orig_util_SSLContext + util.ssl_.SSLContext = orig_util_SSLContext + util.IS_PYOPENSSL = False + util.ssl_.IS_PYOPENSSL = False + + +def _validate_dependencies_met() -> None: + """ + Verifies that PyOpenSSL's package-level dependencies have been met. + Throws `ImportError` if they are not met. + """ + # Method added in `cryptography==1.1`; not available in older versions + from cryptography.x509.extensions import Extensions + + if getattr(Extensions, "get_extension_for_class", None) is None: + raise ImportError( + "'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer." + ) + + # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 + # attribute is only present on those versions. + from OpenSSL.crypto import X509 + + x509 = X509() + if getattr(x509, "_x509", None) is None: + raise ImportError( + "'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer." + ) + + +def _dnsname_to_stdlib(name: str) -> str | None: + """ + Converts a dNSName SubjectAlternativeName field to the form used by the + standard library on the given Python version. + + Cryptography produces a dNSName as a unicode string that was idna-decoded + from ASCII bytes. We need to idna-encode that string to get it back, and + then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib + uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). + + If the name cannot be idna-encoded then we return None signalling that + the name given should be skipped. + """ + + def idna_encode(name: str) -> bytes | None: + """ + Borrowed wholesale from the Python Cryptography Project. It turns out + that we can't just safely call `idna.encode`: it can explode for + wildcard names. This avoids that problem. + """ + import idna + + try: + for prefix in ["*.", "."]: + if name.startswith(prefix): + name = name[len(prefix) :] + return prefix.encode("ascii") + idna.encode(name) + return idna.encode(name) + except idna.core.IDNAError: + return None + + # Don't send IPv6 addresses through the IDNA encoder. + if ":" in name: + return name + + encoded_name = idna_encode(name) + if encoded_name is None: + return None + return encoded_name.decode("utf-8") + + +def get_subj_alt_name(peer_cert: X509) -> list[tuple[str, str]]: + """ + Given an PyOpenSSL certificate, provides all the subject alternative names. + """ + cert = peer_cert.to_cryptography() + + # We want to find the SAN extension. Ask Cryptography to locate it (it's + # faster than looping in Python) + try: + ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value + except x509.ExtensionNotFound: + # No such extension, return the empty list. + return [] + except ( + x509.DuplicateExtension, + UnsupportedExtension, + x509.UnsupportedGeneralNameType, + UnicodeError, + ) as e: + # A problem has been found with the quality of the certificate. Assume + # no SAN field is present. + log.warning( + "A problem was encountered with the certificate that prevented " + "urllib3 from finding the SubjectAlternativeName field. This can " + "affect certificate validation. 
The error was %s", + e, + ) + return [] + + # We want to return dNSName and iPAddress fields. We need to cast the IPs + # back to strings because the match_hostname function wants them as + # strings. + # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 + # decoded. This is pretty frustrating, but that's what the standard library + # does with certificates, and so we need to attempt to do the same. + # We also want to skip over names which cannot be idna encoded. + names = [ + ("DNS", name) + for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + if name is not None + ] + names.extend( + ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress) + ) + + return names + + +class WrappedSocket: + """API-compatibility wrapper for Python OpenSSL's Connection-class.""" + + def __init__( + self, + connection: OpenSSL.SSL.Connection, + socket: socket_cls, + suppress_ragged_eofs: bool = True, + ) -> None: + self.connection = connection + self.socket = socket + self.suppress_ragged_eofs = suppress_ragged_eofs + self._io_refs = 0 + self._closed = False + + def fileno(self) -> int: + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self) -> None: + if self._io_refs > 0: + self._io_refs -= 1 + if self._closed: + self.close() + + def recv(self, *args: typing.Any, **kwargs: typing.Any) -> bytes: + try: + data = self.connection.recv(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return b"" + else: + raise OSError(e.args[0], str(e)) from e + except OpenSSL.SSL.ZeroReturnError: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return b"" + else: + raise + except OpenSSL.SSL.WantReadError as e: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout("The read operation timed out") from e + else: + return self.recv(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError(f"read error: {e!r}") from e + else: + return data # type: ignore[no-any-return] + + def recv_into(self, *args: typing.Any, **kwargs: typing.Any) -> int: + try: + return self.connection.recv_into(*args, **kwargs) # type: ignore[no-any-return] + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return 0 + else: + raise OSError(e.args[0], str(e)) from e + except OpenSSL.SSL.ZeroReturnError: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return 0 + else: + raise + except OpenSSL.SSL.WantReadError as e: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout("The read operation timed out") from e + else: + return self.recv_into(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError(f"read error: {e!r}") from e + + def settimeout(self, timeout: float) -> None: + return self.socket.settimeout(timeout) + + def _send_until_done(self, data: bytes) -> int: + while True: + try: + return self.connection.send(data) # type: ignore[no-any-return] + except OpenSSL.SSL.WantWriteError as e: + if not util.wait_for_write(self.socket, self.socket.gettimeout()): + raise timeout() from e + continue + except OpenSSL.SSL.SysCallError as e: + raise OSError(e.args[0], str(e)) from e + + def sendall(self, data: bytes) -> None: + total_sent = 0 + while total_sent < len(data): + sent = 
self._send_until_done( + data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE] + ) + total_sent += sent + + def shutdown(self) -> None: + # FIXME rethrow compatible exceptions should we ever use this + self.connection.shutdown() + + def close(self) -> None: + self._closed = True + if self._io_refs <= 0: + self._real_close() + + def _real_close(self) -> None: + try: + return self.connection.close() # type: ignore[no-any-return] + except OpenSSL.SSL.Error: + return + + def getpeercert( + self, binary_form: bool = False + ) -> dict[str, list[typing.Any]] | None: + x509 = self.connection.get_peer_certificate() + + if not x509: + return x509 # type: ignore[no-any-return] + + if binary_form: + return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509) # type: ignore[no-any-return] + + return { + "subject": ((("commonName", x509.get_subject().CN),),), # type: ignore[dict-item] + "subjectAltName": get_subj_alt_name(x509), + } + + def version(self) -> str: + return self.connection.get_protocol_version_name() # type: ignore[no-any-return] + + +WrappedSocket.makefile = socket_cls.makefile # type: ignore[attr-defined] + + +class PyOpenSSLContext: + """ + I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible + for translating the interface of the standard library ``SSLContext`` object + to calls into PyOpenSSL. + """ + + def __init__(self, protocol: int) -> None: + self.protocol = _openssl_versions[protocol] + self._ctx = OpenSSL.SSL.Context(self.protocol) + self._options = 0 + self.check_hostname = False + self._minimum_version: int = ssl.TLSVersion.MINIMUM_SUPPORTED + self._maximum_version: int = ssl.TLSVersion.MAXIMUM_SUPPORTED + + @property + def options(self) -> int: + return self._options + + @options.setter + def options(self, value: int) -> None: + self._options = value + self._set_ctx_options() + + @property + def verify_mode(self) -> int: + return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()] + + @verify_mode.setter + def verify_mode(self, value: ssl.VerifyMode) -> None: + self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback) + + def set_default_verify_paths(self) -> None: + self._ctx.set_default_verify_paths() + + def set_ciphers(self, ciphers: bytes | str) -> None: + if isinstance(ciphers, str): + ciphers = ciphers.encode("utf-8") + self._ctx.set_cipher_list(ciphers) + + def load_verify_locations( + self, + cafile: str | None = None, + capath: str | None = None, + cadata: bytes | None = None, + ) -> None: + if cafile is not None: + cafile = cafile.encode("utf-8") # type: ignore[assignment] + if capath is not None: + capath = capath.encode("utf-8") # type: ignore[assignment] + try: + self._ctx.load_verify_locations(cafile, capath) + if cadata is not None: + self._ctx.load_verify_locations(BytesIO(cadata)) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError(f"unable to load trusted certificates: {e!r}") from e + + def load_cert_chain( + self, + certfile: str, + keyfile: str | None = None, + password: str | None = None, + ) -> None: + try: + self._ctx.use_certificate_chain_file(certfile) + if password is not None: + if not isinstance(password, bytes): + password = password.encode("utf-8") # type: ignore[assignment] + self._ctx.set_passwd_cb(lambda *_: password) + self._ctx.use_privatekey_file(keyfile or certfile) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError(f"Unable to load certificate chain: {e!r}") from e + + def set_alpn_protocols(self, protocols: list[bytes | str]) -> None: + protocols = [util.util.to_bytes(p, 
"ascii") for p in protocols] + return self._ctx.set_alpn_protos(protocols) # type: ignore[no-any-return] + + def wrap_socket( + self, + sock: socket_cls, + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: bytes | str | None = None, + ) -> WrappedSocket: + cnx = OpenSSL.SSL.Connection(self._ctx, sock) + + # If server_hostname is an IP, don't use it for SNI, per RFC6066 Section 3 + if server_hostname and not util.ssl_.is_ipaddress(server_hostname): + if isinstance(server_hostname, str): + server_hostname = server_hostname.encode("utf-8") + cnx.set_tlsext_host_name(server_hostname) + + cnx.set_connect_state() + + while True: + try: + cnx.do_handshake() + except OpenSSL.SSL.WantReadError as e: + if not util.wait_for_read(sock, sock.gettimeout()): + raise timeout("select timed out") from e + continue + except OpenSSL.SSL.Error as e: + raise ssl.SSLError(f"bad handshake: {e!r}") from e + break + + return WrappedSocket(cnx, sock) + + def _set_ctx_options(self) -> None: + self._ctx.set_options( + self._options + | _openssl_to_ssl_minimum_version[self._minimum_version] + | _openssl_to_ssl_maximum_version[self._maximum_version] + ) + + @property + def minimum_version(self) -> int: + return self._minimum_version + + @minimum_version.setter + def minimum_version(self, minimum_version: int) -> None: + self._minimum_version = minimum_version + self._set_ctx_options() + + @property + def maximum_version(self) -> int: + return self._maximum_version + + @maximum_version.setter + def maximum_version(self, maximum_version: int) -> None: + self._maximum_version = maximum_version + self._set_ctx_options() + + +def _verify_callback( + cnx: OpenSSL.SSL.Connection, + x509: X509, + err_no: int, + err_depth: int, + return_code: int, +) -> bool: + return err_no == 0 diff --git a/templates/skills/spotify/dependencies/urllib3/contrib/socks.py b/templates/skills/spotify/dependencies/urllib3/contrib/socks.py new file mode 100644 index 00000000..5a803916 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/contrib/socks.py @@ -0,0 +1,230 @@ +""" +This module contains provisional support for SOCKS proxies from within +urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and +SOCKS5. To enable its functionality, either install PySocks or install this +module with the ``socks`` extra. + +The SOCKS implementation supports the full range of urllib3 features. It also +supports the following SOCKS features: + +- SOCKS4A (``proxy_url='socks4a://...``) +- SOCKS4 (``proxy_url='socks4://...``) +- SOCKS5 with remote DNS (``proxy_url='socks5h://...``) +- SOCKS5 with local DNS (``proxy_url='socks5://...``) +- Usernames and passwords for the SOCKS proxy + +.. note:: + It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in + your ``proxy_url`` to ensure that DNS resolution is done from the remote + server instead of client-side when connecting to a domain name. + +SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5 +supports IPv4, IPv6, and domain names. + +When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url`` +will be sent as the ``userid`` section of the SOCKS request: + +.. code-block:: python + + proxy_url="socks4a://@proxy-host" + +When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion +of the ``proxy_url`` will be sent as the username/password to authenticate +with the proxy: + +.. 
code-block:: python + + proxy_url="socks5h://:@proxy-host" + +""" + +from __future__ import annotations + +try: + import socks # type: ignore[import-not-found] +except ImportError: + import warnings + + from ..exceptions import DependencyWarning + + warnings.warn( + ( + "SOCKS support in urllib3 requires the installation of optional " + "dependencies: specifically, PySocks. For more information, see " + "https://urllib3.readthedocs.io/en/latest/advanced-usage.html#socks-proxies" + ), + DependencyWarning, + ) + raise + +import typing +from socket import timeout as SocketTimeout + +from ..connection import HTTPConnection, HTTPSConnection +from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool +from ..exceptions import ConnectTimeoutError, NewConnectionError +from ..poolmanager import PoolManager +from ..util.url import parse_url + +try: + import ssl +except ImportError: + ssl = None # type: ignore[assignment] + +from typing import TypedDict + + +class _TYPE_SOCKS_OPTIONS(TypedDict): + socks_version: int + proxy_host: str | None + proxy_port: str | None + username: str | None + password: str | None + rdns: bool + + +class SOCKSConnection(HTTPConnection): + """ + A plain-text HTTP connection that connects via a SOCKS proxy. + """ + + def __init__( + self, + _socks_options: _TYPE_SOCKS_OPTIONS, + *args: typing.Any, + **kwargs: typing.Any, + ) -> None: + self._socks_options = _socks_options + super().__init__(*args, **kwargs) + + def _new_conn(self) -> socks.socksocket: + """ + Establish a new connection via the SOCKS proxy. + """ + extra_kw: dict[str, typing.Any] = {} + if self.source_address: + extra_kw["source_address"] = self.source_address + + if self.socket_options: + extra_kw["socket_options"] = self.socket_options + + try: + conn = socks.create_connection( + (self.host, self.port), + proxy_type=self._socks_options["socks_version"], + proxy_addr=self._socks_options["proxy_host"], + proxy_port=self._socks_options["proxy_port"], + proxy_username=self._socks_options["username"], + proxy_password=self._socks_options["password"], + proxy_rdns=self._socks_options["rdns"], + timeout=self.timeout, + **extra_kw, + ) + + except SocketTimeout as e: + raise ConnectTimeoutError( + self, + f"Connection to {self.host} timed out. (connect timeout={self.timeout})", + ) from e + + except socks.ProxyError as e: + # This is fragile as hell, but it seems to be the only way to raise + # useful errors here. + if e.socket_err: + error = e.socket_err + if isinstance(error, SocketTimeout): + raise ConnectTimeoutError( + self, + f"Connection to {self.host} timed out. (connect timeout={self.timeout})", + ) from e + else: + # Adding `from e` messes with coverage somehow, so it's omitted. + # See #2386. + raise NewConnectionError( + self, f"Failed to establish a new connection: {error}" + ) + else: + raise NewConnectionError( + self, f"Failed to establish a new connection: {e}" + ) from e + + except OSError as e: # Defensive: PySocks should catch all these. + raise NewConnectionError( + self, f"Failed to establish a new connection: {e}" + ) from e + + return conn + + +# We don't need to duplicate the Verified/Unverified distinction from +# urllib3/connection.py here because the HTTPSConnection will already have been +# correctly set to either the Verified or Unverified form by that module. This +# means the SOCKSHTTPSConnection will automatically be the correct type. 
+class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection): + pass + + +class SOCKSHTTPConnectionPool(HTTPConnectionPool): + ConnectionCls = SOCKSConnection + + +class SOCKSHTTPSConnectionPool(HTTPSConnectionPool): + ConnectionCls = SOCKSHTTPSConnection + + +class SOCKSProxyManager(PoolManager): + """ + A version of the urllib3 ProxyManager that routes connections via the + defined SOCKS proxy. + """ + + pool_classes_by_scheme = { + "http": SOCKSHTTPConnectionPool, + "https": SOCKSHTTPSConnectionPool, + } + + def __init__( + self, + proxy_url: str, + username: str | None = None, + password: str | None = None, + num_pools: int = 10, + headers: typing.Mapping[str, str] | None = None, + **connection_pool_kw: typing.Any, + ): + parsed = parse_url(proxy_url) + + if username is None and password is None and parsed.auth is not None: + split = parsed.auth.split(":") + if len(split) == 2: + username, password = split + if parsed.scheme == "socks5": + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = False + elif parsed.scheme == "socks5h": + socks_version = socks.PROXY_TYPE_SOCKS5 + rdns = True + elif parsed.scheme == "socks4": + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = False + elif parsed.scheme == "socks4a": + socks_version = socks.PROXY_TYPE_SOCKS4 + rdns = True + else: + raise ValueError(f"Unable to determine SOCKS version from {proxy_url}") + + self.proxy_url = proxy_url + + socks_options = { + "socks_version": socks_version, + "proxy_host": parsed.host, + "proxy_port": parsed.port, + "username": username, + "password": password, + "rdns": rdns, + } + connection_pool_kw["_socks_options"] = socks_options + + super().__init__(num_pools, headers, **connection_pool_kw) + + self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme diff --git a/templates/skills/spotify/dependencies/urllib3/exceptions.py b/templates/skills/spotify/dependencies/urllib3/exceptions.py new file mode 100644 index 00000000..b0792f00 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/exceptions.py @@ -0,0 +1,321 @@ +from __future__ import annotations + +import socket +import typing +import warnings +from email.errors import MessageDefect +from http.client import IncompleteRead as httplib_IncompleteRead + +if typing.TYPE_CHECKING: + from .connection import HTTPConnection + from .connectionpool import ConnectionPool + from .response import HTTPResponse + from .util.retry import Retry + +# Base Exceptions + + +class HTTPError(Exception): + """Base exception used by this module.""" + + +class HTTPWarning(Warning): + """Base warning used by this module.""" + + +_TYPE_REDUCE_RESULT = typing.Tuple[ + typing.Callable[..., object], typing.Tuple[object, ...] +] + + +class PoolError(HTTPError): + """Base exception for errors caused within a pool.""" + + def __init__(self, pool: ConnectionPool, message: str) -> None: + self.pool = pool + super().__init__(f"{pool}: {message}") + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. + return self.__class__, (None, None) + + +class RequestError(PoolError): + """Base exception for PoolErrors that have associated URLs.""" + + def __init__(self, pool: ConnectionPool, url: str, message: str) -> None: + self.url = url + super().__init__(pool, message) + + def __reduce__(self) -> _TYPE_REDUCE_RESULT: + # For pickling purposes. 
+        return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+    """Raised when SSL certificate fails in an HTTPS connection."""
+
+
+class ProxyError(HTTPError):
+    """Raised when the connection to a proxy fails."""
+
+    # The original error is also available as __cause__.
+    original_error: Exception
+
+    def __init__(self, message: str, error: Exception) -> None:
+        super().__init__(message, error)
+        self.original_error = error
+
+
+class DecodeError(HTTPError):
+    """Raised when automatic decoding based on Content-Type fails."""
+
+
+class ProtocolError(HTTPError):
+    """Raised when something unexpected happens mid-request/response."""
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+
+class MaxRetryError(RequestError):
+    """Raised when the maximum number of retries is exceeded.
+
+    :param pool: The connection pool
+    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param str url: The requested Url
+    :param reason: The underlying error
+    :type reason: :class:`Exception`
+
+    """
+
+    def __init__(
+        self, pool: ConnectionPool, url: str, reason: Exception | None = None
+    ) -> None:
+        self.reason = reason
+
+        message = f"Max retries exceeded with url: {url} (Caused by {reason!r})"
+
+        super().__init__(pool, url, message)
+
+
+class HostChangedError(RequestError):
+    """Raised when an existing pool gets a request for a foreign host."""
+
+    def __init__(
+        self, pool: ConnectionPool, url: str, retries: Retry | int = 3
+    ) -> None:
+        message = f"Tried to open a foreign host with url: {url}"
+        super().__init__(pool, url, message)
+        self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+    """Raised when passing an invalid state to a timeout"""
+
+
+class TimeoutError(HTTPError):
+    """Raised when a socket timeout error occurs.
+
+    Catching this error will catch both :exc:`ReadTimeoutErrors
+    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+    """
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+    """Raised when a socket timeout occurs while receiving data from a server"""
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+    """Raised when a socket timeout occurs while connecting to a server"""
+
+
+class NewConnectionError(ConnectTimeoutError, HTTPError):
+    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
+    def __init__(self, conn: HTTPConnection, message: str) -> None:
+        self.conn = conn
+        super().__init__(f"{conn}: {message}")
+
+    @property
+    def pool(self) -> HTTPConnection:
+        warnings.warn(
+            "The 'pool' property is deprecated and will be removed "
+            "in urllib3 v2.1.0.
Use 'conn' instead.", + DeprecationWarning, + stacklevel=2, + ) + + return self.conn + + +class NameResolutionError(NewConnectionError): + """Raised when host name resolution fails.""" + + def __init__(self, host: str, conn: HTTPConnection, reason: socket.gaierror): + message = f"Failed to resolve '{host}' ({reason})" + super().__init__(conn, message) + + +class EmptyPoolError(PoolError): + """Raised when a pool runs out of connections and no more are allowed.""" + + +class FullPoolError(PoolError): + """Raised when we try to add a connection to a full pool in blocking mode.""" + + +class ClosedPoolError(PoolError): + """Raised when a request enters a pool after the pool has been closed.""" + + +class LocationValueError(ValueError, HTTPError): + """Raised when there is something wrong with a given URL input.""" + + +class LocationParseError(LocationValueError): + """Raised when get_host or similar fails to parse the URL input.""" + + def __init__(self, location: str) -> None: + message = f"Failed to parse: {location}" + super().__init__(message) + + self.location = location + + +class URLSchemeUnknown(LocationValueError): + """Raised when a URL input has an unsupported scheme.""" + + def __init__(self, scheme: str): + message = f"Not supported URL scheme {scheme}" + super().__init__(message) + + self.scheme = scheme + + +class ResponseError(HTTPError): + """Used as a container for an error reason supplied in a MaxRetryError.""" + + GENERIC_ERROR = "too many error responses" + SPECIFIC_ERROR = "too many {status_code} error responses" + + +class SecurityWarning(HTTPWarning): + """Warned when performing security reducing actions""" + + +class InsecureRequestWarning(SecurityWarning): + """Warned when making an unverified HTTPS request.""" + + +class NotOpenSSLWarning(SecurityWarning): + """Warned when using unsupported SSL library""" + + +class SystemTimeWarning(SecurityWarning): + """Warned when system time is suspected to be wrong""" + + +class InsecurePlatformWarning(SecurityWarning): + """Warned when certain TLS/SSL configuration is not available on a platform.""" + + +class DependencyWarning(HTTPWarning): + """ + Warned when an attempt is made to import a module with missing optional + dependencies. + """ + + +class ResponseNotChunked(ProtocolError, ValueError): + """Response needs to be chunked in order to read it as chunks.""" + + +class BodyNotHttplibCompatible(HTTPError): + """ + Body should be :class:`http.client.HTTPResponse` like + (have an fp attribute which returns raw chunks) for read_chunked(). + """ + + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + """ + Response length doesn't match expected Content-Length + + Subclass of :class:`http.client.IncompleteRead` to allow int value + for ``partial`` to avoid creating large objects on streamed reads. 
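+
+    A brief illustration (editorial sketch, not upstream text):
+
+    .. code-block:: python
+
+        from urllib3.exceptions import IncompleteRead
+
+        err = IncompleteRead(partial=512, expected=1024)
+        repr(err)  # 'IncompleteRead(512 bytes read, 1024 more expected)'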
+ """ + + partial: int # type: ignore[assignment] + expected: int + + def __init__(self, partial: int, expected: int) -> None: + self.partial = partial + self.expected = expected + + def __repr__(self) -> str: + return "IncompleteRead(%i bytes read, %i more expected)" % ( + self.partial, + self.expected, + ) + + +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + """Invalid chunk length in a chunked response.""" + + def __init__(self, response: HTTPResponse, length: bytes) -> None: + self.partial: int = response.tell() # type: ignore[assignment] + self.expected: int | None = response.length_remaining + self.response = response + self.length = length + + def __repr__(self) -> str: + return "InvalidChunkLength(got length %r, %i bytes read)" % ( + self.length, + self.partial, + ) + + +class InvalidHeader(HTTPError): + """The header provided was somehow invalid.""" + + +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + """ProxyManager does not support the supplied scheme""" + + # TODO(t-8ch): Stop inheriting from AssertionError in v2.0. + + def __init__(self, scheme: str | None) -> None: + # 'localhost' is here because our URL parser parses + # localhost:8080 -> scheme=localhost, remove if we fix this. + if scheme == "localhost": + scheme = None + if scheme is None: + message = "Proxy URL had no scheme, should start with http:// or https://" + else: + message = f"Proxy URL had unsupported scheme {scheme}, should use http:// or https://" + super().__init__(message) + + +class ProxySchemeUnsupported(ValueError): + """Fetching HTTPS resources through HTTPS proxies is unsupported""" + + +class HeaderParsingError(HTTPError): + """Raised by assert_header_parsing, but we convert it to a log.warning statement.""" + + def __init__( + self, defects: list[MessageDefect], unparsed_data: bytes | str | None + ) -> None: + message = f"{defects or 'Unknown'}, unparsed data: {unparsed_data!r}" + super().__init__(message) + + +class UnrewindableBodyError(HTTPError): + """urllib3 encountered an error when trying to rewind a body""" diff --git a/templates/skills/spotify/dependencies/urllib3/fields.py b/templates/skills/spotify/dependencies/urllib3/fields.py new file mode 100644 index 00000000..3e258a5d --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/fields.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import email.utils +import mimetypes +import typing + +_TYPE_FIELD_VALUE = typing.Union[str, bytes] +_TYPE_FIELD_VALUE_TUPLE = typing.Union[ + _TYPE_FIELD_VALUE, + typing.Tuple[str, _TYPE_FIELD_VALUE], + typing.Tuple[str, _TYPE_FIELD_VALUE, str], +] + + +def guess_content_type( + filename: str | None, default: str = "application/octet-stream" +) -> str: + """ + Guess the "Content-Type" of a file. + + :param filename: + The filename to guess the "Content-Type" of using :mod:`mimetypes`. + :param default: + If no "Content-Type" can be guessed, default to `default`. + """ + if filename: + return mimetypes.guess_type(filename)[0] or default + return default + + +def format_header_param_rfc2231(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Helper function to format and quote a single header parameter using the + strategy defined in RFC 2231. + + Particularly useful for header parameters which might contain + non-ASCII values, like file names. This follows + `RFC 2388 Section 4.4 `_. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as ``bytes`` or `str``. 
+ :returns: + An RFC-2231-formatted unicode string. + + .. deprecated:: 2.0.0 + Will be removed in urllib3 v2.1.0. This is not valid for + ``multipart/form-data`` header parameters. + """ + import warnings + + warnings.warn( + "'format_header_param_rfc2231' is deprecated and will be " + "removed in urllib3 v2.1.0. This is not valid for " + "multipart/form-data header parameters.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(value, bytes): + value = value.decode("utf-8") + + if not any(ch in value for ch in '"\\\r\n'): + result = f'{name}="{value}"' + try: + result.encode("ascii") + except (UnicodeEncodeError, UnicodeDecodeError): + pass + else: + return result + + value = email.utils.encode_rfc2231(value, "utf-8") + value = f"{name}*={value}" + + return value + + +def format_multipart_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Format and quote a single multipart header parameter. + + This follows the `WHATWG HTML Standard`_ as of 2021/06/10, matching + the behavior of current browser and curl versions. Values are + assumed to be UTF-8. The ``\\n``, ``\\r``, and ``"`` characters are + percent encoded. + + .. _WHATWG HTML Standard: + https://html.spec.whatwg.org/multipage/ + form-control-infrastructure.html#multipart-form-data + + :param name: + The name of the parameter, an ASCII-only ``str``. + :param value: + The value of the parameter, a ``str`` or UTF-8 encoded + ``bytes``. + :returns: + A string ``name="value"`` with the escaped value. + + .. versionchanged:: 2.0.0 + Matches the WHATWG HTML Standard as of 2021/06/10. Control + characters are no longer percent encoded. + + .. versionchanged:: 2.0.0 + Renamed from ``format_header_param_html5`` and + ``format_header_param``. The old names will be removed in + urllib3 v2.1.0. + """ + if isinstance(value, bytes): + value = value.decode("utf-8") + + # percent encode \n \r " + value = value.translate({10: "%0A", 13: "%0D", 34: "%22"}) + return f'{name}="{value}"' + + +def format_header_param_html5(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + .. deprecated:: 2.0.0 + Renamed to :func:`format_multipart_header_param`. Will be + removed in urllib3 v2.1.0. + """ + import warnings + + warnings.warn( + "'format_header_param_html5' has been renamed to " + "'format_multipart_header_param'. The old name will be " + "removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + return format_multipart_header_param(name, value) + + +def format_header_param(name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + .. deprecated:: 2.0.0 + Renamed to :func:`format_multipart_header_param`. Will be + removed in urllib3 v2.1.0. + """ + import warnings + + warnings.warn( + "'format_header_param' has been renamed to " + "'format_multipart_header_param'. The old name will be " + "removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + return format_multipart_header_param(name, value) + + +class RequestField: + """ + A data container for request body parameters. + + :param name: + The name of this request field. Must be unicode. + :param data: + The data/value body. + :param filename: + An optional filename of the request field. Must be unicode. + :param headers: + An optional dict-like object of headers to initially use for the field. + + .. versionchanged:: 2.0.0 + The ``header_formatter`` parameter is deprecated and will + be removed in urllib3 v2.1.0. 
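+
+    A short usage sketch (editorial; the field name and file contents are
+    illustrative):
+
+    .. code-block:: python
+
+        from urllib3.fields import RequestField
+
+        field = RequestField("upload", b"a,b,c\n", filename="report.csv")
+        field.make_multipart(content_type="text/csv")
+        headers = field.render_headers()  # headers for this multipart part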
+ """ + + def __init__( + self, + name: str, + data: _TYPE_FIELD_VALUE, + filename: str | None = None, + headers: typing.Mapping[str, str] | None = None, + header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, + ): + self._name = name + self._filename = filename + self.data = data + self.headers: dict[str, str | None] = {} + if headers: + self.headers = dict(headers) + + if header_formatter is not None: + import warnings + + warnings.warn( + "The 'header_formatter' parameter is deprecated and " + "will be removed in urllib3 v2.1.0.", + DeprecationWarning, + stacklevel=2, + ) + self.header_formatter = header_formatter + else: + self.header_formatter = format_multipart_header_param + + @classmethod + def from_tuples( + cls, + fieldname: str, + value: _TYPE_FIELD_VALUE_TUPLE, + header_formatter: typing.Callable[[str, _TYPE_FIELD_VALUE], str] | None = None, + ) -> RequestField: + """ + A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. + + Supports constructing :class:`~urllib3.fields.RequestField` from + parameter of key/value strings AND key/filetuple. A filetuple is a + (filename, data, MIME type) tuple where the MIME type is optional. + For example:: + + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + + Field names and filenames must be unicode. + """ + filename: str | None + content_type: str | None + data: _TYPE_FIELD_VALUE + + if isinstance(value, tuple): + if len(value) == 3: + filename, data, content_type = value + else: + filename, data = value + content_type = guess_content_type(filename) + else: + filename = None + content_type = None + data = value + + request_param = cls( + fieldname, data, filename=filename, header_formatter=header_formatter + ) + request_param.make_multipart(content_type=content_type) + + return request_param + + def _render_part(self, name: str, value: _TYPE_FIELD_VALUE) -> str: + """ + Override this method to change how each multipart header + parameter is formatted. By default, this calls + :func:`format_multipart_header_param`. + + :param name: + The name of the parameter, an ASCII-only ``str``. + :param value: + The value of the parameter, a ``str`` or UTF-8 encoded + ``bytes``. + + :meta public: + """ + return self.header_formatter(name, value) + + def _render_parts( + self, + header_parts: ( + dict[str, _TYPE_FIELD_VALUE | None] + | typing.Sequence[tuple[str, _TYPE_FIELD_VALUE | None]] + ), + ) -> str: + """ + Helper function to format and quote a single header. + + Useful for single headers that are composed of multiple items. E.g., + 'Content-Disposition' fields. + + :param header_parts: + A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format + as `k1="v1"; k2="v2"; ...`. + """ + iterable: typing.Iterable[tuple[str, _TYPE_FIELD_VALUE | None]] + + parts = [] + if isinstance(header_parts, dict): + iterable = header_parts.items() + else: + iterable = header_parts + + for name, value in iterable: + if value is not None: + parts.append(self._render_part(name, value)) + + return "; ".join(parts) + + def render_headers(self) -> str: + """ + Renders the headers for this request field. 
+ """ + lines = [] + + sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] + for sort_key in sort_keys: + if self.headers.get(sort_key, False): + lines.append(f"{sort_key}: {self.headers[sort_key]}") + + for header_name, header_value in self.headers.items(): + if header_name not in sort_keys: + if header_value: + lines.append(f"{header_name}: {header_value}") + + lines.append("\r\n") + return "\r\n".join(lines) + + def make_multipart( + self, + content_disposition: str | None = None, + content_type: str | None = None, + content_location: str | None = None, + ) -> None: + """ + Makes this request field into a multipart request field. + + This method overrides "Content-Disposition", "Content-Type" and + "Content-Location" headers to the request parameter. + + :param content_disposition: + The 'Content-Disposition' of the request body. Defaults to 'form-data' + :param content_type: + The 'Content-Type' of the request body. + :param content_location: + The 'Content-Location' of the request body. + + """ + content_disposition = (content_disposition or "form-data") + "; ".join( + [ + "", + self._render_parts( + (("name", self._name), ("filename", self._filename)) + ), + ] + ) + + self.headers["Content-Disposition"] = content_disposition + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/templates/skills/spotify/dependencies/urllib3/filepost.py b/templates/skills/spotify/dependencies/urllib3/filepost.py new file mode 100644 index 00000000..1c90a211 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/filepost.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +import binascii +import codecs +import os +import typing +from io import BytesIO + +from .fields import _TYPE_FIELD_VALUE_TUPLE, RequestField + +writer = codecs.lookup("utf-8")[3] + +_TYPE_FIELDS_SEQUENCE = typing.Sequence[ + typing.Union[typing.Tuple[str, _TYPE_FIELD_VALUE_TUPLE], RequestField] +] +_TYPE_FIELDS = typing.Union[ + _TYPE_FIELDS_SEQUENCE, + typing.Mapping[str, _TYPE_FIELD_VALUE_TUPLE], +] + + +def choose_boundary() -> str: + """ + Our embarrassingly-simple replacement for mimetools.choose_boundary. + """ + return binascii.hexlify(os.urandom(16)).decode() + + +def iter_field_objects(fields: _TYPE_FIELDS) -> typing.Iterable[RequestField]: + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts, and lists of + :class:`~urllib3.fields.RequestField`. + + """ + iterable: typing.Iterable[RequestField | tuple[str, _TYPE_FIELD_VALUE_TUPLE]] + + if isinstance(fields, typing.Mapping): + iterable = fields.items() + else: + iterable = fields + + for field in iterable: + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) + + +def encode_multipart_formdata( + fields: _TYPE_FIELDS, boundary: str | None = None +) -> tuple[bytes, str]: + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. + + :param fields: + Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). + Values are processed by :func:`urllib3.fields.RequestField.from_tuples`. + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. 
+ """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for field in iter_field_objects(fields): + body.write(f"--{boundary}\r\n".encode("latin-1")) + + writer(body).write(field.render_headers()) + data = field.data + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, str): + writer(body).write(data) + else: + body.write(data) + + body.write(b"\r\n") + + body.write(f"--{boundary}--\r\n".encode("latin-1")) + + content_type = f"multipart/form-data; boundary={boundary}" + + return body.getvalue(), content_type diff --git a/templates/skills/spotify/dependencies/urllib3/http2.py b/templates/skills/spotify/dependencies/urllib3/http2.py new file mode 100644 index 00000000..15fa9d91 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/http2.py @@ -0,0 +1,229 @@ +from __future__ import annotations + +import threading +import types +import typing + +import h2.config # type: ignore[import-untyped] +import h2.connection # type: ignore[import-untyped] +import h2.events # type: ignore[import-untyped] + +import urllib3.connection +import urllib3.util.ssl_ +from urllib3.response import BaseHTTPResponse + +from ._collections import HTTPHeaderDict +from .connection import HTTPSConnection +from .connectionpool import HTTPSConnectionPool + +orig_HTTPSConnection = HTTPSConnection + +T = typing.TypeVar("T") + + +class _LockedObject(typing.Generic[T]): + """ + A wrapper class that hides a specific object behind a lock. + + The goal here is to provide a simple way to protect access to an object + that cannot safely be simultaneously accessed from multiple threads. The + intended use of this class is simple: take hold of it with a context + manager, which returns the protected object. + """ + + def __init__(self, obj: T): + self.lock = threading.RLock() + self._obj = obj + + def __enter__(self) -> T: + self.lock.acquire() + return self._obj + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + self.lock.release() + + +class HTTP2Connection(HTTPSConnection): + def __init__( + self, host: str, port: int | None = None, **kwargs: typing.Any + ) -> None: + self._h2_conn = self._new_h2_conn() + self._h2_stream: int | None = None + self._h2_headers: list[tuple[bytes, bytes]] = [] + + if "proxy" in kwargs or "proxy_config" in kwargs: # Defensive: + raise NotImplementedError("Proxies aren't supported with HTTP/2") + + super().__init__(host, port, **kwargs) + + def _new_h2_conn(self) -> _LockedObject[h2.connection.H2Connection]: + config = h2.config.H2Configuration(client_side=True) + return _LockedObject(h2.connection.H2Connection(config=config)) + + def connect(self) -> None: + super().connect() + + with self._h2_conn as h2_conn: + h2_conn.initiate_connection() + self.sock.sendall(h2_conn.data_to_send()) + + def putrequest( + self, + method: str, + url: str, + skip_host: bool = False, + skip_accept_encoding: bool = False, + ) -> None: + with self._h2_conn as h2_conn: + self._request_url = url + self._h2_stream = h2_conn.get_next_available_stream_id() + + if ":" in self.host: + authority = f"[{self.host}]:{self.port or 443}" + else: + authority = f"{self.host}:{self.port or 443}" + + self._h2_headers.extend( + ( + (b":scheme", b"https"), + (b":method", method.encode()), + (b":authority", authority.encode()), + (b":path", url.encode()), + ) + ) + + def putheader(self, header: str, *values: str) -> None: # type: ignore[override] + for value in 
values: + self._h2_headers.append( + (header.encode("utf-8").lower(), value.encode("utf-8")) + ) + + def endheaders(self) -> None: # type: ignore[override] + with self._h2_conn as h2_conn: + h2_conn.send_headers( + stream_id=self._h2_stream, + headers=self._h2_headers, + end_stream=True, + ) + if data_to_send := h2_conn.data_to_send(): + self.sock.sendall(data_to_send) + + def send(self, data: bytes) -> None: # type: ignore[override] # Defensive: + if not data: + return + raise NotImplementedError("Sending data isn't supported yet") + + def getresponse( # type: ignore[override] + self, + ) -> HTTP2Response: + status = None + data = bytearray() + with self._h2_conn as h2_conn: + end_stream = False + while not end_stream: + # TODO: Arbitrary read value. + if received_data := self.sock.recv(65535): + events = h2_conn.receive_data(received_data) + for event in events: + if isinstance(event, h2.events.ResponseReceived): + headers = HTTPHeaderDict() + for header, value in event.headers: + if header == b":status": + status = int(value.decode()) + else: + headers.add( + header.decode("ascii"), value.decode("ascii") + ) + + elif isinstance(event, h2.events.DataReceived): + data += event.data + h2_conn.acknowledge_received_data( + event.flow_controlled_length, event.stream_id + ) + + elif isinstance(event, h2.events.StreamEnded): + end_stream = True + + if data_to_send := h2_conn.data_to_send(): + self.sock.sendall(data_to_send) + + # We always close to not have to handle connection management. + self.close() + + assert status is not None + return HTTP2Response( + status=status, + headers=headers, + request_url=self._request_url, + data=bytes(data), + ) + + def close(self) -> None: + with self._h2_conn as h2_conn: + try: + h2_conn.close_connection() + if data := h2_conn.data_to_send(): + self.sock.sendall(data) + except Exception: + pass + + # Reset all our HTTP/2 connection state. + self._h2_conn = self._new_h2_conn() + self._h2_stream = None + self._h2_headers = [] + + super().close() + + +class HTTP2Response(BaseHTTPResponse): + # TODO: This is a woefully incomplete response object, but works for non-streaming. + def __init__( + self, + status: int, + headers: HTTPHeaderDict, + request_url: str, + data: bytes, + decode_content: bool = False, # TODO: support decoding + ) -> None: + super().__init__( + status=status, + headers=headers, + # Following CPython, we map HTTP versions to major * 10 + minor integers + version=20, + # No reason phrase in HTTP/2 + reason=None, + decode_content=decode_content, + request_url=request_url, + ) + self._data = data + self.length_remaining = 0 + + @property + def data(self) -> bytes: + return self._data + + def get_redirect_location(self) -> None: + return None + + def close(self) -> None: + pass + + +def inject_into_urllib3() -> None: + HTTPSConnectionPool.ConnectionCls = HTTP2Connection + urllib3.connection.HTTPSConnection = HTTP2Connection # type: ignore[misc] + + # TODO: Offer 'http/1.1' as well, but for testing purposes this is handy. 
+ urllib3.util.ssl_.ALPN_PROTOCOLS = ["h2"] + + +def extract_from_urllib3() -> None: + HTTPSConnectionPool.ConnectionCls = orig_HTTPSConnection + urllib3.connection.HTTPSConnection = orig_HTTPSConnection # type: ignore[misc] + + urllib3.util.ssl_.ALPN_PROTOCOLS = ["http/1.1"] diff --git a/templates/skills/spotify/dependencies/urllib3/poolmanager.py b/templates/skills/spotify/dependencies/urllib3/poolmanager.py new file mode 100644 index 00000000..32da0a00 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/poolmanager.py @@ -0,0 +1,638 @@ +from __future__ import annotations + +import functools +import logging +import typing +import warnings +from types import TracebackType +from urllib.parse import urljoin + +from ._collections import HTTPHeaderDict, RecentlyUsedContainer +from ._request_methods import RequestMethods +from .connection import ProxyConfig +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + URLSchemeUnknown, +) +from .response import BaseHTTPResponse +from .util.connection import _TYPE_SOCKET_OPTIONS +from .util.proxy import connection_requires_http_tunnel +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import Url, parse_url + +if typing.TYPE_CHECKING: + import ssl + from typing import Literal + +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ca_cert_data", + "ssl_version", + "ssl_minimum_version", + "ssl_maximum_version", + "ca_cert_dir", + "ssl_context", + "key_password", + "server_hostname", +) +# Default value for `blocksize` - a new parameter introduced to +# http.client.HTTPConnection & http.client.HTTPSConnection in Python 3.7 +_DEFAULT_BLOCKSIZE = 16384 + +_SelfT = typing.TypeVar("_SelfT") + + +class PoolKey(typing.NamedTuple): + """ + All known keyword arguments that could be provided to the pool manager, its + pools, or the underlying connections. + + All custom key schemes should include the fields in this key at a minimum. + """ + + key_scheme: str + key_host: str + key_port: int | None + key_timeout: Timeout | float | int | None + key_retries: Retry | bool | int | None + key_block: bool | None + key_source_address: tuple[str, int] | None + key_key_file: str | None + key_key_password: str | None + key_cert_file: str | None + key_cert_reqs: str | None + key_ca_certs: str | None + key_ca_cert_data: str | bytes | None + key_ssl_version: int | str | None + key_ssl_minimum_version: ssl.TLSVersion | None + key_ssl_maximum_version: ssl.TLSVersion | None + key_ca_cert_dir: str | None + key_ssl_context: ssl.SSLContext | None + key_maxsize: int | None + key_headers: frozenset[tuple[str, str]] | None + key__proxy: Url | None + key__proxy_headers: frozenset[tuple[str, str]] | None + key__proxy_config: ProxyConfig | None + key_socket_options: _TYPE_SOCKET_OPTIONS | None + key__socks_options: frozenset[tuple[str, str]] | None + key_assert_hostname: bool | str | None + key_assert_fingerprint: str | None + key_server_hostname: str | None + key_blocksize: int | None + + +def _default_key_normalizer( + key_class: type[PoolKey], request_context: dict[str, typing.Any] +) -> PoolKey: + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. 
+ Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. If you wish to change this behaviour, provide + alternate callables to ``key_fn_by_scheme``. + + :param key_class: + The class to use when constructing the key. This should be a namedtuple + with the ``scheme`` and ``host`` keys at a minimum. + :type key_class: namedtuple + :param request_context: + A dictionary-like object that contain the context for a request. + :type request_context: dict + + :return: A namedtuple that can be used as a connection pool key. + :rtype: PoolKey + """ + # Since we mutate the dictionary, make a copy first + context = request_context.copy() + context["scheme"] = context["scheme"].lower() + context["host"] = context["host"].lower() + + # These are both dictionaries and need to be transformed into frozensets + for key in ("headers", "_proxy_headers", "_socks_options"): + if key in context and context[key] is not None: + context[key] = frozenset(context[key].items()) + + # The socket_options key may be a list and needs to be transformed into a + # tuple. + socket_opts = context.get("socket_options") + if socket_opts is not None: + context["socket_options"] = tuple(socket_opts) + + # Map the kwargs to the names in the namedtuple - this is necessary since + # namedtuples can't have fields starting with '_'. + for key in list(context.keys()): + context["key_" + key] = context.pop(key) + + # Default to ``None`` for keys missing from the context + for field in key_class._fields: + if field not in context: + context[field] = None + + # Default key_blocksize to _DEFAULT_BLOCKSIZE if missing from the context + if context.get("key_blocksize") is None: + context["key_blocksize"] = _DEFAULT_BLOCKSIZE + + return key_class(**context) + + +#: A dictionary that maps a scheme to a callable that creates a pool key. +#: This can be used to alter the way pool keys are constructed, if desired. +#: Each PoolManager makes a copy of this dictionary so they can be configured +#: globally here, or individually on the instance. +key_fn_by_scheme = { + "http": functools.partial(_default_key_normalizer, PoolKey), + "https": functools.partial(_default_key_normalizer, PoolKey), +} + +pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \\**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example: + + .. 
code-block:: python + + import urllib3 + + http = urllib3.PoolManager(num_pools=2) + + resp1 = http.request("GET", "https://google.com/") + resp2 = http.request("GET", "https://google.com/mail") + resp3 = http.request("GET", "https://yahoo.com/") + + print(len(http.pools)) + # 2 + + """ + + proxy: Url | None = None + proxy_config: ProxyConfig | None = None + + def __init__( + self, + num_pools: int = 10, + headers: typing.Mapping[str, str] | None = None, + **connection_pool_kw: typing.Any, + ) -> None: + super().__init__(headers) + self.connection_pool_kw = connection_pool_kw + + self.pools: RecentlyUsedContainer[PoolKey, HTTPConnectionPool] + self.pools = RecentlyUsedContainer(num_pools) + + # Locally set the pool classes and keys so other PoolManagers can + # override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self: _SelfT) -> _SelfT: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> Literal[False]: + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool( + self, + scheme: str, + host: str, + port: int, + request_context: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + """ + Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. + """ + pool_cls: type[HTTPConnectionPool] = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Default blocksize to _DEFAULT_BLOCKSIZE if missing or explicitly + # set to 'None' in the request_context. + if request_context.get("blocksize") is None: + request_context["blocksize"] = _DEFAULT_BLOCKSIZE + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. + for key in ("scheme", "host", "port"): + request_context.pop(key, None) + + if scheme == "http": + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self) -> None: + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host( + self, + host: str | None, + port: int | None = None, + scheme: str | None = "http", + pool_kwargs: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. 
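+
+        A quick sketch (editorial) of obtaining a pool directly:
+
+        .. code-block:: python
+
+            import urllib3
+
+            http = urllib3.PoolManager()
+            pool = http.connection_from_host("example.com", scheme="https")
+            resp = pool.urlopen("GET", "/")  # port 443 derived from the scheme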
+ """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context["scheme"] = scheme or "http" + if not port: + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host + + return self.connection_from_context(request_context) + + def connection_from_context( + self, request_context: dict[str, typing.Any] + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + if "strict" in request_context: + warnings.warn( + "The 'strict' parameter is no longer needed on Python 3+. " + "This will raise an error in urllib3 v2.1.0.", + DeprecationWarning, + ) + request_context.pop("strict") + + scheme = request_context["scheme"].lower() + pool_key_constructor = self.key_fn_by_scheme.get(scheme) + if not pool_key_constructor: + raise URLSchemeUnknown(scheme) + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key( + self, pool_key: PoolKey, request_context: dict[str, typing.Any] + ) -> HTTPConnectionPool: + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + scheme = request_context["scheme"] + host = request_context["host"] + port = request_context["port"] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + + return pool + + def connection_from_url( + self, url: str, pool_kwargs: dict[str, typing.Any] | None = None + ) -> HTTPConnectionPool: + """ + Similar to :func:`urllib3.connectionpool.connection_from_url`. + + If ``pool_kwargs`` is not provided and a new pool needs to be + constructed, ``self.connection_pool_kw`` is used to initialize + the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs`` + is provided, it is used instead. Note that if a new pool does not + need to be created for the request, the provided ``pool_kwargs`` are + not used. + """ + u = parse_url(url) + return self.connection_from_host( + u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs + ) + + def _merge_pool_kwargs( + self, override: dict[str, typing.Any] | None + ) -> dict[str, typing.Any]: + """ + Merge a dictionary of override values for self.connection_pool_kw. + + This does not modify self.connection_pool_kw and returns a new dict. + Any keys in the override dictionary with a value of ``None`` are + removed from the merged dictionary. 
+ """ + base_pool_kwargs = self.connection_pool_kw.copy() + if override: + for key, value in override.items(): + if value is None: + try: + del base_pool_kwargs[key] + except KeyError: + pass + else: + base_pool_kwargs[key] = value + return base_pool_kwargs + + def _proxy_requires_url_absolute_form(self, parsed_url: Url) -> bool: + """ + Indicates if the proxy requires the complete destination URL in the + request. Normally this is only needed when not using an HTTP CONNECT + tunnel. + """ + if self.proxy is None: + return False + + return not connection_requires_http_tunnel( + self.proxy, self.proxy_config, parsed_url.scheme + ) + + def urlopen( # type: ignore[override] + self, method: str, url: str, redirect: bool = True, **kw: typing.Any + ) -> BaseHTTPResponse: + """ + Same as :meth:`urllib3.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + + if u.scheme is None: + warnings.warn( + "URLs without a scheme (ie 'https://') are deprecated and will raise an error " + "in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs " + "start with 'https://' or 'http://'. Read more in this issue: " + "https://github.com/urllib3/urllib3/issues/2920", + category=DeprecationWarning, + stacklevel=2, + ) + + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw["assert_same_host"] = False + kw["redirect"] = False + + if "headers" not in kw: + kw["headers"] = self.headers + + if self._proxy_requires_url_absolute_form(u): + response = conn.urlopen(method, url, **kw) + else: + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + # Support relative URLs for redirecting. + redirect_location = urljoin(url, redirect_location) + + if response.status == 303: + # Change the method according to RFC 9110, Section 15.4.4. + method = "GET" + # And lose the body not to transfer anything sensitive. + kw["body"] = None + kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change() + + retries = kw.get("retries") + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + # Strip headers marked as unsafe to forward to the redirected location. + # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. + if retries.remove_headers_on_redirect and not conn.is_same_host( + redirect_location + ): + new_headers = kw["headers"].copy() + for header in kw["headers"]: + if header.lower() in retries.remove_headers_on_redirect: + new_headers.pop(header, None) + kw["headers"] = new_headers + + try: + retries = retries.increment(method, url, response=response, _pool=conn) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + kw["retries"] = retries + kw["redirect"] = redirect + + log.info("Redirecting %s -> %s", url, redirect_location) + + response.drain_conn() + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(PoolManager): + """ + Behaves just like :class:`PoolManager`, but sends all requests through + the defined proxy, using the CONNECT method for HTTPS URLs. 
+ + :param proxy_url: + The URL of the proxy to be used. + + :param proxy_headers: + A dictionary containing headers that will be sent to the proxy. In case + of HTTP they are being sent with each request, while in the + HTTPS/CONNECT case they are sent only once. Could be used for proxy + authentication. + + :param proxy_ssl_context: + The proxy SSL context is used to establish the TLS connection to the + proxy when using HTTPS proxies. + + :param use_forwarding_for_https: + (Defaults to False) If set to True will forward requests to the HTTPS + proxy to be made on behalf of the client instead of creating a TLS + tunnel via the CONNECT method. **Enabling this flag means that request + and response headers and content will be visible from the HTTPS proxy** + whereas tunneling keeps request and response headers and content + private. IP address, target hostname, SNI, and port are always visible + to an HTTPS proxy even when this flag is disabled. + + :param proxy_assert_hostname: + The hostname of the certificate to verify against. + + :param proxy_assert_fingerprint: + The fingerprint of the certificate to verify against. + + Example: + + .. code-block:: python + + import urllib3 + + proxy = urllib3.ProxyManager("https://localhost:3128/") + + resp1 = proxy.request("GET", "https://google.com/") + resp2 = proxy.request("GET", "https://httpbin.org/") + + print(len(proxy.pools)) + # 1 + + resp3 = proxy.request("GET", "https://httpbin.org/") + resp4 = proxy.request("GET", "https://twitter.com/") + + print(len(proxy.pools)) + # 3 + + """ + + def __init__( + self, + proxy_url: str, + num_pools: int = 10, + headers: typing.Mapping[str, str] | None = None, + proxy_headers: typing.Mapping[str, str] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + use_forwarding_for_https: bool = False, + proxy_assert_hostname: None | str | Literal[False] = None, + proxy_assert_fingerprint: str | None = None, + **connection_pool_kw: typing.Any, + ) -> None: + if isinstance(proxy_url, HTTPConnectionPool): + str_proxy_url = f"{proxy_url.scheme}://{proxy_url.host}:{proxy_url.port}" + else: + str_proxy_url = proxy_url + proxy = parse_url(str_proxy_url) + + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) + + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + + self.proxy = proxy + self.proxy_headers = proxy_headers or {} + self.proxy_ssl_context = proxy_ssl_context + self.proxy_config = ProxyConfig( + proxy_ssl_context, + use_forwarding_for_https, + proxy_assert_hostname, + proxy_assert_fingerprint, + ) + + connection_pool_kw["_proxy"] = self.proxy + connection_pool_kw["_proxy_headers"] = self.proxy_headers + connection_pool_kw["_proxy_config"] = self.proxy_config + + super().__init__(num_pools, headers, **connection_pool_kw) + + def connection_from_host( + self, + host: str | None, + port: int | None = None, + scheme: str | None = "http", + pool_kwargs: dict[str, typing.Any] | None = None, + ) -> HTTPConnectionPool: + if scheme == "https": + return super().connection_from_host( + host, port, scheme, pool_kwargs=pool_kwargs + ) + + return super().connection_from_host( + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs # type: ignore[union-attr] + ) + + def _set_proxy_headers( + self, url: str, headers: typing.Mapping[str, str] | None = None + ) -> typing.Mapping[str, str]: + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. 
Only sets headers not provided by the user. + """ + headers_ = {"Accept": "*/*"} + + netloc = parse_url(url).netloc + if netloc: + headers_["Host"] = netloc + + if headers: + headers_.update(headers) + return headers_ + + def urlopen( # type: ignore[override] + self, method: str, url: str, redirect: bool = True, **kw: typing.Any + ) -> BaseHTTPResponse: + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + u = parse_url(url) + if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): + # For connections using HTTP CONNECT, httplib sets the necessary + # headers on the CONNECT to the proxy. If we're not using CONNECT, + # we'll definitely need to set 'Host' at the very least. + headers = kw.get("headers", self.headers) + kw["headers"] = self._set_proxy_headers(url, headers) + + return super().urlopen(method, url, redirect=redirect, **kw) + + +def proxy_from_url(url: str, **kw: typing.Any) -> ProxyManager: + return ProxyManager(proxy_url=url, **kw) diff --git a/templates/skills/spotify/dependencies/urllib3/py.typed b/templates/skills/spotify/dependencies/urllib3/py.typed new file mode 100644 index 00000000..5f3ea3d9 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/py.typed @@ -0,0 +1,2 @@ +# Instruct type checkers to look for inline type annotations in this package. +# See PEP 561. diff --git a/templates/skills/spotify/dependencies/urllib3/response.py b/templates/skills/spotify/dependencies/urllib3/response.py new file mode 100644 index 00000000..d31fac9b --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/response.py @@ -0,0 +1,1243 @@ +from __future__ import annotations + +import collections +import io +import json as _json +import logging +import re +import sys +import typing +import warnings +import zlib +from contextlib import contextmanager +from http.client import HTTPMessage as _HttplibHTTPMessage +from http.client import HTTPResponse as _HttplibHTTPResponse +from socket import timeout as SocketTimeout + +if typing.TYPE_CHECKING: + from ._base_connection import BaseHTTPConnection + +try: + try: + import brotlicffi as brotli # type: ignore[import-not-found] + except ImportError: + import brotli # type: ignore[import-not-found] +except ImportError: + brotli = None + +try: + import zstandard as zstd # type: ignore[import-not-found] + + # The package 'zstandard' added the 'eof' property starting + # in v0.18.0 which we require to ensure a complete and + # valid zstd stream was fed into the ZstdDecoder. + # See: https://github.com/urllib3/urllib3/pull/2624 + _zstd_version = _zstd_version = tuple( + map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] + ) + if _zstd_version < (0, 18): # Defensive: + zstd = None + +except (AttributeError, ImportError, ValueError): # Defensive: + zstd = None + +from . 
import util +from ._base_connection import _TYPE_BODY +from ._collections import HTTPHeaderDict +from .connection import BaseSSLError, HTTPConnection, HTTPException +from .exceptions import ( + BodyNotHttplibCompatible, + DecodeError, + HTTPError, + IncompleteRead, + InvalidChunkLength, + InvalidHeader, + ProtocolError, + ReadTimeoutError, + ResponseNotChunked, + SSLError, +) +from .util.response import is_fp_closed, is_response_to_head +from .util.retry import Retry + +if typing.TYPE_CHECKING: + from typing import Literal + + from .connectionpool import HTTPConnectionPool + +log = logging.getLogger(__name__) + + +class ContentDecoder: + def decompress(self, data: bytes) -> bytes: + raise NotImplementedError() + + def flush(self) -> bytes: + raise NotImplementedError() + + +class DeflateDecoder(ContentDecoder): + def __init__(self) -> None: + self._first_try = True + self._data = b"" + self._obj = zlib.decompressobj() + + def decompress(self, data: bytes) -> bytes: + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None # type: ignore[assignment] + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None # type: ignore[assignment] + + def flush(self) -> bytes: + return self._obj.flush() + + +class GzipDecoderState: + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(ContentDecoder): + def __init__(self) -> None: + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def decompress(self, data: bytes) -> bytes: + ret = bytearray() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return bytes(ret) + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return bytes(ret) + raise + data = self._obj.unused_data + if not data: + return bytes(ret) + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + def flush(self) -> bytes: + return self._obj.flush() + + +if brotli is not None: + + class BrotliDecoder(ContentDecoder): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. 
The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + self._obj = brotli.Decompressor() + if hasattr(self._obj, "decompress"): + setattr(self, "decompress", self._obj.decompress) + else: + setattr(self, "decompress", self._obj.process) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return self._obj.flush() # type: ignore[no-any-return] + return b"" + + +if zstd is not None: + + class ZstdDecoder(ContentDecoder): + def __init__(self) -> None: + self._obj = zstd.ZstdDecompressor().decompressobj() + + def decompress(self, data: bytes) -> bytes: + if not data: + return b"" + data_parts = [self._obj.decompress(data)] + while self._obj.eof and self._obj.unused_data: + unused_data = self._obj.unused_data + self._obj = zstd.ZstdDecompressor().decompressobj() + data_parts.append(self._obj.decompress(unused_data)) + return b"".join(data_parts) + + def flush(self) -> bytes: + ret = self._obj.flush() # note: this is a no-op + if not self._obj.eof: + raise DecodeError("Zstandard data is incomplete") + return ret # type: ignore[no-any-return] + + +class MultiDecoder(ContentDecoder): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. + """ + + def __init__(self, modes: str) -> None: + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] + + def flush(self) -> bytes: + return self._decoders[0].flush() + + def decompress(self, data: bytes) -> bytes: + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + +def _get_decoder(mode: str) -> ContentDecoder: + if "," in mode: + return MultiDecoder(mode) + + # According to RFC 9110 section 8.4.1.3, recipients should + # consider x-gzip equivalent to gzip + if mode in ("gzip", "x-gzip"): + return GzipDecoder() + + if brotli is not None and mode == "br": + return BrotliDecoder() + + if zstd is not None and mode == "zstd": + return ZstdDecoder() + + return DeflateDecoder() + + +class BytesQueueBuffer: + """Memory-efficient bytes buffer + + To return decoded data in read() and still follow the BufferedIOBase API, we need a + buffer to always return the correct amount of bytes. + + This buffer should be filled using calls to put() + + Our maximum memory usage is determined by the sum of the size of: + + * self.buffer, which contains the full data + * the largest chunk that we will copy in get() + + The worst case scenario is a single chunk, in which case we'll make a full copy of + the data inside get(). 
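+
+    A minimal illustration (editorial sketch):
+
+    .. code-block:: python
+
+        buf = BytesQueueBuffer()
+        buf.put(b"hello ")
+        buf.put(b"world")
+        buf.get(5)     # b"hello"; the remaining b" " stays buffered
+        buf.get_all()  # b" world"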
+ """ + + def __init__(self) -> None: + self.buffer: typing.Deque[bytes] = collections.deque() + self._size: int = 0 + + def __len__(self) -> int: + return self._size + + def put(self, data: bytes) -> None: + self.buffer.append(data) + self._size += len(data) + + def get(self, n: int) -> bytes: + if n == 0: + return b"" + elif not self.buffer: + raise RuntimeError("buffer is empty") + elif n < 0: + raise ValueError("n should be > 0") + + fetched = 0 + ret = io.BytesIO() + while fetched < n: + remaining = n - fetched + chunk = self.buffer.popleft() + chunk_length = len(chunk) + if remaining < chunk_length: + left_chunk, right_chunk = chunk[:remaining], chunk[remaining:] + ret.write(left_chunk) + self.buffer.appendleft(right_chunk) + self._size -= remaining + break + else: + ret.write(chunk) + self._size -= chunk_length + fetched += chunk_length + + if not self.buffer: + break + + return ret.getvalue() + + def get_all(self) -> bytes: + buffer = self.buffer + if not buffer: + assert self._size == 0 + return b"" + if len(buffer) == 1: + result = buffer.pop() + else: + ret = io.BytesIO() + ret.writelines(buffer.popleft() for _ in range(len(buffer))) + result = ret.getvalue() + self._size = 0 + return result + + +class BaseHTTPResponse(io.IOBase): + CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"] + if brotli is not None: + CONTENT_DECODERS += ["br"] + if zstd is not None: + CONTENT_DECODERS += ["zstd"] + REDIRECT_STATUSES = [301, 302, 303, 307, 308] + + DECODER_ERROR_CLASSES: tuple[type[Exception], ...] = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + + if zstd is not None: + DECODER_ERROR_CLASSES += (zstd.ZstdError,) + + def __init__( + self, + *, + headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, + status: int, + version: int, + reason: str | None, + decode_content: bool, + request_url: str | None, + retries: Retry | None = None, + ) -> None: + if isinstance(headers, HTTPHeaderDict): + self.headers = headers + else: + self.headers = HTTPHeaderDict(headers) # type: ignore[arg-type] + self.status = status + self.version = version + self.reason = reason + self.decode_content = decode_content + self._has_decoded_content = False + self._request_url: str | None = request_url + self.retries = retries + + self.chunked = False + tr_enc = self.headers.get("transfer-encoding", "").lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + self._decoder: ContentDecoder | None = None + self.length_remaining: int | None + + def get_redirect_location(self) -> str | None | Literal[False]: + """ + Should we redirect and where to? + + :returns: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. + """ + if self.status in self.REDIRECT_STATUSES: + return self.headers.get("location") + return False + + @property + def data(self) -> bytes: + raise NotImplementedError() + + def json(self) -> typing.Any: + """ + Parses the body of the HTTP response as JSON. + + To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder. + + This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`. + + Read more :ref:`here `. 
+ """ + data = self.data.decode("utf-8") + return _json.loads(data) + + @property + def url(self) -> str | None: + raise NotImplementedError() + + @url.setter + def url(self, url: str | None) -> None: + raise NotImplementedError() + + @property + def connection(self) -> BaseHTTPConnection | None: + raise NotImplementedError() + + @property + def retries(self) -> Retry | None: + return self._retries + + @retries.setter + def retries(self, retries: Retry | None) -> None: + # Override the request_url if retries has a redirect location. + if retries is not None and retries.history: + self.url = retries.history[-1].redirect_location + self._retries = retries + + def stream( + self, amt: int | None = 2**16, decode_content: bool | None = None + ) -> typing.Iterator[bytes]: + raise NotImplementedError() + + def read( + self, + amt: int | None = None, + decode_content: bool | None = None, + cache_content: bool = False, + ) -> bytes: + raise NotImplementedError() + + def read1( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> bytes: + raise NotImplementedError() + + def read_chunked( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> typing.Iterator[bytes]: + raise NotImplementedError() + + def release_conn(self) -> None: + raise NotImplementedError() + + def drain_conn(self) -> None: + raise NotImplementedError() + + def close(self) -> None: + raise NotImplementedError() + + def _init_decoder(self) -> None: + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get("content-encoding", "").lower() + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] + if encodings: + self._decoder = _get_decoder(content_encoding) + + def _decode( + self, data: bytes, decode_content: bool | None, flush_decoder: bool + ) -> bytes: + """ + Decode the data passed in and potentially flush the decoder. + """ + if not decode_content: + if self._has_decoded_content: + raise RuntimeError( + "Calling read(decode_content=False) is not supported after " + "read(decode_content=True) was called." + ) + return data + + try: + if self._decoder: + data = self._decoder.decompress(data) + self._has_decoded_content = True + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, + e, + ) from e + if flush_decoder: + data += self._flush_decoder() + + return data + + def _flush_decoder(self) -> bytes: + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + return self._decoder.decompress(b"") + self._decoder.flush() + return b"" + + # Compatibility methods for `io` module + def readinto(self, b: bytearray) -> int: + temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[: len(temp)] = temp + return len(temp) + + # Compatibility methods for http.client.HTTPResponse + def getheaders(self) -> HTTPHeaderDict: + warnings.warn( + "HTTPResponse.getheaders() is deprecated and will be removed " + "in urllib3 v2.1.0. 
Instead access HTTPResponse.headers directly.", + category=DeprecationWarning, + stacklevel=2, + ) + return self.headers + + def getheader(self, name: str, default: str | None = None) -> str | None: + warnings.warn( + "HTTPResponse.getheader() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).", + category=DeprecationWarning, + stacklevel=2, + ) + return self.headers.get(name, default) + + # Compatibility method for http.cookiejar + def info(self) -> HTTPHeaderDict: + return self.headers + + def geturl(self) -> str | None: + return self.url + + +class HTTPResponse(BaseHTTPResponse): + """ + HTTP Response container. + + Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. + """ + + def __init__( + self, + body: _TYPE_BODY = "", + headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None, + status: int = 0, + version: int = 0, + reason: str | None = None, + preload_content: bool = True, + decode_content: bool = True, + original_response: _HttplibHTTPResponse | None = None, + pool: HTTPConnectionPool | None = None, + connection: HTTPConnection | None = None, + msg: _HttplibHTTPMessage | None = None, + retries: Retry | None = None, + enforce_content_length: bool = True, + request_method: str | None = None, + request_url: str | None = None, + auto_close: bool = True, + ) -> None: + super().__init__( + headers=headers, + status=status, + version=version, + reason=reason, + decode_content=decode_content, + request_url=request_url, + retries=retries, + ) + + self.enforce_content_length = enforce_content_length + self.auto_close = auto_close + + self._body = None + self._fp: _HttplibHTTPResponse | None = None + self._original_response = original_response + self._fp_bytes_read = 0 + self.msg = msg + + if body and isinstance(body, (str, bytes)): + self._body = body + + self._pool = pool + self._connection = connection + + if hasattr(body, "read"): + self._fp = body # type: ignore[assignment] + + # Are we using the chunked-style of transfer encoding? + self.chunk_left: int | None = None + + # Determine length of response + self.length_remaining = self._init_length(request_method) + + # Used to return the correct amount of bytes for partial read()s + self._decoded_buffer = BytesQueueBuffer() + + # If requested, preload the body. 
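(The difference this flag makes, as a short sketch against a placeholder host:)

```python
import urllib3

http = urllib3.PoolManager()

# Default preload_content=True: the whole body is read into memory up front.
eager = http.request("GET", "https://example.com/")
print(len(eager.data))

# preload_content=False defers the read; stream() pulls it off the socket.
lazy = http.request("GET", "https://example.com/", preload_content=False)
for chunk in lazy.stream(2**16):
    pass
lazy.release_conn()
```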
+ if preload_content and not self._body: + self._body = self.read(decode_content=decode_content) + + def release_conn(self) -> None: + if not self._pool or not self._connection: + return None + + self._pool._put_conn(self._connection) + self._connection = None + + def drain_conn(self) -> None: + """ + Read and discard any remaining HTTP response data in the response connection. + + Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. + """ + try: + self.read() + except (HTTPError, OSError, BaseSSLError, HTTPException): + pass + + @property + def data(self) -> bytes: + # For backwards-compat with earlier urllib3 0.4 and earlier. + if self._body: + return self._body # type: ignore[return-value] + + if self._fp: + return self.read(cache_content=True) + + return None # type: ignore[return-value] + + @property + def connection(self) -> HTTPConnection | None: + return self._connection + + def isclosed(self) -> bool: + return is_fp_closed(self._fp) + + def tell(self) -> int: + """ + Obtain the number of bytes pulled over the wire so far. May differ from + the amount of content returned by :meth:``urllib3.response.HTTPResponse.read`` + if bytes are encoded on the wire (e.g, compressed). + """ + return self._fp_bytes_read + + def _init_length(self, request_method: str | None) -> int | None: + """ + Set initial length value for Response content if available. + """ + length: int | None + content_length: str | None = self.headers.get("content-length") + + if content_length is not None: + if self.chunked: + # This Response will fail with an IncompleteRead if it can't be + # received as chunked. This method falls back to attempt reading + # the response before raising an exception. + log.warning( + "Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked." + ) + return None + + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = {int(val) for val in content_length.split(",")} + if len(lengths) > 1: + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % content_length + ) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + else: # if content_length is None + length = None + + # Convert status to int for comparison + # In some cases, httplib returns a status of "_UNKNOWN" + try: + status = int(self.status) + except ValueError: + status = 0 + + # Check for responses that shouldn't include a body + if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD": + length = 0 + + return length + + @contextmanager + def _error_catcher(self) -> typing.Generator[None, None, None]: + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. + """ + clean_exit = False + + try: + try: + yield + + except SocketTimeout as e: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. 
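(Seen from the caller, the translation this error catcher performs looks roughly like the sketch below; retries are disabled so the translated exception surfaces directly, and the URL and timeout are placeholders:)

```python
import urllib3
from urllib3.exceptions import HTTPError, ProtocolError, ReadTimeoutError

http = urllib3.PoolManager()
try:
    http.request("GET", "https://example.com/slow", timeout=0.5, retries=False)
except ReadTimeoutError:
    pass  # a low-level socket timeout, re-raised as a urllib3 exception
except ProtocolError:
    pass  # IncompleteRead / broken connections surface uniformly here
except HTTPError:
    pass  # any other urllib3-level failure (e.g. a connect timeout)
```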
+ raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type] + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? + if "read operation timed out" not in str(e): + # SSL errors related to framing/MAC get wrapped and reraised here + raise SSLError(e) from e + + raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type] + + except IncompleteRead as e: + if ( + e.expected is not None + and e.partial is not None + and e.expected == -e.partial + ): + arg = "Response may not contain content." + else: + arg = f"Connection broken: {e!r}" + raise ProtocolError(arg, e) from e + + except (HTTPException, OSError) as e: + raise ProtocolError(f"Connection broken: {e!r}", e) from e + + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + def _fp_read( + self, + amt: int | None = None, + *, + read1: bool = False, + ) -> bytes: + """ + Read a response with the thought that reading the number of bytes + larger than can fit in a 32-bit int at a time via SSL in some + known cases leads to an overflow error that has to be prevented + if `amt` or `self.length_remaining` indicate that a problem may + happen. + + The known cases: + * 3.8 <= CPython < 3.9.7 because of a bug + https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900. + * urllib3 injected with pyOpenSSL-backed SSL-support. + * CPython < 3.10 only when `amt` does not fit 32-bit int. + """ + assert self._fp + c_int_max = 2**31 - 1 + if ( + (amt and amt > c_int_max) + or ( + amt is None + and self.length_remaining + and self.length_remaining > c_int_max + ) + ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)): + if read1: + return self._fp.read1(c_int_max) + buffer = io.BytesIO() + # Besides `max_chunk_amt` being a maximum chunk size, it + # affects memory overhead of reading a response by this + # method in CPython. + # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum + # chunk size that does not lead to an overflow error, but + # 256 MiB is a compromise. + max_chunk_amt = 2**28 + while amt is None or amt != 0: + if amt is not None: + chunk_amt = min(amt, max_chunk_amt) + amt -= chunk_amt + else: + chunk_amt = max_chunk_amt + data = self._fp.read(chunk_amt) + if not data: + break + buffer.write(data) + del data # to reduce peak memory usage by `max_chunk_amt`. + return buffer.getvalue() + elif read1: + return self._fp.read1(amt) if amt is not None else self._fp.read1() + else: + # StringIO doesn't like amt=None + return self._fp.read(amt) if amt is not None else self._fp.read() + + def _raw_read( + self, + amt: int | None = None, + *, + read1: bool = False, + ) -> bytes: + """ + Reads `amt` of bytes from the socket. 
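(The capped-read loop in `_fp_read` above, restated as a standalone sketch; `read_capped` is a hypothetical helper, and 2**28 mirrors the 256 MiB compromise noted in the comments:)

```python
import io
from typing import Optional

def read_capped(fp: io.BufferedIOBase, amt: Optional[int], max_chunk_amt: int = 2**28) -> bytes:
    # Never request more than max_chunk_amt bytes per read, so a single
    # SSL read stays far below the 2**31 - 1 overflow threshold.
    buffer = io.BytesIO()
    while amt is None or amt != 0:
        chunk_amt = max_chunk_amt if amt is None else min(amt, max_chunk_amt)
        if amt is not None:
            amt -= chunk_amt
        data = fp.read(chunk_amt)
        if not data:
            break
        buffer.write(data)
    return buffer.getvalue()

assert read_capped(io.BytesIO(b"abc" * 10), None) == b"abc" * 10
```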
+ """ + if self._fp is None: + return None # type: ignore[return-value] + + fp_closed = getattr(self._fp, "closed", False) + + with self._error_catcher(): + data = self._fp_read(amt, read1=read1) if not fp_closed else b"" + if amt is not None and amt != 0 and not data: + # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + if ( + self.enforce_content_length + and self.length_remaining is not None + and self.length_remaining != 0 + ): + # This is an edge case that httplib failed to cover due + # to concerns of backward compatibility. We're + # addressing it here to make sure IncompleteRead is + # raised during streaming, so all calls with incorrect + # Content-Length are caught. + raise IncompleteRead(self._fp_bytes_read, self.length_remaining) + elif read1 and ( + (amt != 0 and not data) or self.length_remaining == len(data) + ): + # All data has been read, but `self._fp.read1` in + # CPython 3.12 and older doesn't always close + # `http.client.HTTPResponse`, so we close it here. + # See https://github.com/python/cpython/issues/113199 + self._fp.close() + + if data: + self._fp_bytes_read += len(data) + if self.length_remaining is not None: + self.length_remaining -= len(data) + return data + + def read( + self, + amt: int | None = None, + decode_content: bool | None = None, + cache_content: bool = False, + ) -> bytes: + """ + Similar to :meth:`http.client.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + self._init_decoder() + if decode_content is None: + decode_content = self.decode_content + + if amt is not None: + cache_content = False + + if len(self._decoded_buffer) >= amt: + return self._decoded_buffer.get(amt) + + data = self._raw_read(amt) + + flush_decoder = amt is None or (amt != 0 and not data) + + if not data and len(self._decoded_buffer) == 0: + return data + + if amt is None: + data = self._decode(data, decode_content, flush_decoder) + if cache_content: + self._body = data + else: + # do not waste memory on buffer when not decoding + if not decode_content: + if self._has_decoded_content: + raise RuntimeError( + "Calling read(decode_content=False) is not supported after " + "read(decode_content=True) was called." 
+ ) + return data + + decoded_data = self._decode(data, decode_content, flush_decoder) + self._decoded_buffer.put(decoded_data) + + while len(self._decoded_buffer) < amt and data: + # TODO make sure to initially read enough data to get past the headers + # For example, the GZ file header takes 10 bytes, we don't want to read + # it one byte at a time + data = self._raw_read(amt) + decoded_data = self._decode(data, decode_content, flush_decoder) + self._decoded_buffer.put(decoded_data) + data = self._decoded_buffer.get(amt) + + return data + + def read1( + self, + amt: int | None = None, + decode_content: bool | None = None, + ) -> bytes: + """ + Similar to ``http.client.HTTPResponse.read1`` and documented + in :meth:`io.BufferedReader.read1`, but with an additional parameter: + ``decode_content``. + + :param amt: + How much of the content to read. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + if decode_content is None: + decode_content = self.decode_content + # try and respond without going to the network + if self._has_decoded_content: + if not decode_content: + raise RuntimeError( + "Calling read1(decode_content=False) is not supported after " + "read1(decode_content=True) was called." + ) + if len(self._decoded_buffer) > 0: + if amt is None: + return self._decoded_buffer.get_all() + return self._decoded_buffer.get(amt) + if amt == 0: + return b"" + + # FIXME, this method's type doesn't say returning None is possible + data = self._raw_read(amt, read1=True) + if not decode_content or data is None: + return data + + self._init_decoder() + while True: + flush_decoder = not data + decoded_data = self._decode(data, decode_content, flush_decoder) + self._decoded_buffer.put(decoded_data) + if decoded_data or flush_decoder: + break + data = self._raw_read(8192, read1=True) + + if amt is None: + return self._decoded_buffer.get_all() + return self._decoded_buffer.get(amt) + + def stream( + self, amt: int | None = 2**16, decode_content: bool | None = None + ) -> typing.Generator[bytes, None, None]: + """ + A generator wrapper for the read() method. A call will block until + ``amt`` bytes have been read from the connection or until the + connection is closed. + + :param amt: + How much of the content to read. The generator will return up to + much data per iteration, but may return less. This is particularly + likely when using compressed data. However, the empty string will + never be returned. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. 
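(A typical streaming download built on this generator might look like the following sketch; the URL and output path are placeholders:)

```python
import urllib3

http = urllib3.PoolManager()
resp = http.request("GET", "https://example.com/big.bin", preload_content=False)
with open("big.bin", "wb") as fh:  # placeholder destination
    for chunk in resp.stream(2**16, decode_content=True):
        fh.write(chunk)
resp.release_conn()
```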
+ """ + if self.chunked and self.supports_chunked_reads(): + yield from self.read_chunked(amt, decode_content=decode_content) + else: + while not is_fp_closed(self._fp) or len(self._decoded_buffer) > 0: + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + + # Overrides from io.IOBase + def readable(self) -> bool: + return True + + def close(self) -> None: + if not self.closed and self._fp: + self._fp.close() + + if self._connection: + self._connection.close() + + if not self.auto_close: + io.IOBase.close(self) + + @property + def closed(self) -> bool: + if not self.auto_close: + return io.IOBase.closed.__get__(self) # type: ignore[no-any-return] + elif self._fp is None: + return True + elif hasattr(self._fp, "isclosed"): + return self._fp.isclosed() + elif hasattr(self._fp, "closed"): + return self._fp.closed + else: + return True + + def fileno(self) -> int: + if self._fp is None: + raise OSError("HTTPResponse has no file to get a fileno from") + elif hasattr(self._fp, "fileno"): + return self._fp.fileno() + else: + raise OSError( + "The file-like object this HTTPResponse is wrapped " + "around has no file descriptor" + ) + + def flush(self) -> None: + if ( + self._fp is not None + and hasattr(self._fp, "flush") + and not getattr(self._fp, "closed", False) + ): + return self._fp.flush() + + def supports_chunked_reads(self) -> bool: + """ + Checks if the underlying file-like object looks like a + :class:`http.client.HTTPResponse` object. We do this by testing for + the fp attribute. If it is present we assume it returns raw chunks as + processed by read_chunked(). + """ + return hasattr(self._fp, "fp") + + def _update_chunk_length(self) -> None: + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return None + line = self._fp.fp.readline() # type: ignore[union-attr] + line = line.split(b";", 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + self.close() + if line: + # Invalid chunked protocol response, abort. + raise InvalidChunkLength(self, line) from None + else: + # Truncated at start of next chunk + raise ProtocolError("Response ended prematurely") from None + + def _handle_chunk(self, amt: int | None) -> bytes: + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr] + returned_chunk = chunk + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif self.chunk_left is not None and amt < self.chunk_left: + value = self._fp._safe_read(amt) # type: ignore[union-attr] + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) # type: ignore[union-attr] + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr] + self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk. + self.chunk_left = None + return returned_chunk # type: ignore[no-any-return] + + def read_chunked( + self, amt: int | None = None, decode_content: bool | None = None + ) -> typing.Generator[bytes, None, None]: + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param amt: + How much of the content to read. 
If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked( + "Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing." + ) + if not self.supports_chunked_reads(): + raise BodyNotHttplibCompatible( + "Body should be http.client.HTTPResponse like. " + "It should have have an fp attribute which returns raw chunks." + ) + + with self._error_catcher(): + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return None + + # If a response is already read and closed + # then return immediately. + if self._fp.fp is None: # type: ignore[union-attr] + return None + + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + decoded = self._decode( + chunk, decode_content=decode_content, flush_decoder=False + ) + if decoded: + yield decoded + + if decode_content: + # On CPython and PyPy, we should never need to flush the + # decoder. However, on Jython we *might* need to, so + # lets defensively do it anyway. + decoded = self._flush_decoder() + if decoded: # Platform-specific: Jython. + yield decoded + + # Chunk content ends with \r\n: discard it. + while self._fp is not None: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b"\r\n": + break + + # We read everything; close the "file". + if self._original_response: + self._original_response.close() + + @property + def url(self) -> str | None: + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. + """ + return self._request_url + + @url.setter + def url(self, url: str) -> None: + self._request_url = url + + def __iter__(self) -> typing.Iterator[bytes]: + buffer: list[bytes] = [] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunks = chunk.split(b"\n") + yield b"".join(buffer) + chunks[0] + b"\n" + for x in chunks[1:-1]: + yield x + b"\n" + if chunks[-1]: + buffer = [chunks[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/templates/skills/spotify/dependencies/urllib3/util/__init__.py b/templates/skills/spotify/dependencies/urllib3/util/__init__.py new file mode 100644 index 00000000..53412603 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/__init__.py @@ -0,0 +1,42 @@ +# For backwards compatibility, provide imports that used to be here. 
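(In practice these re-exports keep long-standing import paths working. A short sketch, assuming urllib3 v2 is installed; the user-agent string is hypothetical:)

```python
from urllib3.util import Retry, Timeout, make_headers, parse_url

retry = Retry(connect=3, read=2, redirect=5)
timeout = Timeout(connect=2.0, read=7.0)
headers = make_headers(keep_alive=True, user_agent="wingman-ai/1.0")
url = parse_url("https://example.com:8042/over/there?name=ferret")
assert url.host == "example.com" and url.port == 8042
```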
+from __future__ import annotations + +from .connection import is_connection_dropped +from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers +from .response import is_fp_closed +from .retry import Retry +from .ssl_ import ( + ALPN_PROTOCOLS, + IS_PYOPENSSL, + SSLContext, + assert_fingerprint, + create_urllib3_context, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .timeout import Timeout +from .url import Url, parse_url +from .wait import wait_for_read, wait_for_write + +__all__ = ( + "IS_PYOPENSSL", + "SSLContext", + "ALPN_PROTOCOLS", + "Retry", + "Timeout", + "Url", + "assert_fingerprint", + "create_urllib3_context", + "is_connection_dropped", + "is_fp_closed", + "parse_url", + "make_headers", + "resolve_cert_reqs", + "resolve_ssl_version", + "ssl_wrap_socket", + "wait_for_read", + "wait_for_write", + "SKIP_HEADER", + "SKIPPABLE_HEADERS", +) diff --git a/templates/skills/spotify/dependencies/urllib3/util/connection.py b/templates/skills/spotify/dependencies/urllib3/util/connection.py new file mode 100644 index 00000000..5c7da73f --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/connection.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import socket +import typing + +from ..exceptions import LocationParseError +from .timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT + +_TYPE_SOCKET_OPTIONS = typing.Sequence[typing.Tuple[int, int, typing.Union[int, bytes]]] + +if typing.TYPE_CHECKING: + from .._base_connection import BaseHTTPConnection + + +def is_connection_dropped(conn: BaseHTTPConnection) -> bool: # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + :param conn: :class:`urllib3.connection.HTTPConnection` object. + """ + return not conn.is_connected + + +# This function is copied from socket.py in the Python 2.7 standard +# library test suite. Added to its signature is only `socket_options`. +# One additional modification is that we avoid binding to IPv6 servers +# discovered in DNS if the system doesn't have IPv6 functionality. +def create_connection( + address: tuple[str, int], + timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + source_address: tuple[str, int] | None = None, + socket_options: _TYPE_SOCKET_OPTIONS | None = None, +) -> socket.socket: + """Connect to *address* and return the socket object. + + Convenience function. Connect to *address* (a 2-tuple ``(host, + port)``) and return the socket object. Passing the optional + *timeout* parameter will set the timeout on the socket instance + before attempting to connect. If no *timeout* is supplied, the + global default timeout setting returned by :func:`socket.getdefaulttimeout` + is used. If *source_address* is set it must be a tuple of (host, port) + for the socket to bind as a source address before making the connection. + An host of '' or port 0 tells the OS to use the default. + """ + + host, port = address + if host.startswith("["): + host = host.strip("[]") + err = None + + # Using the value from allowed_gai_family() in the context of getaddrinfo lets + # us select whether to work with IPv4 DNS records, IPv6 records, or both. + # The original create_connection function always returns all records. 
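(The effect of that family selection is visible directly through getaddrinfo; a simplified sketch (the real module additionally test-binds an IPv6 socket before trusting socket.has_ipv6):)

```python
import socket

family = socket.AF_UNSPEC if socket.has_ipv6 else socket.AF_INET
for af, _, _, _, sockaddr in socket.getaddrinfo("localhost", 80, family, socket.SOCK_STREAM):
    print(af, sockaddr)  # IPv4 and/or IPv6 records, depending on the family
```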
+ family = allowed_gai_family() + + try: + host.encode("idna") + except UnicodeError: + raise LocationParseError(f"'{host}', label empty or too long") from None + + for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + + # If provided, set socket level options before connecting. + _set_socket_options(sock, socket_options) + + if timeout is not _DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + # Break explicitly a reference cycle + err = None + return sock + + except OSError as _: + err = _ + if sock is not None: + sock.close() + + if err is not None: + try: + raise err + finally: + # Break explicitly a reference cycle + err = None + else: + raise OSError("getaddrinfo returns an empty list") + + +def _set_socket_options( + sock: socket.socket, options: _TYPE_SOCKET_OPTIONS | None +) -> None: + if options is None: + return + + for opt in options: + sock.setsockopt(*opt) + + +def allowed_gai_family() -> socket.AddressFamily: + """This function is designed to work in the context of + getaddrinfo, where family=socket.AF_UNSPEC is the default and + will perform a DNS search for both IPv6 and IPv4 records.""" + + family = socket.AF_INET + if HAS_IPV6: + family = socket.AF_UNSPEC + return family + + +def _has_ipv6(host: str) -> bool: + """Returns True if the system can bind an IPv6 address.""" + sock = None + has_ipv6 = False + + if socket.has_ipv6: + # has_ipv6 returns true if cPython was compiled with IPv6 support. + # It does not tell us if the system has IPv6 support enabled. To + # determine that we must bind to an IPv6 address. + # https://github.com/urllib3/urllib3/pull/611 + # https://bugs.python.org/issue658327 + try: + sock = socket.socket(socket.AF_INET6) + sock.bind((host, 0)) + has_ipv6 = True + except Exception: + pass + + if sock: + sock.close() + return has_ipv6 + + +HAS_IPV6 = _has_ipv6("::1") diff --git a/templates/skills/spotify/dependencies/urllib3/util/proxy.py b/templates/skills/spotify/dependencies/urllib3/util/proxy.py new file mode 100644 index 00000000..908fc662 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/proxy.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +import typing + +from .url import Url + +if typing.TYPE_CHECKING: + from ..connection import ProxyConfig + + +def connection_requires_http_tunnel( + proxy_url: Url | None = None, + proxy_config: ProxyConfig | None = None, + destination_scheme: str | None = None, +) -> bool: + """ + Returns True if the connection requires an HTTP CONNECT through the proxy. + + :param URL proxy_url: + URL of the proxy. + :param ProxyConfig proxy_config: + Proxy configuration from poolmanager.py + :param str destination_scheme: + The scheme of the destination. (i.e https, http, etc) + """ + # If we're not using a proxy, no way to use a tunnel. + if proxy_url is None: + return False + + # HTTP destinations never require tunneling, we always forward. + if destination_scheme == "http": + return False + + # Support for forwarding with HTTPS proxies and HTTPS destinations. + if ( + proxy_url.scheme == "https" + and proxy_config + and proxy_config.use_forwarding_for_https + ): + return False + + # Otherwise always use a tunnel. 
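(The decision table above, exercised directly; the proxy URL is hypothetical:)

```python
from urllib3.util.proxy import connection_requires_http_tunnel
from urllib3.util.url import parse_url

proxy = parse_url("http://proxy.internal:3128")  # hypothetical proxy
assert connection_requires_http_tunnel(proxy, None, "http") is False   # forwarded
assert connection_requires_http_tunnel(proxy, None, "https") is True   # CONNECT tunnel
assert connection_requires_http_tunnel(None, None, "https") is False   # no proxy at all
```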
+ return True diff --git a/templates/skills/spotify/dependencies/urllib3/util/request.py b/templates/skills/spotify/dependencies/urllib3/util/request.py new file mode 100644 index 00000000..fe0e3485 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/request.py @@ -0,0 +1,256 @@ +from __future__ import annotations + +import io +import typing +from base64 import b64encode +from enum import Enum + +from ..exceptions import UnrewindableBodyError +from .util import to_bytes + +if typing.TYPE_CHECKING: + from typing import Final + +# Pass as a value within ``headers`` to skip +# emitting some HTTP headers that are added automatically. +# The only headers that are supported are ``Accept-Encoding``, +# ``Host``, and ``User-Agent``. +SKIP_HEADER = "@@@SKIP_HEADER@@@" +SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) + +ACCEPT_ENCODING = "gzip,deflate" +try: + try: + import brotlicffi as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401 + except ImportError: + import brotli as _unused_module_brotli # type: ignore[import-not-found] # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" +try: + import zstandard as _unused_module_zstd # type: ignore[import-not-found] # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",zstd" + + +class _TYPE_FAILEDTELL(Enum): + token = 0 + + +_FAILEDTELL: Final[_TYPE_FAILEDTELL] = _TYPE_FAILEDTELL.token + +_TYPE_BODY_POSITION = typing.Union[int, _TYPE_FAILEDTELL] + +# When sending a request with these methods we aren't expecting +# a body so don't need to set an explicit 'Content-Length: 0' +# The reason we do this in the negative instead of tracking methods +# which 'should' have a body is because unknown methods should be +# treated as if they were 'POST' which *does* expect a body. +_METHODS_NOT_EXPECTING_BODY = {"GET", "HEAD", "DELETE", "TRACE", "OPTIONS", "CONNECT"} + + +def make_headers( + keep_alive: bool | None = None, + accept_encoding: bool | list[str] | str | None = None, + user_agent: str | None = None, + basic_auth: str | None = None, + proxy_basic_auth: str | None = None, + disable_cache: bool | None = None, +) -> dict[str, str]: + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. If either the ``brotli`` or + ``brotlicffi`` package is installed 'gzip,deflate,br' is used instead. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. + + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. + + Example: + + .. 
code-block:: python + + import urllib3 + + print(urllib3.util.make_headers(keep_alive=True, user_agent="Batman/1.0")) + # {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + print(urllib3.util.make_headers(accept_encoding=True)) + # {'accept-encoding': 'gzip,deflate'} + """ + headers: dict[str, str] = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ",".join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers["accept-encoding"] = accept_encoding + + if user_agent: + headers["user-agent"] = user_agent + + if keep_alive: + headers["connection"] = "keep-alive" + + if basic_auth: + headers[ + "authorization" + ] = f"Basic {b64encode(basic_auth.encode('latin-1')).decode()}" + + if proxy_basic_auth: + headers[ + "proxy-authorization" + ] = f"Basic {b64encode(proxy_basic_auth.encode('latin-1')).decode()}" + + if disable_cache: + headers["cache-control"] = "no-cache" + + return headers + + +def set_file_position( + body: typing.Any, pos: _TYPE_BODY_POSITION | None +) -> _TYPE_BODY_POSITION | None: + """ + If a position is provided, move file to that point. + Otherwise, we'll attempt to record a position for future use. + """ + if pos is not None: + rewind_body(body, pos) + elif getattr(body, "tell", None) is not None: + try: + pos = body.tell() + except OSError: + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body. + pos = _FAILEDTELL + + return pos + + +def rewind_body(body: typing.IO[typing.AnyStr], body_pos: _TYPE_BODY_POSITION) -> None: + """ + Attempt to rewind body to a certain position. + Primarily used for request redirects and retries. + + :param body: + File-like object that supports seek. + + :param int pos: + Position to seek to in file. + """ + body_seek = getattr(body, "seek", None) + if body_seek is not None and isinstance(body_pos, int): + try: + body_seek(body_pos) + except OSError as e: + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect/retry." + ) from e + elif body_pos is _FAILEDTELL: + raise UnrewindableBodyError( + "Unable to record file position for rewinding " + "request body during a redirect/retry." + ) + else: + raise ValueError( + f"body_pos must be of type integer, instead it was {type(body_pos)}." + ) + + +class ChunksAndContentLength(typing.NamedTuple): + chunks: typing.Iterable[bytes] | None + content_length: int | None + + +def body_to_chunks( + body: typing.Any | None, method: str, blocksize: int +) -> ChunksAndContentLength: + """Takes the HTTP request method, body, and blocksize and + transforms them into an iterable of chunks to pass to + socket.sendall() and an optional 'Content-Length' header. + + A 'Content-Length' of 'None' indicates the length of the body + can't be determined so should use 'Transfer-Encoding: chunked' + for framing instead. + """ + + chunks: typing.Iterable[bytes] | None + content_length: int | None + + # No body, we need to make a recommendation on 'Content-Length' + # based on whether that request method is expected to have + # a body or not. + if body is None: + chunks = None + if method.upper() not in _METHODS_NOT_EXPECTING_BODY: + content_length = 0 + else: + content_length = None + + # Bytes or strings become bytes + elif isinstance(body, (str, bytes)): + chunks = (to_bytes(body),) + content_length = len(chunks[0]) + + # File-like object, TODO: use seek() and tell() for length? 
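(How the two simplest branches behave, as a short sketch using this module's own function:)

```python
import io
from urllib3.util.request import body_to_chunks

# Bytes body: a single chunk with an exact Content-Length.
out = body_to_chunks(b"hello", method="POST", blocksize=8192)
assert out.content_length == 5 and list(out.chunks) == [b"hello"]

# File-like body: streamed in blocksize pieces; the length is unknown up
# front, so the caller frames it with Transfer-Encoding: chunked instead.
out = body_to_chunks(io.BytesIO(b"x" * 10), method="POST", blocksize=4)
assert out.content_length is None
assert list(out.chunks) == [b"xxxx", b"xxxx", b"xx"]
```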
+ elif hasattr(body, "read"): + + def chunk_readable() -> typing.Iterable[bytes]: + nonlocal body, blocksize + encode = isinstance(body, io.TextIOBase) + while True: + datablock = body.read(blocksize) + if not datablock: + break + if encode: + datablock = datablock.encode("iso-8859-1") + yield datablock + + chunks = chunk_readable() + content_length = None + + # Otherwise we need to start checking via duck-typing. + else: + try: + # Check if the body implements the buffer API. + mv = memoryview(body) + except TypeError: + try: + # Check if the body is an iterable + chunks = iter(body) + content_length = None + except TypeError: + raise TypeError( + f"'body' must be a bytes-like object, file-like " + f"object, or iterable. Instead was {body!r}" + ) from None + else: + # Since it implements the buffer API can be passed directly to socket.sendall() + chunks = (body,) + content_length = mv.nbytes + + return ChunksAndContentLength(chunks=chunks, content_length=content_length) diff --git a/templates/skills/spotify/dependencies/urllib3/util/response.py b/templates/skills/spotify/dependencies/urllib3/util/response.py new file mode 100644 index 00000000..0f457869 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/response.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import http.client as httplib +from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect + +from ..exceptions import HeaderParsingError + + +def is_fp_closed(obj: object) -> bool: + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + + try: + # Check `isclosed()` first, in case Python3 doesn't set `closed`. + # GH Issue #928 + return obj.isclosed() # type: ignore[no-any-return, attr-defined] + except AttributeError: + pass + + try: + # Check via the official file-like-object way. + return obj.closed # type: ignore[no-any-return, attr-defined] + except AttributeError: + pass + + try: + # Check if the object is a container for another file-like object that + # gets released on exhaustion (e.g. HTTPResponse). + return obj.fp is None # type: ignore[attr-defined] + except AttributeError: + pass + + raise ValueError("Unable to determine whether fp is closed.") + + +def assert_header_parsing(headers: httplib.HTTPMessage) -> None: + """ + Asserts whether all headers have been successfully parsed. + Extracts encountered errors from the result of parsing headers. + + Only works on Python 3. + + :param http.client.HTTPMessage headers: Headers to verify. + + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier add an explicit check. + if not isinstance(headers, httplib.HTTPMessage): + raise TypeError(f"expected httplib.Message, got {type(headers)}.") + + unparsed_data = None + + # get_payload is actually email.message.Message.get_payload; + # we're only interested in the result if it's not a multipart message + if not headers.is_multipart(): + payload = headers.get_payload() + + if isinstance(payload, (bytes, str)): + unparsed_data = payload + + # httplib is assuming a response body is available + # when parsing headers even when httplib only sends + # header data to parse_headers() This results in + # defects on multipart responses in particular. 
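(Calling the check on well-formed headers, as a minimal sketch:)

```python
import io
from http.client import parse_headers
from urllib3.util.response import assert_header_parsing

msg = parse_headers(io.BytesIO(b"Content-Type: text/plain\r\n\r\n"))
assert_header_parsing(msg)  # returns None; raises HeaderParsingError on defects
```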
+ # See: https://github.com/urllib3/urllib3/issues/800 + + # So we ignore the following defects: + # - StartBoundaryNotFoundDefect: + # The claimed start boundary was never found. + # - MultipartInvariantViolationDefect: + # A message claimed to be a multipart but no subparts were found. + defects = [ + defect + for defect in headers.defects + if not isinstance( + defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect) + ) + ] + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response: httplib.HTTPResponse) -> bool: + """ + Checks whether the request of a response has been a HEAD-request. + + :param http.client.HTTPResponse response: + Response to check if the originating request + used 'HEAD' as a method. + """ + # FIXME: Can we do this somehow without accessing private httplib _method? + method_str = response._method # type: str # type: ignore[attr-defined] + return method_str.upper() == "HEAD" diff --git a/templates/skills/spotify/dependencies/urllib3/util/retry.py b/templates/skills/spotify/dependencies/urllib3/util/retry.py new file mode 100644 index 00000000..7572bfd2 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/retry.py @@ -0,0 +1,529 @@ +from __future__ import annotations + +import email +import logging +import random +import re +import time +import typing +from itertools import takewhile +from types import TracebackType + +from ..exceptions import ( + ConnectTimeoutError, + InvalidHeader, + MaxRetryError, + ProtocolError, + ProxyError, + ReadTimeoutError, + ResponseError, +) +from .util import reraise + +if typing.TYPE_CHECKING: + from ..connectionpool import ConnectionPool + from ..response import BaseHTTPResponse + +log = logging.getLogger(__name__) + + +# Data structure for representing the metadata of requests that result in a retry. +class RequestHistory(typing.NamedTuple): + method: str | None + url: str | None + error: Exception | None + status: int | None + redirect_location: str | None + + +class Retry: + """Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool: + + .. code-block:: python + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request("GET", "https://example.com/") + + Or per-request (which overrides the default for the pool): + + .. code-block:: python + + response = http.request("GET", "https://example.com/", retries=Retry(10)) + + Retries can be disabled by passing ``False``: + + .. code-block:: python + + response = http.request("GET", "https://example.com/", retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. + + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. + + Set to ``0`` to fail on the first retry of this type. + + :param int read: + How many times to retry on read errors. 
+ + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int status: + How many times to retry on bad status codes. + + These are retries made on responses, where status code matches + ``status_forcelist``. + + Set to ``0`` to fail on the first retry of this type. + + :param int other: + How many times to retry on other errors. + + Other errors are errors that are not connect, read, redirect or status errors. + These errors might be raised after the request was sent to the server, so the + request might have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + If ``total`` is not set, it's a good idea to set this to 0 to account + for unexpected edge cases and avoid infinite retry loops. + + :param Collection allowed_methods: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + idempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`. + + Set to a ``None`` value to retry on any verb. + + :param Collection status_forcelist: + A set of integer HTTP status codes that we should force a retry on. + A retry is initiated if the request method is in ``allowed_methods`` + and the response status code is in ``status_forcelist``. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a + delay). urllib3 will sleep for:: + + {backoff factor} * (2 ** ({number of previous retries})) + + seconds. If `backoff_jitter` is non-zero, this sleep is extended by:: + + random.uniform(0, {backoff jitter}) + + seconds. For example, if the backoff_factor is 0.1, then :func:`Retry.sleep` will + sleep for [0.0s, 0.2s, 0.4s, 0.8s, ...] between retries. No backoff will ever + be longer than `backoff_max`. + + By default, backoff is disabled (factor set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. + + :param tuple history: The history of the request encountered during + each call to :meth:`~Retry.increment`. The list is in the order + the requests occurred. Each list item is of class :class:`RequestHistory`. + + :param bool respect_retry_after_header: + Whether to respect Retry-After header on status codes defined as + :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. + + :param Collection remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. 
+ """ + + #: Default methods to be used for ``allowed_methods`` + DEFAULT_ALLOWED_METHODS = frozenset( + ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] + ) + + #: Default status codes to be used for ``status_forcelist`` + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + #: Default headers to be used for ``remove_headers_on_redirect`` + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) + + #: Default maximum backoff time. + DEFAULT_BACKOFF_MAX = 120 + + # Backward compatibility; assigned outside of the class. + DEFAULT: typing.ClassVar[Retry] + + def __init__( + self, + total: bool | int | None = 10, + connect: int | None = None, + read: int | None = None, + redirect: bool | int | None = None, + status: int | None = None, + other: int | None = None, + allowed_methods: typing.Collection[str] | None = DEFAULT_ALLOWED_METHODS, + status_forcelist: typing.Collection[int] | None = None, + backoff_factor: float = 0, + backoff_max: float = DEFAULT_BACKOFF_MAX, + raise_on_redirect: bool = True, + raise_on_status: bool = True, + history: tuple[RequestHistory, ...] | None = None, + respect_retry_after_header: bool = True, + remove_headers_on_redirect: typing.Collection[ + str + ] = DEFAULT_REMOVE_HEADERS_ON_REDIRECT, + backoff_jitter: float = 0.0, + ) -> None: + self.total = total + self.connect = connect + self.read = read + self.status = status + self.other = other + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.allowed_methods = allowed_methods + self.backoff_factor = backoff_factor + self.backoff_max = backoff_max + self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status + self.history = history or () + self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = frozenset( + h.lower() for h in remove_headers_on_redirect + ) + self.backoff_jitter = backoff_jitter + + def new(self, **kw: typing.Any) -> Retry: + params = dict( + total=self.total, + connect=self.connect, + read=self.read, + redirect=self.redirect, + status=self.status, + other=self.other, + allowed_methods=self.allowed_methods, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + backoff_max=self.backoff_max, + raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, + history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect, + respect_retry_after_header=self.respect_retry_after_header, + backoff_jitter=self.backoff_jitter, + ) + + params.update(kw) + return type(self)(**params) # type: ignore[arg-type] + + @classmethod + def from_int( + cls, + retries: Retry | bool | int | None, + redirect: bool | int | None = True, + default: Retry | bool | int | None = None, + ) -> Retry: + """Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r", retries, new_retries) + return new_retries + + def get_backoff_time(self) -> float: + """Formula for computing the current backoff + + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). 
+ consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) + if consecutive_errors_len <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) + if self.backoff_jitter != 0.0: + backoff_value += random.random() * self.backoff_jitter + return float(max(0, min(self.backoff_max, backoff_value))) + + def parse_retry_after(self, retry_after: str) -> float: + seconds: float + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = int(retry_after) + else: + retry_date_tuple = email.utils.parsedate_tz(retry_after) + if retry_date_tuple is None: + raise InvalidHeader(f"Invalid Retry-After header: {retry_after}") + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + seconds = max(seconds, 0) + + return seconds + + def get_retry_after(self, response: BaseHTTPResponse) -> float | None: + """Get the value of Retry-After in seconds.""" + + retry_after = response.headers.get("Retry-After") + + if retry_after is None: + return None + + return self.parse_retry_after(retry_after) + + def sleep_for_retry(self, response: BaseHTTPResponse) -> bool: + retry_after = self.get_retry_after(response) + if retry_after: + time.sleep(retry_after) + return True + + return False + + def _sleep_backoff(self) -> None: + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def sleep(self, response: BaseHTTPResponse | None = None) -> None: + """Sleep between retry attempts. + + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + """ + + if self.respect_retry_after_header and response: + slept = self.sleep_for_retry(response) + if slept: + return + + self._sleep_backoff() + + def _is_connection_error(self, err: Exception) -> bool: + """Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + if isinstance(err, ProxyError): + err = err.original_error + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err: Exception) -> bool: + """Errors that occur after the request has been started, so we should + assume that the server began processing it. + """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def _is_method_retryable(self, method: str) -> bool: + """Checks if a given HTTP method should be retried upon, depending if + it is included in the allowed_methods + """ + if self.allowed_methods and method.upper() not in self.allowed_methods: + return False + return True + + def is_retry( + self, method: str, status_code: int, has_retry_after: bool = False + ) -> bool: + """Is this method/status code retryable? 
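(Both Retry-After formats handled by `parse_retry_after` above, in a two-line sketch; a past HTTP-date is clamped to zero:)

```python
from urllib3.util.retry import Retry

r = Retry()
assert r.parse_retry_after("120") == 120  # delta-seconds form
assert r.parse_retry_after("Wed, 21 Oct 2015 07:28:00 GMT") == 0  # past HTTP-date
```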
(Based on allowlists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + if not self._is_method_retryable(method): + return False + + if self.status_forcelist and status_code in self.status_forcelist: + return True + + return bool( + self.total + and self.respect_retry_after_header + and has_retry_after + and (status_code in self.RETRY_AFTER_STATUS_CODES) + ) + + def is_exhausted(self) -> bool: + """Are we out of retries?""" + retry_counts = [ + x + for x in ( + self.total, + self.connect, + self.read, + self.redirect, + self.status, + self.other, + ) + if x + ] + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment( + self, + method: str | None = None, + url: str | None = None, + response: BaseHTTPResponse | None = None, + error: Exception | None = None, + _pool: ConnectionPool | None = None, + _stacktrace: TracebackType | None = None, + ) -> Retry: + """Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.BaseHTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + connect = self.connect + read = self.read + redirect = self.redirect + status_count = self.status + other = self.other + cause = "unknown" + status = None + redirect_location = None + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False or method is None or not self._is_method_retryable(method): + raise reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + + elif error: + # Other retry? + if other is not None: + other -= 1 + + elif response and response.get_redirect_location(): + # Redirect retry? 
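+ # Clarifying note (not upstream): a 3xx response whose Location header
+ # resolves to a redirect target lands in this branch; each one consumes
+ # a 'redirect' credit and is recorded in 'history' below together with
+ # its redirect_location.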
+ if redirect is not None: + redirect -= 1 + cause = "too many redirects" + response_redirect_location = response.get_redirect_location() + if response_redirect_location: + redirect_location = response_redirect_location + status = response.status + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and the given method is in the allowed_methods + cause = ResponseError.GENERIC_ERROR + if response and response.status: + if status_count is not None: + status_count -= 1 + cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) + status = response.status + + history = self.history + ( + RequestHistory(method, url, error, status, redirect_location), + ) + + new_retry = self.new( + total=total, + connect=connect, + read=read, + redirect=redirect, + status=status_count, + other=other, + history=history, + ) + + if new_retry.is_exhausted(): + reason = error or ResponseError(cause) + raise MaxRetryError(_pool, url, reason) from reason # type: ignore[arg-type] + + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) + + return new_retry + + def __repr__(self) -> str: + return ( + f"{type(self).__name__}(total={self.total}, connect={self.connect}, " + f"read={self.read}, redirect={self.redirect}, status={self.status})" + ) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/templates/skills/spotify/dependencies/urllib3/util/ssl_.py b/templates/skills/spotify/dependencies/urllib3/util/ssl_.py new file mode 100644 index 00000000..b14cf27b --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/ssl_.py @@ -0,0 +1,509 @@ +from __future__ import annotations + +import hmac +import os +import socket +import sys +import typing +import warnings +from binascii import unhexlify +from hashlib import md5, sha1, sha256 + +from ..exceptions import ProxySchemeUnsupported, SSLError +from .url import _BRACELESS_IPV6_ADDRZ_RE, _IPV4_RE + +SSLContext = None +SSLTransport = None +HAS_NEVER_CHECK_COMMON_NAME = False +IS_PYOPENSSL = False +ALPN_PROTOCOLS = ["http/1.1"] + +_TYPE_VERSION_INFO = typing.Tuple[int, int, int, str, int] + +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} + + +def _is_bpo_43522_fixed( + implementation_name: str, + version_info: _TYPE_VERSION_INFO, + pypy_version_info: _TYPE_VERSION_INFO | None, +) -> bool: + """Return True for CPython 3.8.9+, 3.9.3+ or 3.10+ and PyPy 7.3.8+ where + setting SSLContext.hostname_checks_common_name to False works. + + Outside of CPython and PyPy we don't know which implementations work + or not so we conservatively use our hostname matching as we know that works + on all implementations. 
+ + https://github.com/urllib3/urllib3/issues/2192#issuecomment-821832963 + https://foss.heptapod.net/pypy/pypy/-/issues/3539 + """ + if implementation_name == "pypy": + # https://foss.heptapod.net/pypy/pypy/-/issues/3129 + return pypy_version_info >= (7, 3, 8) # type: ignore[operator] + elif implementation_name == "cpython": + major_minor = version_info[:2] + micro = version_info[2] + return ( + (major_minor == (3, 8) and micro >= 9) + or (major_minor == (3, 9) and micro >= 3) + or major_minor >= (3, 10) + ) + else: # Defensive: + return False + + +def _is_has_never_check_common_name_reliable( + openssl_version: str, + openssl_version_number: int, + implementation_name: str, + version_info: _TYPE_VERSION_INFO, + pypy_version_info: _TYPE_VERSION_INFO | None, +) -> bool: + # As of May 2023, all released versions of LibreSSL fail to reject certificates with + # only common names, see https://github.com/urllib3/urllib3/pull/3024 + is_openssl = openssl_version.startswith("OpenSSL ") + # Before fixing OpenSSL issue #14579, the SSL_new() API was not copying hostflags + # like X509_CHECK_FLAG_NEVER_CHECK_SUBJECT, which tripped up CPython. + # https://github.com/openssl/openssl/issues/14579 + # This was released in OpenSSL 1.1.1l+ (>=0x101010cf) + is_openssl_issue_14579_fixed = openssl_version_number >= 0x101010CF + + return is_openssl and ( + is_openssl_issue_14579_fixed + or _is_bpo_43522_fixed(implementation_name, version_info, pypy_version_info) + ) + + +if typing.TYPE_CHECKING: + from ssl import VerifyMode + from typing import Literal, TypedDict + + from .ssltransport import SSLTransport as SSLTransportType + + class _TYPE_PEER_CERT_RET_DICT(TypedDict, total=False): + subjectAltName: tuple[tuple[str, str], ...] + subject: tuple[tuple[tuple[str, str], ...], ...] + serialNumber: str + + +# Mapping from 'ssl.PROTOCOL_TLSX' to 'TLSVersion.X' +_SSL_VERSION_TO_TLS_VERSION: dict[int, int] = {} + +try: # Do we have ssl at all? + import ssl + from ssl import ( # type: ignore[assignment] + CERT_REQUIRED, + HAS_NEVER_CHECK_COMMON_NAME, + OP_NO_COMPRESSION, + OP_NO_TICKET, + OPENSSL_VERSION, + OPENSSL_VERSION_NUMBER, + PROTOCOL_TLS, + PROTOCOL_TLS_CLIENT, + OP_NO_SSLv2, + OP_NO_SSLv3, + SSLContext, + TLSVersion, + ) + + PROTOCOL_SSLv23 = PROTOCOL_TLS + + # Setting SSLContext.hostname_checks_common_name = False didn't work before CPython + # 3.8.9, 3.9.3, and 3.10 (but OK on PyPy) or OpenSSL 1.1.1l+ + if HAS_NEVER_CHECK_COMMON_NAME and not _is_has_never_check_common_name_reliable( + OPENSSL_VERSION, + OPENSSL_VERSION_NUMBER, + sys.implementation.name, + sys.version_info, + sys.pypy_version_info if sys.implementation.name == "pypy" else None, # type: ignore[attr-defined] + ): + HAS_NEVER_CHECK_COMMON_NAME = False + + # Need to be careful here in case old TLS versions get + # removed in future 'ssl' module implementations. 
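+ # Illustration (added in this vendored copy): on builds where the legacy
+ # constants exist, this loop produces entries such as
+ # ssl.PROTOCOL_TLSv1_2 -> TLSVersion.TLSv1_2, which is what lets the
+ # deprecated 'ssl_version' argument be translated into min/max
+ # TLSVersion bounds in create_urllib3_context() below.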
+ for attr in ("TLSv1", "TLSv1_1", "TLSv1_2"): + try: + _SSL_VERSION_TO_TLS_VERSION[getattr(ssl, f"PROTOCOL_{attr}")] = getattr( + TLSVersion, attr + ) + except AttributeError: # Defensive: + continue + + from .ssltransport import SSLTransport # type: ignore[assignment] +except ImportError: + OP_NO_COMPRESSION = 0x20000 # type: ignore[assignment] + OP_NO_TICKET = 0x4000 # type: ignore[assignment] + OP_NO_SSLv2 = 0x1000000 # type: ignore[assignment] + OP_NO_SSLv3 = 0x2000000 # type: ignore[assignment] + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 # type: ignore[assignment] + PROTOCOL_TLS_CLIENT = 16 # type: ignore[assignment] + + +_TYPE_PEER_CERT_RET = typing.Union["_TYPE_PEER_CERT_RET_DICT", bytes, None] + + +def assert_fingerprint(cert: bytes | None, fingerprint: str) -> None: + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + if cert is None: + raise SSLError("No certificate for the peer.") + + fingerprint = fingerprint.replace(":", "").lower() + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError(f"Fingerprint of invalid length: {fingerprint}") + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + cert_digest = hashfunc(cert).digest() + + if not hmac.compare_digest(cert_digest, fingerprint_bytes): + raise SSLError( + f'Fingerprints did not match. Expected "{fingerprint}", got "{cert_digest.hex()}"' + ) + + +def resolve_cert_reqs(candidate: None | int | str) -> VerifyMode: + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_REQUIRED`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbreviation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_REQUIRED + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "CERT_" + candidate) + return res # type: ignore[no-any-return] + + return candidate # type: ignore[return-value] + + +def resolve_ssl_version(candidate: None | int | str) -> int: + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_TLS + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "PROTOCOL_" + candidate) + return typing.cast(int, res) + + return candidate + + +def create_urllib3_context( + ssl_version: int | None = None, + cert_reqs: int | None = None, + options: int | None = None, + ciphers: str | None = None, + ssl_minimum_version: int | None = None, + ssl_maximum_version: int | None = None, +) -> ssl.SSLContext: + """Creates and configures an :class:`ssl.SSLContext` instance for use with urllib3. + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + + This parameter is deprecated instead use 'ssl_minimum_version'. + :param ssl_minimum_version: + The minimum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value. 
+ :param ssl_maximum_version:
+ The maximum version of TLS to be used. Use the 'ssl.TLSVersion' enum for specifying the value.
+ Not recommended to set to anything other than 'ssl.TLSVersion.MAXIMUM_SUPPORTED' which is the
+ default value.
+ :param cert_reqs:
+ Whether to require certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
+ :param ciphers:
+ Which cipher suites to allow the server to select. Defaults to either system configured
+ ciphers if OpenSSL 1.1.1+, otherwise uses a secure default set of ciphers.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ if SSLContext is None:
+ raise TypeError("Can't create an SSLContext object without an ssl module")
+
+ # This means 'ssl_version' was specified as an exact value.
+ if ssl_version not in (None, PROTOCOL_TLS, PROTOCOL_TLS_CLIENT):
+ # Disallow setting 'ssl_version' and 'ssl_minimum|maximum_version'
+ # to avoid conflicts.
+ if ssl_minimum_version is not None or ssl_maximum_version is not None:
+ raise ValueError(
+ "Can't specify both 'ssl_version' and either "
+ "'ssl_minimum_version' or 'ssl_maximum_version'"
+ )
+
+ # 'ssl_version' is deprecated and will be removed in the future.
+ else:
+ # Use 'ssl_minimum_version' and 'ssl_maximum_version' instead.
+ ssl_minimum_version = _SSL_VERSION_TO_TLS_VERSION.get(
+ ssl_version, TLSVersion.MINIMUM_SUPPORTED
+ )
+ ssl_maximum_version = _SSL_VERSION_TO_TLS_VERSION.get(
+ ssl_version, TLSVersion.MAXIMUM_SUPPORTED
+ )
+
+ # This warning message is pushing users to use 'ssl_minimum_version'
+ # instead of both min/max. Best practice is to only set the minimum version and
+ # keep the maximum version at its default value: 'TLSVersion.MAXIMUM_SUPPORTED'
+ warnings.warn(
+ "'ssl_version' option is deprecated and will be "
+ "removed in urllib3 v2.1.0. Instead use 'ssl_minimum_version'",
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+
+ # PROTOCOL_TLS is deprecated in Python 3.10 so we always use PROTOCOL_TLS_CLIENT
+ context = SSLContext(PROTOCOL_TLS_CLIENT)
+
+ if ssl_minimum_version is not None:
+ context.minimum_version = ssl_minimum_version
+ else: # Python <3.10 defaults to 'MINIMUM_SUPPORTED' so explicitly set TLSv1.2 here
+ context.minimum_version = TLSVersion.TLSv1_2
+
+ if ssl_maximum_version is not None:
+ context.maximum_version = ssl_maximum_version
+
+ # Unless we're given ciphers defer to either system ciphers in
+ # the case of OpenSSL 1.1.1+ or use our own secure default ciphers.
+ if ciphers:
+ context.set_ciphers(ciphers)
+
+ # Setting the default here, as we may have no ssl module on import
+ cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+ # TLSv1.2 only. Unless set explicitly, do not request tickets.
+ # This may save some bandwidth on the wire, and although the ticket is encrypted,
+ # there is a risk associated with it being on the wire
+ # if the server is not rotating its ticketing keys properly.
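+ # Illustrative note (not upstream): with options=None, the flags OR'd
+ # together here amount to OP_NO_SSLv2 | OP_NO_SSLv3 | OP_NO_COMPRESSION |
+ # OP_NO_TICKET, applied on top of whatever PROTOCOL_TLS_CLIENT already
+ # enables by default.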
+ options |= OP_NO_TICKET
+
+ context.options |= options
+
+ # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
+ # necessary for conditional client cert authentication with TLS 1.3.
+ # The attribute is None for OpenSSL <= 1.1.0 or does not exist when using
+ # an SSLContext created by pyOpenSSL.
+ if getattr(context, "post_handshake_auth", None) is not None:
+ context.post_handshake_auth = True
+
+ # The order of the below lines setting verify_mode and check_hostname
+ # matters due to safeguards SSLContext has to prevent an SSLContext with
+ # check_hostname=True, verify_mode=NONE/OPTIONAL.
+ # We always set 'check_hostname=False' for pyOpenSSL so we rely on our own
+ # 'ssl.match_hostname()' implementation.
+ if cert_reqs == ssl.CERT_REQUIRED and not IS_PYOPENSSL:
+ context.verify_mode = cert_reqs
+ context.check_hostname = True
+ else:
+ context.check_hostname = False
+ context.verify_mode = cert_reqs
+
+ try:
+ context.hostname_checks_common_name = False
+ except AttributeError: # Defensive: for CPython < 3.8.9 and 3.9.3; for PyPy < 7.3.8
+ pass
+
+ # Enable logging of TLS session keys via the de facto standard environment variable
+ # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
+ if hasattr(context, "keylog_filename"):
+ sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+ if sslkeylogfile:
+ context.keylog_filename = sslkeylogfile
+
+ return context
+
+
+@typing.overload
+def ssl_wrap_socket(
+ sock: socket.socket,
+ keyfile: str | None = ...,
+ certfile: str | None = ...,
+ cert_reqs: int | None = ...,
+ ca_certs: str | None = ...,
+ server_hostname: str | None = ...,
+ ssl_version: int | None = ...,
+ ciphers: str | None = ...,
+ ssl_context: ssl.SSLContext | None = ...,
+ ca_cert_dir: str | None = ...,
+ key_password: str | None = ...,
+ ca_cert_data: None | str | bytes = ...,
+ tls_in_tls: Literal[False] = ...,
+) -> ssl.SSLSocket:
+ ...
+
+
+@typing.overload
+def ssl_wrap_socket(
+ sock: socket.socket,
+ keyfile: str | None = ...,
+ certfile: str | None = ...,
+ cert_reqs: int | None = ...,
+ ca_certs: str | None = ...,
+ server_hostname: str | None = ...,
+ ssl_version: int | None = ...,
+ ciphers: str | None = ...,
+ ssl_context: ssl.SSLContext | None = ...,
+ ca_cert_dir: str | None = ...,
+ key_password: str | None = ...,
+ ca_cert_data: None | str | bytes = ...,
+ tls_in_tls: bool = ...,
+) -> ssl.SSLSocket | SSLTransportType:
+ ...
+
+
+def ssl_wrap_socket(
+ sock: socket.socket,
+ keyfile: str | None = None,
+ certfile: str | None = None,
+ cert_reqs: int | None = None,
+ ca_certs: str | None = None,
+ server_hostname: str | None = None,
+ ssl_version: int | None = None,
+ ciphers: str | None = None,
+ ssl_context: ssl.SSLContext | None = None,
+ ca_cert_dir: str | None = None,
+ key_password: str | None = None,
+ ca_cert_data: None | str | bytes = None,
+ tls_in_tls: bool = False,
+) -> ssl.SSLSocket | SSLTransportType:
+ """
+ All arguments except for server_hostname, ssl_context, tls_in_tls, ca_cert_data and
+ ca_cert_dir have the same meaning as they do when using
+ :func:`ssl.create_default_context`, :meth:`ssl.SSLContext.load_cert_chain`,
+ :meth:`ssl.SSLContext.set_ciphers` and :meth:`ssl.SSLContext.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers: + A string of ciphers we wish the client to support. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. + :param ca_cert_data: + Optional string containing CA certificates in PEM format suitable for + passing as the cadata parameter to SSLContext.load_verify_locations() + :param tls_in_tls: + Use SSLTransport to wrap the existing socket. + """ + context = ssl_context + if context is None: + # Note: This branch of code and all the variables in it are only used in tests. + # We should consider deprecating and removing this code. + context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) + + if ca_certs or ca_cert_dir or ca_cert_data: + try: + context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) + except OSError as e: + raise SSLError(e) from e + + elif ssl_context is None and hasattr(context, "load_default_certs"): + # try to load OS default certs; works well on Windows. + context.load_default_certs() + + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + + if certfile: + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) + + try: + context.set_alpn_protocols(ALPN_PROTOCOLS) + except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols + pass + + ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls, server_hostname) + return ssl_sock + + +def is_ipaddress(hostname: str | bytes) -> bool: + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if isinstance(hostname, bytes): + # IDN A-label bytes are ASCII compatible. + hostname = hostname.decode("ascii") + return bool(_IPV4_RE.match(hostname) or _BRACELESS_IPV6_ADDRZ_RE.match(hostname)) + + +def _is_key_file_encrypted(key_file: str) -> bool: + """Detects if a key file is encrypted or not.""" + with open(key_file) as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if "ENCRYPTED" in line: + return True + + return False + + +def _ssl_wrap_socket_impl( + sock: socket.socket, + ssl_context: ssl.SSLContext, + tls_in_tls: bool, + server_hostname: str | None = None, +) -> ssl.SSLSocket | SSLTransportType: + if tls_in_tls: + if not SSLTransport: + # Import error, ssl is not available. 
+ raise ProxySchemeUnsupported( + "TLS in TLS requires support for the 'ssl' module" + ) + + SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context) + return SSLTransport(sock, ssl_context, server_hostname) + + return ssl_context.wrap_socket(sock, server_hostname=server_hostname) diff --git a/templates/skills/spotify/dependencies/urllib3/util/ssl_match_hostname.py b/templates/skills/spotify/dependencies/urllib3/util/ssl_match_hostname.py new file mode 100644 index 00000000..453cfd42 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/ssl_match_hostname.py @@ -0,0 +1,159 @@ +"""The match_hostname() function from Python 3.5, essential when using SSL.""" + +# Note: This file is under the PSF license as the code comes from the python +# stdlib. http://docs.python.org/3/license.html +# It is modified to remove commonName support. + +from __future__ import annotations + +import ipaddress +import re +import typing +from ipaddress import IPv4Address, IPv6Address + +if typing.TYPE_CHECKING: + from .ssl_ import _TYPE_PEER_CERT_RET_DICT + +__version__ = "3.5.0.1" + + +class CertificateError(ValueError): + pass + + +def _dnsname_match( + dn: typing.Any, hostname: str, max_wildcards: int = 1 +) -> typing.Match[str] | None | bool: + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r".") + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count("*") + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn) + ) + + # speed up common case w/o wildcards + if not wildcards: + return bool(dn.lower() == hostname.lower()) + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == "*": + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append("[^.]+") + elif leftmost.startswith("xn--") or hostname.startswith("xn--"): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) + return pat.match(hostname) + + +def _ipaddress_match(ipname: str, host_ip: IPv4Address | IPv6Address) -> bool: + """Exact matching of IP addresses. + + RFC 9110 section 4.3.5: "A reference identity of IP-ID contains the decoded + bytes of the IP address. An IP version 4 address is 4 octets, and an IP + version 6 address is 16 octets. [...] A reference identity of type IP-ID + matches if the address is identical to an iPAddress value of the + subjectAltName extension of the certificate." 
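+
+ For illustration (added in this vendored copy, not upstream text): an
+ ipname of "93.184.216.34\n" -- OpenSSL may append such a trailing
+ newline -- matches host_ip = ipaddress.ip_address("93.184.216.34"),
+ because both sides pack to the same four octets.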
+ """ + # OpenSSL may add a trailing newline to a subjectAltName's IP address + # Divergence from upstream: ipaddress can't handle byte str + ip = ipaddress.ip_address(ipname.rstrip()) + return bool(ip.packed == host_ip.packed) + + +def match_hostname( + cert: _TYPE_PEER_CERT_RET_DICT | None, + hostname: str, + hostname_checks_common_name: bool = False, +) -> None: + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError( + "empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED" + ) + try: + # Divergence from upstream: ipaddress can't handle byte str + # + # The ipaddress module shipped with Python < 3.9 does not support + # scoped IPv6 addresses so we unconditionally strip the Zone IDs for + # now. Once we drop support for Python 3.9 we can remove this branch. + if "%" in hostname: + host_ip = ipaddress.ip_address(hostname[: hostname.rfind("%")]) + else: + host_ip = ipaddress.ip_address(hostname) + + except ValueError: + # Not an IP address (common case) + host_ip = None + dnsnames = [] + san: tuple[tuple[str, str], ...] = cert.get("subjectAltName", ()) + key: str + value: str + for key, value in san: + if key == "DNS": + if host_ip is None and _dnsname_match(value, hostname): + return + dnsnames.append(value) + elif key == "IP Address": + if host_ip is not None and _ipaddress_match(value, host_ip): + return + dnsnames.append(value) + + # We only check 'commonName' if it's enabled and we're not verifying + # an IP address. IP addresses aren't valid within 'commonName'. + if hostname_checks_common_name and host_ip is None and not dnsnames: + for sub in cert.get("subject", ()): + for key, value in sub: + if key == "commonName": + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + + if len(dnsnames) > 1: + raise CertificateError( + "hostname %r " + "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) + ) + elif len(dnsnames) == 1: + raise CertificateError(f"hostname {hostname!r} doesn't match {dnsnames[0]!r}") + else: + raise CertificateError("no appropriate subjectAltName fields were found") diff --git a/templates/skills/spotify/dependencies/urllib3/util/ssltransport.py b/templates/skills/spotify/dependencies/urllib3/util/ssltransport.py new file mode 100644 index 00000000..fa9f2b37 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/ssltransport.py @@ -0,0 +1,280 @@ +from __future__ import annotations + +import io +import socket +import ssl +import typing + +from ..exceptions import ProxySchemeUnsupported + +if typing.TYPE_CHECKING: + from typing import Literal + + from .ssl_ import _TYPE_PEER_CERT_RET, _TYPE_PEER_CERT_RET_DICT + + +_SelfT = typing.TypeVar("_SelfT", bound="SSLTransport") +_WriteBuffer = typing.Union[bytearray, memoryview] +_ReturnValue = typing.TypeVar("_ReturnValue") + +SSL_BLOCKSIZE = 16384 + + +class SSLTransport: + """ + The SSLTransport wraps an existing socket and establishes an SSL connection. + + Contrary to Python's implementation of SSLSocket, it allows you to chain + multiple TLS connections together. It's particularly useful if you need to + implement TLS within TLS. + + The class supports most of the socket API operations. 
+ """ + + @staticmethod + def _validate_ssl_context_for_tls_in_tls(ssl_context: ssl.SSLContext) -> None: + """ + Raises a ProxySchemeUnsupported if the provided ssl_context can't be used + for TLS in TLS. + + The only requirement is that the ssl_context provides the 'wrap_bio' + methods. + """ + + if not hasattr(ssl_context, "wrap_bio"): + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "available on non-native SSLContext" + ) + + def __init__( + self, + socket: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + suppress_ragged_eofs: bool = True, + ) -> None: + """ + Create an SSLTransport around socket using the provided ssl_context. + """ + self.incoming = ssl.MemoryBIO() + self.outgoing = ssl.MemoryBIO() + + self.suppress_ragged_eofs = suppress_ragged_eofs + self.socket = socket + + self.sslobj = ssl_context.wrap_bio( + self.incoming, self.outgoing, server_hostname=server_hostname + ) + + # Perform initial handshake. + self._ssl_io_loop(self.sslobj.do_handshake) + + def __enter__(self: _SelfT) -> _SelfT: + return self + + def __exit__(self, *_: typing.Any) -> None: + self.close() + + def fileno(self) -> int: + return self.socket.fileno() + + def read(self, len: int = 1024, buffer: typing.Any | None = None) -> int | bytes: + return self._wrap_ssl_read(len, buffer) + + def recv(self, buflen: int = 1024, flags: int = 0) -> int | bytes: + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv") + return self._wrap_ssl_read(buflen) + + def recv_into( + self, + buffer: _WriteBuffer, + nbytes: int | None = None, + flags: int = 0, + ) -> None | int | bytes: + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv_into") + if nbytes is None: + nbytes = len(buffer) + return self.read(nbytes, buffer) + + def sendall(self, data: bytes, flags: int = 0) -> None: + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to sendall") + count = 0 + with memoryview(data) as view, view.cast("B") as byte_view: + amount = len(byte_view) + while count < amount: + v = self.send(byte_view[count:]) + count += v + + def send(self, data: bytes, flags: int = 0) -> int: + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to send") + return self._ssl_io_loop(self.sslobj.write, data) + + def makefile( + self, + mode: str, + buffering: int | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> typing.BinaryIO | typing.TextIO | socket.SocketIO: + """ + Python's httpclient uses makefile and buffered io when reading HTTP + messages and we need to support it. + + This is unfortunately a copy and paste of socket.py makefile with small + changes to point to the socket directly. 
+ """ + if not set(mode) <= {"r", "w", "b"}: + raise ValueError(f"invalid mode {mode!r} (only r, w, b allowed)") + + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = socket.SocketIO(self, rawmode) # type: ignore[arg-type] + self.socket._io_refs += 1 # type: ignore[attr-defined] + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + buffer: typing.BinaryIO + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) # type: ignore[assignment] + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode # type: ignore[misc] + return text + + def unwrap(self) -> None: + self._ssl_io_loop(self.sslobj.unwrap) + + def close(self) -> None: + self.socket.close() + + @typing.overload + def getpeercert( + self, binary_form: Literal[False] = ... + ) -> _TYPE_PEER_CERT_RET_DICT | None: + ... + + @typing.overload + def getpeercert(self, binary_form: Literal[True]) -> bytes | None: + ... + + def getpeercert(self, binary_form: bool = False) -> _TYPE_PEER_CERT_RET: + return self.sslobj.getpeercert(binary_form) # type: ignore[return-value] + + def version(self) -> str | None: + return self.sslobj.version() + + def cipher(self) -> tuple[str, str, int] | None: + return self.sslobj.cipher() + + def selected_alpn_protocol(self) -> str | None: + return self.sslobj.selected_alpn_protocol() + + def selected_npn_protocol(self) -> str | None: + return self.sslobj.selected_npn_protocol() + + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: + return self.sslobj.shared_ciphers() + + def compression(self) -> str | None: + return self.sslobj.compression() + + def settimeout(self, value: float | None) -> None: + self.socket.settimeout(value) + + def gettimeout(self) -> float | None: + return self.socket.gettimeout() + + def _decref_socketios(self) -> None: + self.socket._decref_socketios() # type: ignore[attr-defined] + + def _wrap_ssl_read(self, len: int, buffer: bytearray | None = None) -> int | bytes: + try: + return self._ssl_io_loop(self.sslobj.read, len, buffer) + except ssl.SSLError as e: + if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs: + return 0 # eof, return 0. + else: + raise + + # func is sslobj.do_handshake or sslobj.unwrap + @typing.overload + def _ssl_io_loop(self, func: typing.Callable[[], None]) -> None: + ... + + # func is sslobj.write, arg1 is data + @typing.overload + def _ssl_io_loop(self, func: typing.Callable[[bytes], int], arg1: bytes) -> int: + ... + + # func is sslobj.read, arg1 is len, arg2 is buffer + @typing.overload + def _ssl_io_loop( + self, + func: typing.Callable[[int, bytearray | None], bytes], + arg1: int, + arg2: bytearray | None, + ) -> bytes: + ... 
+
+ def _ssl_io_loop(
+ self,
+ func: typing.Callable[..., _ReturnValue],
+ arg1: None | bytes | int = None,
+ arg2: bytearray | None = None,
+ ) -> _ReturnValue:
+ """Performs an I/O loop between incoming/outgoing and the socket."""
+ should_loop = True
+ ret = None
+
+ while should_loop:
+ errno = None
+ try:
+ if arg1 is None and arg2 is None:
+ ret = func()
+ elif arg2 is None:
+ ret = func(arg1)
+ else:
+ ret = func(arg1, arg2)
+ except ssl.SSLError as e:
+ if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
+ # WANT_READ, and WANT_WRITE are expected, others are not.
+ raise e
+ errno = e.errno
+
+ buf = self.outgoing.read()
+ self.socket.sendall(buf)
+
+ if errno is None:
+ should_loop = False
+ elif errno == ssl.SSL_ERROR_WANT_READ:
+ buf = self.socket.recv(SSL_BLOCKSIZE)
+ if buf:
+ self.incoming.write(buf)
+ else:
+ self.incoming.write_eof()
+ return typing.cast(_ReturnValue, ret)
diff --git a/templates/skills/spotify/dependencies/urllib3/util/timeout.py b/templates/skills/spotify/dependencies/urllib3/util/timeout.py
new file mode 100644
index 00000000..4bb1be11
--- /dev/null
+++ b/templates/skills/spotify/dependencies/urllib3/util/timeout.py
@@ -0,0 +1,275 @@
+from __future__ import annotations
+
+import time
+import typing
+from enum import Enum
+from socket import getdefaulttimeout
+
+from ..exceptions import TimeoutStateError
+
+if typing.TYPE_CHECKING:
+ from typing import Final
+
+
+class _TYPE_DEFAULT(Enum):
+ # This value should never be passed to socket.settimeout() so for safety we use a -1.
+ # socket.settimeout() raises a ValueError for negative values.
+ token = -1
+
+
+_DEFAULT_TIMEOUT: Final[_TYPE_DEFAULT] = _TYPE_DEFAULT.token
+
+_TYPE_TIMEOUT = typing.Optional[typing.Union[float, _TYPE_DEFAULT]]
+
+
+class Timeout:
+ """Timeout configuration.
+
+ Timeouts can be defined as a default for a pool:
+
+ .. code-block:: python
+
+ import urllib3
+
+ timeout = urllib3.util.Timeout(connect=2.0, read=7.0)
+
+ http = urllib3.PoolManager(timeout=timeout)
+
+ resp = http.request("GET", "https://example.com/")
+
+ print(resp.status)
+
+ Or per-request (which overrides the default for the pool):
+
+ .. code-block:: python
+
+ response = http.request("GET", "https://example.com/", timeout=Timeout(10))
+
+ Timeouts can be disabled by setting all the parameters to ``None``:
+
+ .. code-block:: python
+
+ no_timeout = Timeout(connect=None, read=None)
+ response = http.request("GET", "https://example.com/", timeout=no_timeout)
+
+
+ :param total:
+ This combines the connect and read timeouts into one; the read timeout
+ will be set to the time leftover from the connect attempt. In the
+ event that both a connect timeout and a total are specified, or a read
+ timeout and a total are specified, the shorter timeout will be applied.
+
+ Defaults to None.
+
+ :type total: int, float, or None
+
+ :param connect:
+ The maximum amount of time (in seconds) to wait for a connection
+ attempt to a server to succeed. Omitting the parameter will default the
+ connect timeout to the system default, probably the global default
+ timeout in socket.py.
+ None will set an infinite timeout for connection attempts.
+
+ :type connect: int, float, or None
+
+ :param read:
+ The maximum amount of time (in seconds) to wait between consecutive
+ read operations for a response from the server. Omitting the parameter
+ will default the read timeout to the system default, probably the
+ global default timeout in socket.py.
+ None will set an infinite timeout.
+ + :type read: int, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. + + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not trigger, even though the request will take + several minutes to complete. + """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT + + def __init__( + self, + total: _TYPE_TIMEOUT = None, + connect: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + read: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT, + ) -> None: + self._connect = self._validate_timeout(connect, "connect") + self._read = self._validate_timeout(read, "read") + self.total = self._validate_timeout(total, "total") + self._start_connect: float | None = None + + def __repr__(self) -> str: + return f"{type(self).__name__}(connect={self._connect!r}, read={self._read!r}, total={self.total!r})" + + # __str__ provided for backwards compatibility + __str__ = __repr__ + + @staticmethod + def resolve_default_timeout(timeout: _TYPE_TIMEOUT) -> float | None: + return getdefaulttimeout() if timeout is _DEFAULT_TIMEOUT else timeout + + @classmethod + def _validate_timeout(cls, value: _TYPE_TIMEOUT, name: str) -> _TYPE_TIMEOUT: + """Check that a timeout attribute is valid. + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If it is a numeric value less than or equal to + zero, or the type is not an integer, float, or None. + """ + if value is None or value is _DEFAULT_TIMEOUT: + return value + + if isinstance(value, bool): + raise ValueError( + "Timeout cannot be a boolean value. It must " + "be an int, float or None." + ) + try: + float(value) + except (TypeError, ValueError): + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) from None + + try: + if value <= 0: + raise ValueError( + "Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value) + ) + except TypeError: + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) from None + + return value + + @classmethod + def from_float(cls, timeout: _TYPE_TIMEOUT) -> Timeout: + """Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value + passed to this function. + + :param timeout: The legacy timeout value. 
+ :type timeout: integer, float, :attr:`urllib3.util.Timeout.DEFAULT_TIMEOUT`, or None + :return: Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self) -> Timeout: + """Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. + + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. + return Timeout(connect=self._connect, read=self._read, total=self.total) + + def start_connect(self) -> float: + """Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. + """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = time.monotonic() + return self._start_connect + + def get_connect_duration(self) -> float: + """Gets the time elapsed since the call to :meth:`start_connect`. + + :return: Elapsed time in seconds. + :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError( + "Can't get connect duration for timer that has not started." + ) + return time.monotonic() - self._start_connect + + @property + def connect_timeout(self) -> _TYPE_TIMEOUT: + """Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. + + :return: Connect timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is _DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) # type: ignore[type-var] + + @property + def read_timeout(self) -> float | None: + """Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: Value to use for the read timeout. + :rtype: int, float or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if ( + self.total is not None + and self.total is not _DEFAULT_TIMEOUT + and self._read is not None + and self._read is not _DEFAULT_TIMEOUT + ): + # In case the connect timeout has not yet been established. 
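+ # Worked example (illustrative, not upstream): for Timeout(total=10,
+ # read=7), a connect phase that took 4 seconds leaves
+ # min(10 - 4, 7) = 6 seconds as the effective read timeout.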
+ if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), self._read)) + elif self.total is not None and self.total is not _DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self.resolve_default_timeout(self._read) diff --git a/templates/skills/spotify/dependencies/urllib3/util/url.py b/templates/skills/spotify/dependencies/urllib3/util/url.py new file mode 100644 index 00000000..d53ea932 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/url.py @@ -0,0 +1,471 @@ +from __future__ import annotations + +import re +import typing + +from ..exceptions import LocationParseError +from .util import to_str + +# We only want to normalize urls with an HTTP(S) scheme. +# urllib3 infers URLs without a scheme (None) to be http. +_NORMALIZABLE_SCHEMES = ("http", "https", None) + +# Almost all of these patterns were derived from the +# 'rfc3986' module: https://github.com/python-hyper/rfc3986 +_PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") +_SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") +_URI_RE = re.compile( + r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" + r"(?://([^\\/?#]*))?" + r"([^?#]*)" + r"(?:\?([^#]*))?" + r"(?:#(.*))?$", + re.UNICODE | re.DOTALL, +) + +_IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +_HEX_PAT = "[0-9A-Fa-f]{1,4}" +_LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=_HEX_PAT, ipv4=_IPV4_PAT) +_subs = {"hex": _HEX_PAT, "ls32": _LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +_UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~" +_IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +_ZONE_ID_PAT = "(?:%25|%)(?:[" + _UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +_IPV6_ADDRZ_PAT = r"\[" + _IPV6_PAT + r"(?:" + _ZONE_ID_PAT + r")?\]" +_REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" +_TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") + +_IPV4_RE = re.compile("^" + _IPV4_PAT + "$") +_IPV6_RE = re.compile("^" + _IPV6_PAT + "$") +_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT + "$") +_BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + _IPV6_ADDRZ_PAT[2:-2] + "$") +_ZONE_ID_RE = re.compile("(" + _ZONE_ID_PAT + r")\]$") + +_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % ( + _REG_NAME_PAT, + _IPV4_PAT, + _IPV6_ADDRZ_PAT, +) +_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL) + +_UNRESERVED_CHARS = set( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" +) +_SUB_DELIM_CHARS = set("!$&'()*+,;=") +_USERINFO_CHARS = _UNRESERVED_CHARS | _SUB_DELIM_CHARS | {":"} +_PATH_CHARS = _USERINFO_CHARS | {"@", "/"} +_QUERY_CHARS = _FRAGMENT_CHARS = _PATH_CHARS | {"?"} + + +class Url( + typing.NamedTuple( + "Url", + [ + ("scheme", typing.Optional[str]), + ("auth", typing.Optional[str]), + 
("host", typing.Optional[str]), + ("port", typing.Optional[int]), + ("path", typing.Optional[str]), + ("query", typing.Optional[str]), + ("fragment", typing.Optional[str]), + ], + ) +): + """ + Data structure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. Both the scheme and host are normalized as they are + both case-insensitive according to RFC 3986. + """ + + def __new__( # type: ignore[no-untyped-def] + cls, + scheme: str | None = None, + auth: str | None = None, + host: str | None = None, + port: int | None = None, + path: str | None = None, + query: str | None = None, + fragment: str | None = None, + ): + if path and not path.startswith("/"): + path = "/" + path + if scheme is not None: + scheme = scheme.lower() + return super().__new__(cls, scheme, auth, host, port, path, query, fragment) + + @property + def hostname(self) -> str | None: + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self) -> str: + """Absolute path including the query string.""" + uri = self.path or "/" + + if self.query is not None: + uri += "?" + self.query + + return uri + + @property + def authority(self) -> str | None: + """ + Authority component as defined in RFC 3986 3.2. + This includes userinfo (auth), host and port. + + i.e. + userinfo@host:port + """ + userinfo = self.auth + netloc = self.netloc + if netloc is None or userinfo is None: + return netloc + else: + return f"{userinfo}@{netloc}" + + @property + def netloc(self) -> str | None: + """ + Network location including host and port. + + If you need the equivalent of urllib.parse's ``netloc``, + use the ``authority`` property instead. + """ + if self.host is None: + return None + if self.port: + return f"{self.host}:{self.port}" + return self.host + + @property + def url(self) -> str: + """ + Convert self into a url + + This function should more or less round-trip with :func:`.parse_url`. The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: + + .. code-block:: python + + import urllib3 + + U = urllib3.util.parse_url("https://google.com/mail/") + + print(U.url) + # "https://google.com/mail/" + + print( urllib3.util.Url("https", "username:password", + "host.com", 80, "/path", "query", "fragment" + ).url + ) + # "https://username:password@host.com:80/path?query#fragment" + """ + scheme, auth, host, port, path, query, fragment = self + url = "" + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + "://" + if auth is not None: + url += auth + "@" + if host is not None: + url += host + if port is not None: + url += ":" + str(port) + if path is not None: + url += path + if query is not None: + url += "?" + query + if fragment is not None: + url += "#" + fragment + + return url + + def __str__(self) -> str: + return self.url + + +@typing.overload +def _encode_invalid_chars( + component: str, allowed_chars: typing.Container[str] +) -> str: # Abstract + ... + + +@typing.overload +def _encode_invalid_chars( + component: None, allowed_chars: typing.Container[str] +) -> None: # Abstract + ... + + +def _encode_invalid_chars( + component: str | None, allowed_chars: typing.Container[str] +) -> str | None: + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. 
+ """ + if component is None: + return component + + component = to_str(component) + + # Normalize existing percent-encoded bytes. + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. + component, percent_encodings = _PERCENT_RE.subn( + lambda match: match.group(0).upper(), component + ) + + uri_bytes = component.encode("utf-8", "surrogatepass") + is_percent_encoded = percent_encodings == uri_bytes.count(b"%") + encoded_component = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring + byte = uri_bytes[i : i + 1] + byte_ord = ord(byte) + if (is_percent_encoded and byte == b"%") or ( + byte_ord < 128 and byte.decode() in allowed_chars + ): + encoded_component += byte + continue + encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) + + return encoded_component.decode() + + +def _remove_path_dot_segments(path: str) -> str: + # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code + segments = path.split("/") # Turn the path into a list of segments + output = [] # Initialize the variable to use to store output + + for segment in segments: + # '.' is the current directory, so ignore it, it is superfluous + if segment == ".": + continue + # Anything other than '..', should be appended to the output + if segment != "..": + output.append(segment) + # In this case segment == '..', if we can, we should pop the last + # element + elif output: + output.pop() + + # If the path starts with '/' and the output is empty or the first string + # is non-empty + if path.startswith("/") and (not output or output[0]): + output.insert(0, "") + + # If the path starts with '/.' or '/..' ensure we add one more empty + # string to add a trailing '/' + if path.endswith(("/.", "/..")): + output.append("") + + return "/".join(output) + + +@typing.overload +def _normalize_host(host: None, scheme: str | None) -> None: + ... + + +@typing.overload +def _normalize_host(host: str, scheme: str | None) -> str: + ... + + +def _normalize_host(host: str | None, scheme: str | None) -> str | None: + if host: + if scheme in _NORMALIZABLE_SCHEMES: + is_ipv6 = _IPV6_ADDRZ_RE.match(host) + if is_ipv6: + # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as + # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID + # separator as necessary to return a valid RFC 4007 scoped IP. 
+ match = _ZONE_ID_RE.search(host) + if match: + start, end = match.span(1) + zone_id = host[start:end] + + if zone_id.startswith("%25") and zone_id != "%25": + zone_id = zone_id[3:] + else: + zone_id = zone_id[1:] + zone_id = _encode_invalid_chars(zone_id, _UNRESERVED_CHARS) + return f"{host[:start].lower()}%{zone_id}{host[end:]}" + else: + return host.lower() + elif not _IPV4_RE.match(host): + return to_str( + b".".join([_idna_encode(label) for label in host.split(".")]), + "ascii", + ) + return host + + +def _idna_encode(name: str) -> bytes: + if not name.isascii(): + try: + import idna + except ImportError: + raise LocationParseError( + "Unable to parse URL without the 'idna' module" + ) from None + + try: + return idna.encode(name.lower(), strict=True, std3_rules=True) + except idna.IDNAError: + raise LocationParseError( + f"Name '{name}' is not a valid IDNA label" + ) from None + + return name.lower().encode("ascii") + + +def _encode_target(target: str) -> str: + """Percent-encodes a request target so that there are no invalid characters + + Pre-condition for this function is that 'target' must start with '/'. + If that is the case then _TARGET_RE will always produce a match. + """ + match = _TARGET_RE.match(target) + if not match: # Defensive: + raise LocationParseError(f"{target!r} is not a valid request URI") + + path, query = match.groups() + encoded_target = _encode_invalid_chars(path, _PATH_CHARS) + if query is not None: + query = _encode_invalid_chars(query, _QUERY_CHARS) + encoded_target += "?" + query + return encoded_target + + +def parse_url(url: str) -> Url: + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 and RFC 6874 compliant. + + The parser logic and helper functions are based heavily on + work done in the ``rfc3986`` module. + + :param str url: URL to parse into a :class:`.Url` namedtuple. + + Partly backwards-compatible with :mod:`urllib.parse`. + + Example: + + .. code-block:: python + + import urllib3 + + print( urllib3.util.parse_url('http://google.com/mail/')) + # Url(scheme='http', host='google.com', port=None, path='/mail/', ...) + + print( urllib3.util.parse_url('google.com:80')) + # Url(scheme=None, host='google.com', port=80, path=None, ...) + + print( urllib3.util.parse_url('/foo?bar')) + # Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
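+
+ # Additional example (added in this vendored copy, not upstream):
+ print( urllib3.util.parse_url('https://user:pass@host.com:8080/p?q#f'))
+ # Url(scheme='https', auth='user:pass', host='host.com', port=8080, path='/p', query='q', fragment='f')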
+ """ + if not url: + # Empty + return Url() + + source_url = url + if not _SCHEME_RE.search(url): + url = "//" + url + + scheme: str | None + authority: str | None + auth: str | None + host: str | None + port: str | None + port_int: int | None + path: str | None + query: str | None + fragment: str | None + + try: + scheme, authority, path, query, fragment = _URI_RE.match(url).groups() # type: ignore[union-attr] + normalize_uri = scheme is None or scheme.lower() in _NORMALIZABLE_SCHEMES + + if scheme: + scheme = scheme.lower() + + if authority: + auth, _, host_port = authority.rpartition("@") + auth = auth or None + host, port = _HOST_PORT_RE.match(host_port).groups() # type: ignore[union-attr] + if auth and normalize_uri: + auth = _encode_invalid_chars(auth, _USERINFO_CHARS) + if port == "": + port = None + else: + auth, host, port = None, None, None + + if port is not None: + port_int = int(port) + if not (0 <= port_int <= 65535): + raise LocationParseError(url) + else: + port_int = None + + host = _normalize_host(host, scheme) + + if normalize_uri and path: + path = _remove_path_dot_segments(path) + path = _encode_invalid_chars(path, _PATH_CHARS) + if normalize_uri and query: + query = _encode_invalid_chars(query, _QUERY_CHARS) + if normalize_uri and fragment: + fragment = _encode_invalid_chars(fragment, _FRAGMENT_CHARS) + + except (ValueError, AttributeError) as e: + raise LocationParseError(source_url) from e + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. + if not path: + if query is not None or fragment is not None: + path = "" + else: + path = None + + return Url( + scheme=scheme, + auth=auth, + host=host, + port=port_int, + path=path, + query=query, + fragment=fragment, + ) diff --git a/templates/skills/spotify/dependencies/urllib3/util/util.py b/templates/skills/spotify/dependencies/urllib3/util/util.py new file mode 100644 index 00000000..35c77e40 --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/util.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import typing +from types import TracebackType + + +def to_bytes( + x: str | bytes, encoding: str | None = None, errors: str | None = None +) -> bytes: + if isinstance(x, bytes): + return x + elif not isinstance(x, str): + raise TypeError(f"not expecting type {type(x).__name__}") + if encoding or errors: + return x.encode(encoding or "utf-8", errors=errors or "strict") + return x.encode() + + +def to_str( + x: str | bytes, encoding: str | None = None, errors: str | None = None +) -> str: + if isinstance(x, str): + return x + elif not isinstance(x, bytes): + raise TypeError(f"not expecting type {type(x).__name__}") + if encoding or errors: + return x.decode(encoding or "utf-8", errors=errors or "strict") + return x.decode() + + +def reraise( + tp: type[BaseException] | None, + value: BaseException, + tb: TracebackType | None = None, +) -> typing.NoReturn: + try: + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None # type: ignore[assignment] + tb = None diff --git a/templates/skills/spotify/dependencies/urllib3/util/wait.py b/templates/skills/spotify/dependencies/urllib3/util/wait.py new file mode 100644 index 00000000..aeca0c7a --- /dev/null +++ b/templates/skills/spotify/dependencies/urllib3/util/wait.py @@ -0,0 +1,124 @@ +from __future__ import annotations + +import select +import socket 
+from functools import partial + +__all__ = ["wait_for_read", "wait_for_write"] + + +# How should we wait on sockets? +# +# There are two types of APIs you can use for waiting on sockets: the fancy +# modern stateful APIs like epoll/kqueue, and the older stateless APIs like +# select/poll. The stateful APIs are more efficient when you have lots of +# sockets to keep track of, because you can set them up once and then use them +# lots of times. But we only ever want to wait on a single socket at a time +# and don't want to keep track of state, so the stateless APIs are actually +# more efficient. So we want to use select() or poll(). +# +# Now, how do we choose between select() and poll()? On traditional Unixes, +# select() has a strange calling convention that makes it slow, or fail +# altogether, for high-numbered file descriptors. The point of poll() is to fix +# that, so on Unixes, we prefer poll(). +# +# On Windows, there is no poll() (or at least Python doesn't provide a wrapper +# for it), but that's OK, because on Windows, select() doesn't have this +# strange calling convention; plain select() works fine. +# +# So: on Windows we use select(), and everywhere else we use poll(). We also +# fall back to select() in case poll() is somehow broken or missing. + + +def select_wait_for_socket( + sock: socket.socket, + read: bool = False, + write: bool = False, + timeout: float | None = None, +) -> bool: + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + rcheck = [] + wcheck = [] + if read: + rcheck.append(sock) + if write: + wcheck.append(sock) + # When doing a non-blocking connect, most systems signal success by + # marking the socket writable. Windows, though, signals success by marking + # it as "exceptional". We paper over the difference by checking the write + # sockets for both conditions. (The stdlib selectors module does the same + # thing.) + fn = partial(select.select, rcheck, wcheck, wcheck) + rready, wready, xready = fn(timeout) + return bool(rready or wready or xready) + + +def poll_wait_for_socket( + sock: socket.socket, + read: bool = False, + write: bool = False, + timeout: float | None = None, +) -> bool: + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + mask = 0 + if read: + mask |= select.POLLIN + if write: + mask |= select.POLLOUT + poll_obj = select.poll() + poll_obj.register(sock, mask) + + # For some reason, poll() takes timeout in milliseconds + def do_poll(t: float | None) -> list[tuple[int, int]]: + if t is not None: + t *= 1000 + return poll_obj.poll(t) + + return bool(do_poll(timeout)) + + +def _have_working_poll() -> bool: + # Apparently some systems have a select.poll that fails as soon as you try + # to use it, either due to strange configuration or broken monkeypatching + # from libraries like eventlet/greenlet. + try: + poll_obj = select.poll() + poll_obj.poll(0) + except (AttributeError, OSError): + return False + else: + return True + + +def wait_for_socket( + sock: socket.socket, + read: bool = False, + write: bool = False, + timeout: float | None = None, +) -> bool: + # We delay choosing which implementation to use until the first time we're + # called. We could do it at import time, but then we might make the wrong + # decision if someone goes wild with monkeypatching select.poll after + # we're imported.
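+    # (A hypothetical usage sketch, via the convenience wrappers defined + # below: + # + # if wait_for_read(sock, timeout=1.0): + # data = sock.recv(4096) + # + # The first call rebinds wait_for_socket to the implementation chosen + # here, so subsequent calls skip this capability check.)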
+ global wait_for_socket + if _have_working_poll(): + wait_for_socket = poll_wait_for_socket + elif hasattr(select, "select"): + wait_for_socket = select_wait_for_socket + return wait_for_socket(sock, read, write, timeout) + + +def wait_for_read(sock: socket.socket, timeout: float | None = None) -> bool: + """Waits for reading to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, read=True, timeout=timeout) + + +def wait_for_write(sock: socket.socket, timeout: float | None = None) -> bool: + """Waits for writing to be available on a given socket. + Returns True if the socket is writable, or False if the timeout expired. + """ + return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/templates/skills/spotify/logo.png b/templates/skills/spotify/logo.png new file mode 100644 index 00000000..4ceb1369 Binary files /dev/null and b/templates/skills/spotify/logo.png differ diff --git a/templates/skills/spotify/main.py b/templates/skills/spotify/main.py new file mode 100644 index 00000000..7826fcd8 --- /dev/null +++ b/templates/skills/spotify/main.py @@ -0,0 +1,378 @@ +from os import path +import spotipy +from spotipy.oauth2 import SpotifyOAuth +from api.enums import LogSource, LogType +from api.interface import ( + SettingsConfig, + SkillConfig, + WingmanConfig, + WingmanInitializationError, +) +from services.file import get_writable_dir +from skills.skill_base import Skill + + +class Spotify(Skill): + + def __init__( + self, + config: SkillConfig, + wingman_config: WingmanConfig, + settings: SettingsConfig, + ) -> None: + super().__init__( + config=config, wingman_config=wingman_config, settings=settings + ) + self.data_path = get_writable_dir(path.join("skills", "spotify", "data")) + self.spotify: spotipy.Spotify = None + self.available_devices = [] + + async def validate(self) -> list[WingmanInitializationError]: + errors = await super().validate() + + secret = await self.retrieve_secret("spotify_client_secret", errors) + client_id = self.retrieve_custom_property_value("spotify_client_id", errors) + redirect_url = self.retrieve_custom_property_value( + "spotify_redirect_url", errors + ) + if secret and client_id and redirect_url: + # now that we have everything, initialize the Spotify client + cache_handler = spotipy.cache_handler.CacheFileHandler( + cache_path=f"{self.data_path}/.cache" + ) + self.spotify = spotipy.Spotify( + auth_manager=SpotifyOAuth( + client_id=client_id, + client_secret=secret, + redirect_uri=redirect_url, + scope=[ + "user-library-read", + "user-read-currently-playing", + "user-read-playback-state", + "user-modify-playback-state", + "streaming", + # "playlist-modify-public", + "playlist-read-private", + # "playlist-modify-private", + "user-library-modify", + # "user-read-recently-played", + # "user-top-read" + ], + cache_handler=cache_handler, + ) + ) + return errors + + def get_tools(self) -> list[tuple[str, dict]]: + tools = [ + ( + "control_spotify_device", + { + "type": "function", + "function": { + "name": "control_spotify_device", + "description": "Retrieves or sets the audio device of the user that Spotify songs are played on.", + "parameters": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The playback action to take", + "enum": ["get_devices", "set_active_device"], + }, + "device_name": { + "type": "string", + "description": "The name of the device to set as the active device.", + "enum": [ + device["name"] + for
device in self.get_available_devices() + ], + }, + }, + "required": ["action"], + }, + }, + }, + ), + ( + "control_spotify_playback", + { + "type": "function", + "function": { + "name": "control_spotify_playback", + "description": "Control the Spotify audio playback with actions like play, pause/stop, play the previous/next track, or set the volume level.", + "parameters": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The playback action to take", + "enum": [ + "play", + "pause", + "stop", + "play_next_track", + "play_previous_track", + "set_volume", + "mute", + "get_current_track", + "like_song", + ], + }, + "volume_level": { + "type": "number", + "description": "The volume level to set (in percent)", + }, + }, + "required": ["action"], + }, + }, + }, + ), + ( + "play_song_with_spotify", + { + "type": "function", + "function": { + "name": "play_song_with_spotify", + "description": "Find a song with Spotify to either play it immediately or queue it.", + "parameters": { + "type": "object", + "properties": { + "track": { + "type": "string", + "description": "The name of the track to play", + }, + "artist": { + "type": "string", + "description": "The artist that created the track", + }, + "queue": { + "type": "boolean", + "description": "If true, the song will be queued and played later. Otherwise it will be played immediately.", + }, + }, + }, + }, + }, + ), + ( + "interact_with_spotify_playlists", + { + "type": "function", + "function": { + "name": "interact_with_spotify_playlists", + "description": "Get the user's Spotify playlists or play a song from one of them.", + "parameters": { + "type": "object", + "properties": { + "action": { + "type": "string", + "description": "The action to take", + "enum": ["get_playlists", "play_playlist"], + }, + "playlist": { + "type": "string", + "description": "The name of the playlist to interact with", + "enum": [ + playlist["name"] + for playlist in self.get_user_playlists() + ], + }, + }, + "required": ["action"], + }, + }, + }, + ), + ] + return tools + + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] + ) -> tuple[str, str]: + instant_response = "" # not used here + function_response = "Unable to control Spotify."
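+        # Tool calls are dispatched by name below: an "action" parameter, if + # present, is looked up as a method on this skill (e.g. action="play" + # invokes self.play()); otherwise the tool name itself is used (e.g. + # "play_song_with_spotify" invokes self.play_song_with_spotify(...)).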
+ + if tool_name not in [ + "control_spotify_device", + "control_spotify_playback", + "play_song_with_spotify", + "interact_with_spotify_playlists", + ]: + return function_response, instant_response + + if self.settings.debug_mode: + self.start_execution_benchmark() + await self.printr.print_async( + f"Spotify: Executing {tool_name} with parameters: {parameters}", + color=LogType.INFO, + ) + + action = parameters.get("action", None) + parameters.pop("action", None) + function = getattr(self, action if action else tool_name) + function_response = function(**parameters) + + if self.settings.debug_mode: + await self.print_execution_time() + + return function_response, instant_response + + # HELPERS + + def get_available_devices(self): + devices = [ + device + for device in self.spotify.devices().get("devices") + if not device["is_restricted"] + ] + return devices + + def get_active_devices(self): + active_devices = [ + device + for device in self.spotify.devices().get("devices") + if device["is_active"] + ] + return active_devices + + def get_user_playlists(self): + playlists = self.spotify.current_user_playlists() + return playlists["items"] + + def get_playlist_uri(self, playlist_name: str): + playlists = self.spotify.current_user_playlists() + playlist = next( + ( + playlist + for playlist in playlists["items"] + if playlist["name"].lower() == playlist_name.lower() + ), + None, + ) + return playlist["uri"] if playlist else None + + # ACTIONS + + def get_devices(self): + active_devices = self.get_active_devices() + active_device_names = ", ".join([device["name"] for device in active_devices]) + available_device_names = ", ".join( + [device["name"] for device in self.get_available_devices()] + ) + if active_devices and len(active_devices) > 0: + return f"Your available devices are: {available_device_names}. Your active devices are: {active_device_names}." + if available_device_names: + return f"No active device found but these are the available devices: {available_device_names}" + + return "No devices found. Start Spotify on one of your devices first, then try again." + + def set_active_device(self, device_name: str): + if device_name: + device = next( + ( + device + for device in self.get_available_devices() + if device["name"] == device_name + ), + None, + ) + if device: + self.spotify.transfer_playback(device["id"]) + return "OK" + else: + return f"Device '{device_name}' not found." + + return "Device name not provided." + + def play(self): + self.spotify.start_playback() + return "OK" + + def pause(self): + self.spotify.pause_playback() + return "OK" + + def stop(self): + return self.pause() + + def play_previous_track(self): + self.spotify.previous_track() + return "OK" + + def play_next_track(self): + self.spotify.next_track() + return "OK" + + def set_volume(self, volume_level: int): + if volume_level: + self.spotify.volume(volume_level) + return "OK" + + return "Volume level not provided." + + def mute(self): + self.spotify.volume(0) + return "OK" + + def get_current_track(self): + current_playback = self.spotify.current_playback() + if current_playback: + artist = current_playback["item"]["artists"][0]["name"] + track = current_playback["item"]["name"] + return f"Currently playing '{track}' by '{artist}'." + + return "No track playing." + + def like_song(self): + current_playback = self.spotify.current_playback() + if current_playback: + track_id = current_playback["item"]["id"] + self.spotify.current_user_saved_tracks_add([track_id]) + return "Track saved to 'Your Music' library." 
+ + return "No track playing. Play a song, then tell me to like it." + + def play_song_with_spotify( + self, track: str = None, artist: str = None, queue: bool = False + ): + if not track and not artist: + return "What song or artist would you like to play?" + results = self.spotify.search(q=f"{track} {artist}", type="track", limit=1) + track = results["tracks"]["items"][0] + if track: + track_name = track["name"] + artist_name = track["artists"][0]["name"] + try: + if queue: + self.spotify.add_to_queue(track["uri"]) + return f"Added '{track_name}' by '{artist_name}' to the queue." + else: + self.spotify.start_playback(uris=[track["uri"]]) + return f"Now playing '{track_name}' by '{artist_name}'." + except spotipy.SpotifyException as e: + if e.reason == "NO_ACTIVE_DEVICE": + return "No active device found. Start Spotify on one of your devices first, then play a song or tell me to activate a device." + return f"An error occurred while trying to play the song. Code: {e.code}, Reason: '{e.reason}'" + + return "No track found." + + def get_playlists(self): + playlists = self.get_user_playlists() + playlist_names = ", ".join([playlist["name"] for playlist in playlists]) + if playlist_names: + return f"Your playlists are: {playlist_names}" + + return "No playlists found." + + def play_playlist(self, playlist: str = None): + if not playlist: + return "Which playlist would you like to play?" + + playlist_uri = self.get_playlist_uri(playlist) + if playlist_uri: + self.spotify.start_playback(context_uri=playlist_uri) + return f"Playing playlist '{playlist}'." + + return f"Playlist '{playlist}' not found." diff --git a/templates/skills/spotify/requirements.txt b/templates/skills/spotify/requirements.txt new file mode 100644 index 00000000..dbc2911c --- /dev/null +++ b/templates/skills/spotify/requirements.txt @@ -0,0 +1 @@ +spotipy==2.23.0 \ No newline at end of file diff --git a/templates/skills/star_head/default_config.yaml b/templates/skills/star_head/default_config.yaml new file mode 100644 index 00000000..fb9ffd01 --- /dev/null +++ b/templates/skills/star_head/default_config.yaml @@ -0,0 +1,43 @@ +name: StarHead +module: skills.star_head.main +description: + en: Use the StarHead API to retrieve detailed information about spaceships, weapons and more. StarHead can also calculate optimal trading routes based on live data. + de: Nutze die StarHead API, um detaillierte Informationen über Raumschiffe, Waffen und mehr abzurufen. StarHead kann auch optimale Handelsrouten anhand von Live-Daten berechnen. +# hint: +# en: +# de: +examples: + - question: + en: I want to trade. What's the best route? + de: Ich möchte handeln. Was ist die beste Route? + answer: + en: To provide you with the best trading route, I need to know your ship model, your current location, and your available budget. Could you please provide these details? + de: Um dir die beste Handelsroute anbieten zu können, muss ich dein Schiffsmodell, deinen aktuellen Standort und dein verfügbares Budget kennen. Kannst du mir diese Angaben bitte mitteilen? + - question: + en: I'm flying a Caterpillar and am near Yela. I have 100.000 credits to spend. + de: Ich fliege eine Caterpillar und bin in der Nähe von Yela. Ich habe 100.000 Credits auszugeben. + answer: + en: You can buy Stims at Deakins Research Outpost near Yela for 2.8 credits/unit and sell them at CRU-L1 Ambitious Dream Station for 3.85 credits/unit. The total profit for this route is approximately 37499 credits, and the travel time estimation is 41 minutes. 
+ de: Du kannst Stims bei Deakins Research Outpost in der Nähe von Yela für 2,8 Credits/Stück kaufen und sie bei CRU-L1 Ambitious Dream Station für 3,85 Credits/Stück verkaufen. Der Gesamtgewinn für diese Route beträgt ca. 37499 Credits, und die geschätzte Reisezeit beträgt 41 Minuten. + - question: + en: What can you tell me about the Caterpillar? + de: Was kannst du mir über die Caterpillar erzählen? + answer: + en: The Constellation Taurus is a dedicated freighter, designed for hauling cargo. It has a cargo capacity of 174 SCU and is fully configurable but without all the bells and whistles found on other Constellation variants. On the other hand, the Constellation Andromeda is a multi-person freighter and the most popular ship in RSI's current production array. It has a cargo capacity of 96 SCU and is beloved by smugglers and merchants alike for its modular and high-powered capabilities. Both are part of the Constellation series, but the Taurus specifically caters to dedicated freight operations whereas the Andromeda serves as a multi-person versatile ship. + de: Die Constellation Taurus ist ein reiner Frachter, der für den Transport von Fracht entwickelt wurde. Er hat eine Ladekapazität von 174 SCU und ist voll konfigurierbar, hat aber nicht den ganzen Schnickschnack der anderen Constellation-Varianten. Die Constellation Andromeda hingegen ist ein Mehrpersonen-Frachter und das beliebteste Schiff in der aktuellen Produktion von RSI. Sie hat eine Ladekapazität von 96 SCU und ist bei Schmugglern und Händlern wegen ihrer modularen und leistungsstarken Fähigkeiten gleichermaßen beliebt. Beide gehören zur Constellation-Serie, aber die Taurus ist speziell für den reinen Frachtverkehr gedacht, während die Andromeda ein vielseitiges Schiff für mehrere Personen ist. +prompt: | + You have access to the StarHead API which you can use to access live trading data and to retrieve additional information about spaceships in Star Citizen. + Your job is to find good trading routes for the user based on his/her ship, current location and available budget. + The user can also ask you about details of specific ships, components, weapons, and more. + You always use the tools available to you to retrieve the required information and to provide the user with the information. +custom_properties: + - hint: The URL of the StarHead API. + id: starhead_api_url + name: StarHead API URL + required: true + value: https://api.star-head.de + - hint: The URL of the Star Citizen Wiki API. 
+ id: star_citizen_wiki_api_url + name: Star Citizen Wiki API URL + required: true + value: https://api.star-citizen.wiki/api/v2 diff --git a/templates/skills/star_head/logo.png b/templates/skills/star_head/logo.png new file mode 100644 index 00000000..80c534cb Binary files /dev/null and b/templates/skills/star_head/logo.png differ diff --git a/templates/skills/star_head/main.py b/templates/skills/star_head/main.py new file mode 100644 index 00000000..8be1c5d1 --- /dev/null +++ b/templates/skills/star_head/main.py @@ -0,0 +1,275 @@ +import json +from typing import Optional +import requests +from api.enums import LogSource, LogType, WingmanInitializationErrorType +from api.interface import ( + SettingsConfig, + SkillConfig, + WingmanConfig, + WingmanInitializationError, +) +from skills.skill_base import Skill + + +class StarHead(Skill): + + def __init__( + self, + config: SkillConfig, + wingman_config: WingmanConfig, + settings: SettingsConfig, + ) -> None: + super().__init__( + config=config, wingman_config=wingman_config, settings=settings + ) + + # config entry existence not validated yet. Assign later when checked! + self.starhead_url = "" + """The base URL of the StarHead API""" + + self.headers = {"x-origin": "wingman-ai"} + """Required headers for the StarHead API""" + + self.timeout = 5 + """Global timeout for calls to the StarHead API (in seconds)""" + + self.star_citizen_wiki_url = "" + + self.vehicles = [] + self.ship_names = [] + self.celestial_objects = [] + self.celestial_object_names = [] + self.quantum_drives = [] + + async def validate(self) -> list[WingmanInitializationError]: + errors = await super().validate() + + self.starhead_url = self.retrieve_custom_property_value( + "starhead_api_url", errors + ) + self.star_citizen_wiki_url = self.retrieve_custom_property_value( + "star_citizen_wiki_api_url", errors + ) + + try: + await self._prepare_data() + except Exception as e: + errors.append( + WingmanInitializationError( + wingman_name=self.name, + message=f"Failed to load data from StarHead API: {e}", + error_type=WingmanInitializationErrorType.UNKNOWN, + ) + ) + + return errors + + async def _prepare_data(self): + self.vehicles = await self._fetch_data("vehicle") + self.ship_names = [ + self._format_ship_name(vehicle) + for vehicle in self.vehicles + if vehicle["type"] == "Ship" + ] + + self.celestial_objects = await self._fetch_data("celestialobject") + self.celestial_object_names = [ + celestial_object["name"] for celestial_object in self.celestial_objects + ] + + self.quantum_drives = await self._fetch_data( + "vehiclecomponent", {"typeFilter": 8} + ) + + async def _fetch_data( + self, endpoint: str, params: Optional[dict[str, any]] = None + ) -> list[dict[str, any]]: + url = f"{self.starhead_url}/{endpoint}" + + if self.settings.debug_mode: + self.start_execution_benchmark() + await self.printr.print_async( + f"Retrieving {url}", + color=LogType.INFO, + ) + + response = requests.get( + url, params=params, timeout=self.timeout, headers=self.headers + ) + response.raise_for_status() + if self.settings.debug_mode: + await self.print_execution_time() + + return response.json() + + def _format_ship_name(self, vehicle: dict[str, any]) -> str: + """Returns the vehicle's name as the ship's display name.""" + return vehicle["name"] + + async def execute_tool( + self, tool_name: str, parameters: dict[str, any] + ) -> tuple[str, str]: + instant_response = "" + function_response = "" + + if tool_name == "get_best_trading_route": + function_response = await
self._get_best_trading_route(**parameters) + if tool_name == "get_ship_information": + function_response = await self._get_ship_information(**parameters) + return function_response, instant_response + + def get_tools(self) -> list[tuple[str, dict]]: + tools = [ + ( + "get_best_trading_route", + { + "type": "function", + "function": { + "name": "get_best_trading_route", + "description": "Finds the best trade route for a given spaceship and position.", + "parameters": { + "type": "object", + "properties": { + "ship": {"type": "string", "enum": self.ship_names}, + "position": { + "type": "string", + "enum": self.celestial_object_names, + }, + "moneyToSpend": {"type": "number"}, + }, + "required": ["ship", "position", "moneyToSpend"], + }, + }, + }, + ), + ( + "get_ship_information", + { + "type": "function", + "function": { + "name": "get_ship_information", + "description": "Gives information about the given ship.", + "parameters": { + "type": "object", + "properties": { + "ship": {"type": "string", "enum": self.ship_names}, + }, + "required": ["ship"], + }, + }, + }, + ), + ] + + return tools + + async def _get_ship_information(self, ship: str) -> str: + try: + response = requests.get( + url=f"{self.star_citizen_wiki_url}/vehicles/{ship}", + timeout=self.timeout, + headers=self.headers, + ) + response.raise_for_status() + except requests.exceptions.RequestException as e: + return f"Failed to fetch ship information: {e}" + ship_details = json.dumps(response.json()) + return ship_details + + async def _get_best_trading_route( + self, ship: str, position: str, moneyToSpend: float + ) -> str: + """Calculates the best trading route for the specified ship and position. + Note that the function arguments have to match the function_args from OpenAI, hence the camelCase! + """ + + cargo, qd = await self._get_ship_details(ship) + if not cargo or not qd: + return f"Could not find ship '{ship}' in the StarHead database." + + celestial_object_id = self._get_celestial_object_id(position) + if not celestial_object_id: + return f"Could not find celestial object '{position}' in the StarHead database." + + data = { + "startCelestialObjectId": celestial_object_id, + "quantumDriveId": qd["id"] if qd else None, + "maxAvailablScu": cargo, + "maxAvailableMoney": moneyToSpend, + "useOnlyWeaponFreeZones": False, + "onlySingleSections": True, + } + url = f"{self.starhead_url}/trading" + try: + response = requests.post( + url=url, + json=data, + timeout=self.timeout, + headers=self.headers, + ) + response.raise_for_status() + except requests.exceptions.RequestException as e: + return f"Failed to fetch trading route: {e}" + + parsed_response = response.json() + if parsed_response: + section = parsed_response[0] + return json.dumps(section) + return f"No route found for ship '{ship}' at '{position}' with '{moneyToSpend}' aUEC."
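+    # For illustration, a (hypothetical) function_args payload produced by + # the LLM maps directly onto the camelCase keyword arguments above: + # + # {"ship": "Caterpillar", "position": "Yela", "moneyToSpend": 100000} + # + # and is applied in execute_tool as _get_best_trading_route(**parameters).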
+ + def _get_celestial_object_id(self, name: str) -> Optional[int]: + """Finds the ID of the celestial object with the specified name.""" + return next( + ( + obj["id"] + for obj in self.celestial_objects + if obj["name"].lower() == name.lower() + ), + None, + ) + + async def _get_ship_details( + self, ship_name: str + ) -> tuple[Optional[int], Optional[dict[str, any]]]: + """Gets ship details including cargo capacity and quantum drive information.""" + vehicle = next( + ( + v + for v in self.vehicles + if self._format_ship_name(v).lower() == ship_name.lower() + ), + None, + ) + if vehicle: + cargo = vehicle.get("scuCargo") + loadouts = await self._get_ship_loadout(vehicle.get("id")) + if loadouts: + loadout = next( + (l for l in loadouts.get("loadouts") if l["isDefaultLayout"]), None + ) + qd = None + if loadout: + qd = next( + ( + qd + for qd in self.quantum_drives + for item in loadout.get("data") + if item.get("componentId") == qd.get("id") + ), + None, + ) + return cargo, qd + return None, None + + async def _get_ship_loadout( + self, ship_id: Optional[int] + ) -> Optional[dict[str, any]]: + """Retrieves loadout data for a given ship ID.""" + if ship_id: + try: + loadout = await self._fetch_data(f"vehicle/{ship_id}/loadout") + return loadout or None + except requests.HTTPError: + await self.printr.print_async( + f"Failed to fetch loadout data for ship with ID: {ship_id}", + color=LogType.ERROR, + ) + return None diff --git a/wingmen/open_ai_wingman.py b/wingmen/open_ai_wingman.py index 87b84918..54d79f2c 100644 --- a/wingmen/open_ai_wingman.py +++ b/wingmen/open_ai_wingman.py @@ -19,6 +19,7 @@ from providers.whispercpp import Whispercpp from services.audio_player import AudioPlayer from services.printr import Printr +from skills.skill_base import Skill from wingmen.wingman import Wingman printr = Printr() @@ -53,7 +54,9 @@ def __init__( # validate will set these: self.openai: OpenAi = None self.mistral: OpenAi = None - self.llama: OpenAi = None + self.groq: OpenAi = None + self.openrouter: OpenAi = None + self.local_llm: OpenAi = None self.openai_azure: OpenAiAzure = None self.elevenlabs: ElevenLabs = None self.xvasynth: XVASynth = None @@ -63,16 +66,14 @@ def __init__( self.pending_tool_calls = [] self.last_gpt_call = None - # every conversation starts with the "context" that the user has configured - self.messages = ( - [{"role": "system", "content": self.config.openai.context}] - if self.config.openai.context - else [] - ) + self.messages = [] """The conversation history that is used for the GPT calls""" self.azure_api_keys = {key: None for key in self.AZURE_SERVICES} + self.tool_skills: dict[str, Skill] = {} + self.skill_tools: list[dict] = [] + async def validate(self): errors = await super().validate() @@ -82,8 +83,14 @@ async def validate(self): if self.uses_provider("mistral"): await self.validate_and_set_mistral(errors) - if self.uses_provider("llama"): - await self.validate_and_set_llama(errors) + if self.uses_provider("groq"): + await self.validate_and_set_groq(errors) + + if self.uses_provider("openrouter"): + await self.validate_and_set_openrouter(errors) + + if self.uses_provider("local_llm"): + await self.validate_and_set_local_llm(errors) if self.uses_provider("elevenlabs"): await self.validate_and_set_elevenlabs(errors) @@ -119,11 +126,25 @@ def uses_provider(self, provider_type: str): self.summarize_provider == SummarizeProvider.MISTRAL, ] ) - elif provider_type == "llama": + elif provider_type == "groq": return any( [ - self.conversation_provider == ConversationProvider.LLAMA, -
self.summarize_provider == SummarizeProvider.LLAMA, + self.conversation_provider == ConversationProvider.GROQ, + self.summarize_provider == SummarizeProvider.GROQ, + ] + ) + elif provider_type == "openrouter": + return any( + [ + self.conversation_provider == ConversationProvider.OPENROUTER, + self.summarize_provider == SummarizeProvider.OPENROUTER, + ] + ) + elif provider_type == "local_llm": + return any( + [ + self.conversation_provider == ConversationProvider.LOCAL_LLM, + self.summarize_provider == SummarizeProvider.LOCAL_LLM, ] ) elif provider_type == "azure": @@ -155,6 +176,14 @@ def uses_provider(self, provider_type: str): ) return False + async def prepare_skill(self, skill: Skill): + # prepare the skill and skill tools + for tool_name, tool in skill.get_tools(): + self.tool_skills[tool_name] = skill + self.skill_tools.append(tool) + + skill.gpt_call = self.actual_llm_call + async def validate_and_set_openai(self, errors: list[WingmanInitializationError]): api_key = await self.retrieve_secret("openai", errors) if api_key: @@ -174,16 +203,33 @@ async def validate_and_set_mistral(self, errors: list[WingmanInitializationError base_url=self.config.mistral.endpoint, ) - async def validate_and_set_llama(self, errors: list[WingmanInitializationError]): - api_key = await self.retrieve_secret("llama", errors) + async def validate_and_set_groq(self, errors: list[WingmanInitializationError]): + api_key = await self.retrieve_secret("groq", errors) if api_key: # TODO: maybe use their native client (or LangChain) instead of OpenAI(?) - self.llama = OpenAi( + self.groq = OpenAi( api_key=api_key, - organization=self.config.openai.organization, - base_url=self.config.llama.endpoint, + base_url=self.config.groq.endpoint, ) + async def validate_and_set_openrouter( + self, errors: list[WingmanInitializationError] + ): + api_key = await self.retrieve_secret("openrouter", errors) + if api_key: + self.openrouter = OpenAi( + api_key=api_key, + base_url=self.config.openrouter.endpoint, + ) + + async def validate_and_set_local_llm( + self, errors: list[WingmanInitializationError] + ): + self.local_llm = OpenAi( + api_key="not-set", + base_url=self.config.local_llm.endpoint, + ) + async def validate_and_set_elevenlabs( self, errors: list[WingmanInitializationError] ): @@ -282,7 +328,9 @@ async def _transcribe(self, audio_input_wav: str) -> str | None: return transcript.text - async def _get_response_for_transcript(self, transcript: str) -> tuple[str, str]: + async def _get_response_for_transcript( + self, transcript: str + ) -> tuple[str, str, Skill | None]: """Gets the response for a given transcript. 
This function interprets the transcript, runs instant commands if triggered, @@ -301,14 +349,14 @@ async def _get_response_for_transcript(self, transcript: str) -> tuple[str, str] ) if instant_response: self._add_assistant_message(instant_response) - return instant_response, instant_response + return instant_response, instant_response, None # make a GPT call with the conversation history # if an instant command got executed, prevent tool calls to avoid duplicate executions - completion = await self._gpt_call(instant_command_executed is False) + completion = await self._llm_call(instant_command_executed is False) if completion is None: - return None, None + return None, None, None response_message, tool_calls = await self._process_completion(completion) @@ -316,14 +364,15 @@ async def _get_response_for_transcript(self, transcript: str) -> tuple[str, str] self._add_gpt_response(response_message, tool_calls) if tool_calls: - instant_response = await self._handle_tool_calls(tool_calls) + instant_response, skill = await self._handle_tool_calls(tool_calls) if instant_response: - return None, instant_response + return None, instant_response, None - summarize_response = self._summarize_function_calls() - return self._finalize_response(str(summarize_response)) + summarize_response = await self._summarize_function_calls() + summarize_response = self._finalize_response(str(summarize_response)) + return summarize_response, summarize_response, skill - return response_message.content, response_message.content + return response_message.content, response_message.content, None async def _fix_tool_calls(self, tool_calls): """Fixes tool calls that have a command name as function name. @@ -507,13 +556,8 @@ async def _cleanup_conversation_history(self): if remember_messages is None or len(self.messages) == 0: return 0 # Configuration not set, nothing to delete. - # The system message aka `context` does not count - context_offset = ( - 1 if self.messages and self.messages[0]["role"] == "system" else 0 - ) - # Find the cutoff index where to end deletion, making sure to only count 'user' messages towards the limit starting with newest messages. - cutoff_index = len(self.messages) - 1 + cutoff_index = len(self.messages) user_message_count = 0 for message in reversed(self.messages): if self.__get_message_role(message) == "user": @@ -526,10 +570,10 @@ async def _cleanup_conversation_history(self): if user_message_count < remember_messages: return 0 - total_deleted_messages = cutoff_index - context_offset # Messages to delete. + total_deleted_messages = cutoff_index # Messages to delete. # Remove the pending tool calls that are no longer needed. - for mesage in self.messages[context_offset:cutoff_index]: + for mesage in self.messages[:cutoff_index]: if ( self.__get_message_role(mesage) == "tool" and mesage.get("tool_call_id") in self.pending_tool_calls @@ -542,7 +586,7 @@ async def _cleanup_conversation_history(self): ) # Remove the messages before the cutoff index, exclusive of the system message. - del self.messages[context_offset:cutoff_index] + del self.messages[:cutoff_index] # Optional debugging printout. 
if self.debug and total_deleted_messages > 0: @@ -554,8 +598,8 @@ async def _cleanup_conversation_history(self): return total_deleted_messages def reset_conversation_history(self): - """Resets the conversation history by removing all messages except for the initial system message.""" - del self.messages[1:] + """Resets the conversation history by removing all messages.""" + self.messages = [] async def _try_instant_activation(self, transcript: str) -> str: """Tries to execute an instant activation command if present in the transcript. @@ -574,71 +618,110 @@ async def _try_instant_activation(self, transcript: str) -> str: return None, True return None, False - async def _gpt_call(self, allow_tool_calls: bool = True): - """Makes the primary GPT call with the conversation history and tools enabled. + async def _add_context(self, messages): + """Builds the context and inserts it into the messages.""" + skill_prompts = "" + for skill in self.skills: + prompt = await skill.get_prompt() + if prompt: + skill_prompts += "\n\n" + skill.name + "\n\n" + prompt - Returns: - The GPT completion object or None if the call fails. - """ - if self.debug: - await printr.print_async( - f"Calling GPT with {(len(self.messages) - 1)} messages (excluding context)", - color=LogType.INFO, - ) + context = self.config.prompts.system_prompt.format( + backstory=self.config.prompts.backstory, skills=skill_prompts + ) + messages.insert(0, {"role": "system", "content": context}) - # save request time for later comparison - thiscall = time.time() - self.last_gpt_call = thiscall + async def actual_llm_call(self, original_messages, tools: list[dict] = None): + """ + Perform the actual LLM call with the messages provided. + """ + messages = original_messages.copy() - # build tools - if allow_tool_calls: - tools = self._build_tools() - else: - tools = None + await self._add_context(messages) if self.conversation_provider == ConversationProvider.AZURE: completion = self.openai_azure.ask( - messages=self.messages, + messages=messages, api_key=self.azure_api_keys["conversation"], config=self.config.azure.conversation, tools=tools, ) elif self.conversation_provider == ConversationProvider.OPENAI: completion = self.openai.ask( - messages=self.messages, + messages=messages, tools=tools, - model=self.config.openai.conversation_model, + model=self.config.openai.conversation_model.value, ) elif self.conversation_provider == ConversationProvider.MISTRAL: completion = self.mistral.ask( - messages=self.messages, + messages=messages, tools=tools, - model=self.config.mistral.conversation_model, + model=self.config.mistral.conversation_model.value, ) - elif self.conversation_provider == ConversationProvider.LLAMA: - completion = self.llama.ask( - messages=self.messages, + elif self.conversation_provider == ConversationProvider.GROQ: + completion = self.groq.ask( + messages=messages, tools=tools, - model=self.config.llama.conversation_model, + model=self.config.groq.conversation_model.value, + ) + elif self.conversation_provider == ConversationProvider.OPENROUTER: + completion = self.openrouter.ask( + messages=messages, + tools=tools, + model=self.config.openrouter.conversation_model, + ) + elif self.conversation_provider == ConversationProvider.LOCAL_LLM: + completion = self.local_llm.ask( + messages=messages, + tools=tools, + model=self.config.local_llm.conversation_model, ) elif self.conversation_provider == ConversationProvider.WINGMAN_PRO: completion = self.wingman_pro.ask( - messages=self.messages, + messages=messages,
deployment=self.config.wingman_pro.conversation_deployment, tools=tools, ) + return completion + + async def _llm_call(self, allow_tool_calls: bool = True): + """Makes the primary LLM call with the conversation history and tools enabled. + + Returns: + The LLM completion object or None if the call fails. + """ + + # save request time for later comparison + thiscall = time.time() + self.last_gpt_call = thiscall + + # build tools + tools = self._build_tools() if allow_tool_calls else None + + if self.debug: + self.start_execution_benchmark() + await printr.print_async( + f"Calling LLM with {len(self.messages)} messages (excluding context) and {len(tools) if tools else 0} tools.", + color=LogType.INFO, + ) + + completion = await self.actual_llm_call(self.messages, tools) + + if self.debug: + await self.print_execution_time(reset_timer=True) + + # if the request isn't the most recent one, ignore the response if self.last_gpt_call != thiscall: await printr.print_async( - "GPT call was cancelled due to a new call.", color=LogType.WARNING + "LLM call was cancelled due to a new call.", color=LogType.WARNING ) return None return completion async def _process_completion(self, completion): - """Processes the completion returned by the GPT call. + """Processes the completion returned by the LLM call. Args: completion: The completion object from an OpenAI call. @@ -685,6 +768,7 @@ async def _handle_tool_calls(self, tool_calls): ( function_response, instant_response, + skill, ) = await self._execute_command_by_function_call( function_name, function_args ) @@ -696,66 +780,88 @@ async def _handle_tool_calls(self, tool_calls): # adding a new tool response self._add_tool_response(tool_call, function_response) - return instant_response + return instant_response, skill - def _summarize_function_calls(self): + async def _summarize_function_calls(self): """Summarizes the function call responses using the GPT model specified for summarization in the configuration. Returns: The content of the GPT response to the function call summaries. """ + summarize_response = await self._actual_summarize_function_call(self.messages) + + if summarize_response is None: + return None + + # do not tamper with this message as it will lead to 400 errors! + message = summarize_response.choices[0].message + self.messages.append(message) + return message.content + + async def _actual_summarize_function_call(self, original_messages): + """ + Perform the actual summarize LLM call with the messages provided.
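+        The system prompt/context is added to a copy of the messages, so + the stored conversation history itself is not modified.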
+ """ + messages = original_messages.copy() + + await self._add_context(messages) + if self.summarize_provider == SummarizeProvider.AZURE: summarize_response = self.openai_azure.ask( - messages=self.messages, + messages=messages, api_key=self.azure_api_keys["summarize"], config=self.config.azure.summarize, ) elif self.summarize_provider == SummarizeProvider.OPENAI: summarize_response = self.openai.ask( messages=self.messages, - model=self.config.openai.summarize_model, + model=self.config.openai.summarize_model.value, ) elif self.summarize_provider == SummarizeProvider.MISTRAL: summarize_response = self.mistral.ask( messages=self.messages, - model=self.config.mistral.summarize_model, + model=self.config.mistral.summarize_model.value, ) - elif self.summarize_provider == SummarizeProvider.LLAMA: - summarize_response = self.llama.ask( + elif self.summarize_provider == SummarizeProvider.GROQ: + summarize_response = self.groq.ask( messages=self.messages, - model=self.config.mistral.summarize_model, + model=self.config.groq.summarize_model.value, + ) + elif self.summarize_provider == SummarizeProvider.OPENROUTER: + summarize_response = self.openrouter.ask( + messages=self.messages, + model=self.config.openrouter.summarize_model, + ) + elif self.summarize_provider == SummarizeProvider.LOCAL_LLM: + summarize_response = self.local_llm.ask( + messages=self.messages, + model=self.config.local_llm.summarize_model, ) elif self.summarize_provider == SummarizeProvider.WINGMAN_PRO: summarize_response = self.wingman_pro.ask( - messages=self.messages, + messages=messages, deployment=self.config.wingman_pro.summarize_deployment, ) - if summarize_response is None: - return None + return summarize_response - # do not tamper with this message as it will lead to 400 errors! - message = summarize_response.choices[0].message - self.messages.append(message) - return message.content - - def _finalize_response(self, summarize_response: str) -> tuple[str, str]: + def _finalize_response(self, summarize_response: str) -> str: """Finalizes the response based on the call of the second (summarize) GPT call. Args: summarize_response (str): The response content from the second GPT call. Returns: - A tuple containing the final response to the user. + The final response to the user. """ if summarize_response is None: - return self.messages[-1]["content"], self.messages[-1]["content"] - return summarize_response, summarize_response + return self.messages[-1]["content"] + return summarize_response async def _execute_command_by_function_call( self, function_name: str, function_args: dict[str, any] - ) -> tuple[str, str]: + ) -> tuple[str, str, Skill | None]: """ Uses an OpenAI function call to execute a command. If it's an instant activation_command, one if its responses will be played. 
@@ -770,8 +876,9 @@ async def _execute_command_by_function_call( """ function_response = "" instant_response = "" + used_skill = None if function_name == "execute_command": - # get the command based on the argument passed by GPT + # get the command based on the argument passed by the LLM command = self._get_command(function_args["command_name"]) # execute the command function_response = await self._execute_command(command) @@ -780,7 +887,17 @@ async def _execute_command_by_function_call( instant_response = self._select_command_response(command) await self._play_to_user(instant_response) - return function_response, instant_response + # Go through the skills and check if the function name matches any of the tools + if function_name in self.tool_skills: + skill = self.tool_skills[function_name] + function_response, instant_response = await skill.execute_tool( + function_name, function_args + ) + used_skill = skill + if instant_response: + await self._play_to_user(instant_response) + + return function_response, instant_response, used_skill async def _play_to_user(self, text: str): """Plays audio to the user using the configured TTS Provider (default: OpenAI TTS). @@ -894,6 +1011,11 @@ def _build_tools(self) -> list[dict]: }, }, ] + + # extend with skill tools + for tool in self.skill_tools: + tools.append(tool) + return tools def __get_message_role(self, message): @@ -923,5 +1045,5 @@ def _remove_emote_text(self, input_string): end = input_string.find("*", start + 1) if end == -1: break - input_string = input_string[:start] + input_string[end + 1:] - return input_string \ No newline at end of file + input_string = input_string[:start] + input_string[end + 1 :] + return input_string diff --git a/wingmen/wingman.py b/wingmen/wingman.py index 5e7a596d..7b445bc2 100644 --- a/wingmen/wingman.py +++ b/wingmen/wingman.py @@ -1,8 +1,6 @@ import random import time from difflib import SequenceMatcher -from importlib import import_module, util -from os import path import keyboard.keyboard as keyboard import mouse.mouse as mouse from api.interface import ( @@ -13,9 +11,11 @@ ) from api.enums import LogSource, LogType, WingmanInitializationErrorType from services.audio_player import AudioPlayer +from services.module_manager import ModuleManager from services.secret_keeper import SecretKeeper from services.printr import Printr -from services.file import get_writable_dir + +from skills.skill_base import Skill printr = Printr() @@ -58,52 +58,15 @@ def __init__( self.execution_start: None | float = None """Used for benchmarking executon times. The timer is (re-)started whenever the process function starts.""" - self.debug: bool = self.config.features.debug_mode - """If enabled, the Wingman will skip executing any keypresses. It will also print more debug messages and benchmark results.""" + self.debug: bool = self.settings.debug_mode + """If enabled, the Wingman will print more debug messages and benchmark results.""" self.tts_provider = self.config.features.tts_provider self.stt_provider = self.config.features.stt_provider self.conversation_provider = self.config.features.conversation_provider self.summarize_provider = self.config.features.summarize_provider - @staticmethod - def create_dynamically( - name: str, - config: WingmanConfig, - settings: SettingsConfig, - audio_player: AudioPlayer, - ): - """Dynamically creates a Wingman instance from a module path and class name - - Args: - name (str): The name of the wingman. This is the key you gave it in the config, e.g. 
"atc" - config (WingmanConfig): All "general" config entries merged with the specific Wingman config settings. The Wingman takes precedence and overrides the general config. You can just add new keys to the config and they will be available here. - settings (SettingsConfig): The general user settings. - audio_player (AudioPlayer): The audio player handling the playback of audio files. - """ - - try: - # try to load from app dir first - module = import_module(config.custom_class.module) - except ModuleNotFoundError: - # split module into name and path - module_name = config.custom_class.module.split(".")[-1] - module_path = "" - for sub_dir in config.custom_class.module.split(".")[:-1]: - module_path = path.join(module_path, sub_dir) - module_path = path.join(get_writable_dir(module_path), module_name + ".py") - # load from alternative absolute file path - spec = util.spec_from_file_location(module_name, module_path) - module = util.module_from_spec(spec) - spec.loader.exec_module(module) - DerivedWingmanClass = getattr(module, config.custom_class.name) - instance = DerivedWingmanClass( - name=name, - config=config, - settings=settings, - audio_player=audio_player, - ) - return instance + self.skills: list[Skill] = [] def get_record_key(self) -> str | int: """Returns the activation or "push-to-talk" key for this Wingman.""" @@ -113,6 +76,10 @@ def get_record_button(self) -> str: """Returns the activation or "push-to-talk" mouse button for this Wingman.""" return self.config.record_mouse_button + def start_execution_benchmark(self): + """Starts the execution benchmark timer.""" + self.execution_start = time.perf_counter() + async def print_execution_time(self, reset_timer=False): """Prints the current time since the execution started (in seconds).""" if self.execution_start: @@ -124,10 +91,6 @@ async def print_execution_time(self, reset_timer=False): if reset_timer: self.start_execution_benchmark() - def start_execution_benchmark(self): - """Starts the execution benchmark timer.""" - self.execution_start = time.perf_counter() - # ──────────────────────────────────── Hooks ─────────────────────────────────── # async def validate(self) -> list[WingmanInitializationError]: @@ -164,13 +127,58 @@ async def retrieve_secret(self, secret_name, errors): ) return api_key - # TODO: this should be async - def prepare(self): + async def prepare(self): """This method is called only once when the Wingman is instantiated by Tower. - It is run AFTER validate() so you can access validated params safely here. + It is run AFTER validate() and AFTER init_skills() so you can access validated params safely here. You can override it if you need to load async data from an API or file.""" - pass + + async def init_skills(self) -> list[WingmanInitializationError]: + """This method is called only once when the Wingman is instantiated by Tower. + It is run AFTER validate() so you can access validated params safely here. 
+ It is used to load and init the skills of the Wingman.""" + errors = [] + skills_config = self.config.skills + if not skills_config: + return errors + + for skill_config in skills_config: + try: + skill = ModuleManager.load_skill( + config=skill_config, + wingman_config=self.config, + settings=self.settings, + ) + if skill: + validation_errors = await skill.validate() + errors.extend(validation_errors) + + if len(validation_errors) == 0: + self.skills.append(skill) + await self.prepare_skill(skill) + printr.print( + f"Skill '{skill_config.name}' loaded successfully.", + color=LogType.INFO, + server_only=True, + ) + else: + await printr.print_async( + f"Skill '{skill_config.name}' could not be loaded: {' '.join(error.message for error in validation_errors)}", + color=LogType.ERROR, + ) + except Exception as e: + await printr.print_async( + f"Could not load skill '{skill_config.name}': {str(e)}", + color=LogType.ERROR, + ) + + return errors + + async def prepare_skill(self, skill: Skill): + """This method is called only once when the Skill is instantiated. + It is run AFTER validate() so you can access validated params safely here. + + You can override it if you need to react to data from this skill.""" def reset_conversation_history(self): """This function is called when the user triggers the ResetConversationHistory command. @@ -207,7 +215,7 @@ async def process(self, audio_input_wav: str = None, transcript: str = None): # transcribe the audio. transcript = await self._transcribe(audio_input_wav) - if self.debug: + if self.debug and not transcript: await self.print_execution_time(reset_timer=True) if transcript: @@ -224,8 +232,8 @@ async def process(self, audio_input_wav: str = None, transcript: str = None): ) # process the transcript further. This is where you can do your magic. Return a string that is the "answer" to your passed transcript. - process_result, instant_response = await self._get_response_for_transcript( - transcript + process_result, instant_response, skill = ( + await self._get_response_for_transcript(transcript) ) if self.debug: @@ -238,20 +246,13 @@ async def process(self, audio_input_wav: str = None, transcript: str = None): color=LogType.POSITIVE, source=LogSource.WINGMAN, source_name=self.name, + skill_name=skill.name if skill else "", ) - if self.debug: - await printr.print_async( - "Playing response back to user...", color=LogType.INFO - ) - # the last step in the chain. You'll probably want to play the response to the user as audio using a TTS provider or mechanism of your choice. if process_result: await self._play_to_user(str(process_result)) - if self.debug: - await self.print_execution_time() - # ───────────────── virtual methods / hooks ───────────────── # async def _transcribe(self, audio_input_wav: str) -> str | None: """Transcribes the recorded audio to text using the OpenAI Whisper API. @@ -265,7 +266,9 @@ async def _transcribe(self, audio_input_wav: str) -> str | None: """ return None - async def _get_response_for_transcript(self, transcript: str) -> tuple[str, str]: + async def _get_response_for_transcript( + self, transcript: str + ) -> tuple[str, str, Skill | None]: """Processes the transcript and return a response as text. This where you'll do most of your work. Pass the transcript to AI providers and build a conversation. Call commands or APIs. Play temporary results to the user etc. @@ -276,7 +279,7 @@ async def _get_response_for_transcript(self, transcript: str) -> tuple[str, str] Returns: A tuple of strings representing the response to a function call and/or an instant response.
""" - return ("", "") + return ("", "", None) async def _play_to_user(self, text: str): """You'll probably want to play the response to the user as audio using a TTS provider or mechanism of your choice. @@ -363,7 +366,7 @@ async def _execute_command(self, command: CommandConfig) -> str: if not command: return "Command not found" - if len(command.actions or []) > 0 and not self.debug: + if len(command.actions or []) > 0: await printr.print_async( f"Executing command: {command.name}", color=LogType.INFO ) @@ -377,12 +380,6 @@ async def _execute_command(self, command: CommandConfig) -> str: f"No actions found for command: {command.name}", color=LogType.WARNING ) - if self.debug: - await printr.print_async( - "Skipping actual keypress execution in debug_mode...", - color=LogType.WARNING, - ) - # handle the global special commands: if command.name == "ResetConversationHistory": self.reset_conversation_history()