From e4be79604e254fbaab453cae465dad78b5e20917 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 2 Oct 2025 17:08:21 +0200 Subject: [PATCH 01/78] SNOW-2306184: config refactory entry point guarded by env --- src/snowflake/cli/api/config_provider.py | 196 ++++++++++++++++++ tests/api/test_config_provider.py | 119 +++++++++++ .../test_function_old_build.ambr | 1 + 3 files changed, 316 insertions(+) create mode 100644 src/snowflake/cli/api/config_provider.py create mode 100644 tests/api/test_config_provider.py diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py new file mode 100644 index 0000000000..28f143cc38 --- /dev/null +++ b/src/snowflake/cli/api/config_provider.py @@ -0,0 +1,196 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import os +from abc import ABC, abstractmethod +from typing import Any, Optional + +ALTERNATIVE_CONFIG_ENV_VAR = "SNOWFLAKE_CLI_CONFIG_V2_ENABLED" + + +class ConfigProvider(ABC): + """ + Abstract base class for configuration providers. + All methods must return data in the same format as current implementation. + """ + + @abstractmethod + def get_section(self, *path) -> dict: + """Get configuration section at specified path.""" + ... + + @abstractmethod + def get_value(self, *path, key: str, default: Optional[Any] = None) -> Any: + """Get single configuration value.""" + ... 
+ + @abstractmethod + def set_value(self, path: list[str], value: Any) -> None: + """Set configuration value at path.""" + ... + + @abstractmethod + def unset_value(self, path: list[str]) -> None: + """Remove configuration value at path.""" + ... + + @abstractmethod + def section_exists(self, *path) -> bool: + """Check if configuration section exists.""" + ... + + @abstractmethod + def read_config(self) -> None: + """Load configuration from source.""" + ... + + @abstractmethod + def get_connection_dict(self, connection_name: str) -> dict: + """Get connection configuration by name.""" + ... + + @abstractmethod + def get_all_connections(self) -> dict: + """Get all connection configurations.""" + ... + + +class LegacyConfigProvider(ConfigProvider): + """ + Current TOML-based configuration provider. + Wraps existing implementation for compatibility. + """ + + def get_section(self, *path) -> dict: + from snowflake.cli.api.config import get_config_section_internal + + return get_config_section_internal(*path) + + def get_value(self, *path, key: str, default: Optional[Any] = None) -> Any: + from snowflake.cli.api.config import Empty, get_config_value_internal + + return get_config_value_internal( + *path, key=key, default=default if default is not None else Empty + ) + + def set_value(self, path: list[str], value: Any) -> None: + from snowflake.cli.api.config import set_config_value_internal + + set_config_value_internal(path, value) + + def unset_value(self, path: list[str]) -> None: + from snowflake.cli.api.config import unset_config_value_internal + + unset_config_value_internal(path) + + def section_exists(self, *path) -> bool: + from snowflake.cli.api.config import config_section_exists_internal + + return config_section_exists_internal(*path) + + def read_config(self) -> None: + from snowflake.cli.api.config import _read_config_file + + _read_config_file() + + def get_connection_dict(self, connection_name: str) -> dict: + from snowflake.cli.api.config import 
get_connection_dict_internal + + return get_connection_dict_internal(connection_name) + + def get_all_connections(self) -> dict: + from snowflake.cli.api.config import get_all_connections_internal + + return get_all_connections_internal() + + +class AlternativeConfigProvider(ConfigProvider): + """ + New configuration provider implementation. + To be implemented with new logic while maintaining same output format. + """ + + def __init__(self): + pass + + def get_section(self, *path) -> dict: + raise NotImplementedError("Alternative config provider not yet implemented") + + def get_value(self, *path, key: str, default: Optional[Any] = None) -> Any: + raise NotImplementedError("Alternative config provider not yet implemented") + + def set_value(self, path: list[str], value: Any) -> None: + raise NotImplementedError("Alternative config provider not yet implemented") + + def unset_value(self, path: list[str]) -> None: + raise NotImplementedError("Alternative config provider not yet implemented") + + def section_exists(self, *path) -> bool: + raise NotImplementedError("Alternative config provider not yet implemented") + + def read_config(self) -> None: + raise NotImplementedError("Alternative config provider not yet implemented") + + def get_connection_dict(self, connection_name: str) -> dict: + raise NotImplementedError("Alternative config provider not yet implemented") + + def get_all_connections(self) -> dict: + raise NotImplementedError("Alternative config provider not yet implemented") + + +def _is_alternative_config_enabled() -> bool: + """ + Check if alternative configuration handling is enabled via environment variable. + Does not use the built-in feature flags mechanism. + """ + return os.environ.get(ALTERNATIVE_CONFIG_ENV_VAR, "").lower() in ( + "1", + "true", + "yes", + "on", + ) + + +def get_config_provider() -> ConfigProvider: + """ + Factory function to get the appropriate configuration provider + based on environment variable. 
+ """ + if _is_alternative_config_enabled(): + return AlternativeConfigProvider() + return LegacyConfigProvider() + + +_config_provider_instance: Optional[ConfigProvider] = None + + +def get_config_provider_singleton() -> ConfigProvider: + """ + Get or create singleton instance of configuration provider. + """ + global _config_provider_instance + if _config_provider_instance is None: + _config_provider_instance = get_config_provider() + return _config_provider_instance + + +def reset_config_provider(): + """ + Reset the config provider singleton. + Useful for testing and when config source changes. + """ + global _config_provider_instance + _config_provider_instance = None diff --git a/tests/api/test_config_provider.py b/tests/api/test_config_provider.py new file mode 100644 index 0000000000..69f5fb6de7 --- /dev/null +++ b/tests/api/test_config_provider.py @@ -0,0 +1,119 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +import pytest +from snowflake.cli.api.config_provider import ( + ALTERNATIVE_CONFIG_ENV_VAR, + AlternativeConfigProvider, + LegacyConfigProvider, + get_config_provider, + reset_config_provider, +) + + +def test_legacy_provider_by_default(): + """Should use legacy provider when env var not set.""" + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + + reset_config_provider() + provider = get_config_provider() + assert isinstance(provider, LegacyConfigProvider) + + +def test_alternative_provider_when_enabled(): + """Should use alternative provider when env var is set.""" + os.environ[ALTERNATIVE_CONFIG_ENV_VAR] = "1" + + reset_config_provider() + provider = get_config_provider() + assert isinstance(provider, AlternativeConfigProvider) + + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + + +@pytest.mark.parametrize("value", ["true", "True", "TRUE", "yes", "Yes", "on", "1"]) +def test_alternative_provider_various_values(value): + """Should enable alternative provider for various truthy values.""" + os.environ[ALTERNATIVE_CONFIG_ENV_VAR] = value + + reset_config_provider() + provider = get_config_provider() + assert isinstance(provider, AlternativeConfigProvider) + + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + + +@pytest.mark.parametrize("value", ["0", "false", "False", "no", "off", ""]) +def test_legacy_provider_for_falsy_values(value): + """Should use legacy provider for falsy env var values.""" + os.environ[ALTERNATIVE_CONFIG_ENV_VAR] = value + + reset_config_provider() + provider = get_config_provider() + assert isinstance(provider, LegacyConfigProvider) + + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + + +def test_provider_singleton(): + """Should return same instance on multiple calls.""" + reset_config_provider() + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider1 = get_config_provider_singleton() + provider2 = get_config_provider_singleton() + assert provider1 is 
provider2 + + +def test_reset_provider(): + """Should create new instance after reset.""" + reset_config_provider() + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider1 = get_config_provider_singleton() + reset_config_provider() + provider2 = get_config_provider_singleton() + assert provider1 is not provider2 + + +def test_alternative_provider_methods_not_implemented(): + """AlternativeConfigProvider methods should raise NotImplementedError.""" + provider = AlternativeConfigProvider() + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.get_section("test") + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.get_value("test", key="key") + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.set_value(["test"], "value") + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.unset_value(["test"]) + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.section_exists("test") + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.read_config() + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.get_connection_dict("test") + + with pytest.raises(NotImplementedError, match="not yet implemented"): + provider.get_all_connections() diff --git a/tests/snowpark/__snapshots__/test_function_old_build.ambr b/tests/snowpark/__snapshots__/test_function_old_build.ambr index edde16c81f..2d46096439 100644 --- a/tests/snowpark/__snapshots__/test_function_old_build.ambr +++ b/tests/snowpark/__snapshots__/test_function_old_build.ambr @@ -26,6 +26,7 @@ | string) | | | +------------------------------------------------------------------------------+ + ''' # --- # name: test_deploy_function_fully_qualified_name_duplicated_database[database error] From 6df5d6cd71d3f7449448b85319a721aff6c5b2b8 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: 
Fri, 3 Oct 2025 08:25:16 +0200 Subject: [PATCH 02/78] SNOW-2306184: config refactory entry point guarded by env - fix snapshot --- tests/snowpark/__snapshots__/test_function_old_build.ambr | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/snowpark/__snapshots__/test_function_old_build.ambr b/tests/snowpark/__snapshots__/test_function_old_build.ambr index 2d46096439..edde16c81f 100644 --- a/tests/snowpark/__snapshots__/test_function_old_build.ambr +++ b/tests/snowpark/__snapshots__/test_function_old_build.ambr @@ -26,7 +26,6 @@ | string) | | | +------------------------------------------------------------------------------+ - ''' # --- # name: test_deploy_function_fully_qualified_name_duplicated_database[database error] From dbcebc31e22328e06bef63a66ee5f8b41518dd48 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 3 Oct 2025 14:54:47 +0200 Subject: [PATCH 03/78] SNOW-2306184: config refactory - core abstraction --- src/snowflake/cli/api/config_ng/__init__.py | 39 ++ src/snowflake/cli/api/config_ng/core.py | 220 ++++++++ tests/config_ng/__init__.py | 17 + tests/config_ng/test_config_value.py | 309 ++++++++++++ tests/config_ng/test_resolution_history.py | 529 ++++++++++++++++++++ tests/config_ng/test_source_priority.py | 131 +++++ tests/config_ng/test_value_source.py | 295 +++++++++++ 7 files changed, 1540 insertions(+) create mode 100644 src/snowflake/cli/api/config_ng/__init__.py create mode 100644 src/snowflake/cli/api/config_ng/core.py create mode 100644 tests/config_ng/__init__.py create mode 100644 tests/config_ng/test_config_value.py create mode 100644 tests/config_ng/test_resolution_history.py create mode 100644 tests/config_ng/test_source_priority.py create mode 100644 tests/config_ng/test_value_source.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py new file mode 100644 index 0000000000..5f6fd28f69 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -0,0 +1,39 @@ +# Copyright (c) 
2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Enhanced Configuration System - Next Generation (NG) + +This package implements a layered, extensible configuration system with: +- Clear precedence rules (CLI > Environment > Files) +- Migration support (SnowCLI and SnowSQL compatibility) +- Complete resolution history tracking +- Read-only, immutable configuration sources +""" + +from snowflake.cli.api.config_ng.core import ( + ConfigValue, + ResolutionEntry, + ResolutionHistory, + SourcePriority, + ValueSource, +) + +__all__ = [ + "ConfigValue", + "ResolutionEntry", + "ResolutionHistory", + "SourcePriority", + "ValueSource", +] diff --git a/src/snowflake/cli/api/config_ng/core.py b/src/snowflake/cli/api/config_ng/core.py new file mode 100644 index 0000000000..e22f48e270 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/core.py @@ -0,0 +1,220 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +Core abstractions for the enhanced configuration system. + +This module implements the foundational data structures and interfaces: +- SourcePriority: Defines precedence levels +- ConfigValue: Immutable value container with provenance +- ValueSource: Common protocol for all configuration sources +- ResolutionHistory: Tracks the complete resolution process +""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from datetime import datetime +from enum import Enum +from typing import Any, Dict, List, Optional + + +class SourcePriority(Enum): + """ + Defines top-level precedence for configuration sources. + Lower numeric value = higher priority. + """ + + CLI_ARGUMENT = 1 # Highest: command-line arguments + ENVIRONMENT = 2 # Medium: environment variables + FILE = 3 # Lowest: configuration files + + +@dataclass(frozen=True) +class ConfigValue: + """ + Immutable configuration value with full provenance tracking. + Stores both parsed value and original raw value. + """ + + key: str + value: Any + source_name: str + priority: SourcePriority + raw_value: Optional[Any] = None + + def __repr__(self) -> str: + """Readable representation showing conversion if applicable.""" + value_display = f"{self.value}" + if self.raw_value is not None and self.raw_value != self.value: + value_display = f"{self.raw_value} → {self.value}" + return f"ConfigValue({self.key}={value_display}, from {self.source_name})" + + +class ValueSource(ABC): + """ + Common interface for all configuration sources and handlers. + All implementations are READ-ONLY discovery mechanisms. + """ + + @property + @abstractmethod + def source_name(self) -> str: + """ + Unique identifier for this source. + Examples: "cli_arguments", "snowflake_cli_env", "toml:connections" + """ + ... + + @property + @abstractmethod + def priority(self) -> SourcePriority: + """Top-level priority for this source.""" + ... 
+ + @abstractmethod + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Discover configuration values from this source. + + Args: + key: Specific key to discover, or None to discover all values + + Returns: + Dictionary mapping configuration keys to ConfigValue objects. + Returns empty dict if no values found. + """ + ... + + @abstractmethod + def supports_key(self, key: str) -> bool: + """ + Check if this source can provide the given configuration key. + + Args: + key: Configuration key to check + + Returns: + True if this source supports the key, False otherwise + """ + ... + + +@dataclass(frozen=True) +class ResolutionEntry: + """ + Represents a single value discovery during resolution. + Immutable record of what was found where and when. + """ + + config_value: ConfigValue + timestamp: datetime + was_used: bool + overridden_by: Optional[str] = None + + +@dataclass +class ResolutionHistory: + """ + Complete resolution history for a single configuration key. + Shows the full precedence chain from lowest to highest priority. 
+ """ + + key: str + entries: List[ResolutionEntry] = field(default_factory=list) + final_value: Optional[Any] = None + default_used: bool = False + + @property + def sources_consulted(self) -> List[str]: + """List of all source names that were consulted.""" + return [entry.config_value.source_name for entry in self.entries] + + @property + def values_considered(self) -> List[Any]: + """List of all values that were considered.""" + return [entry.config_value.value for entry in self.entries] + + @property + def selected_entry(self) -> Optional[ResolutionEntry]: + """The entry that was ultimately selected.""" + for entry in self.entries: + if entry.was_used: + return entry + return None + + @property + def overridden_entries(self) -> List[ResolutionEntry]: + """All entries that were overridden by higher priority sources.""" + return [entry for entry in self.entries if not entry.was_used] + + def format_chain(self) -> str: + """ + Format the resolution chain as a readable string. + + Example output: + account resolution chain (4 sources): + 1. ❌ snowsql_config: "old_account" (overridden by cli_arguments) + 2. ❌ toml:connections: "new_account" (overridden by cli_arguments) + 3. ❌ snowflake_cli_env: "env_account" (overridden by cli_arguments) + 4. ✅ cli_arguments: "final_account" (SELECTED) + """ + lines = [f"{self.key} resolution chain ({len(self.entries)} sources):"] + + for i, entry in enumerate(self.entries, 1): + cv = entry.config_value + status_icon = "✅" if entry.was_used else "❌" + + if entry.was_used: + status_text = "(SELECTED)" + elif entry.overridden_by: + status_text = f"(overridden by {entry.overridden_by})" + else: + status_text = "(not used)" + + # Show raw value if different from parsed value + value_display = f'"{cv.value}"' + if cv.raw_value is not None and cv.raw_value != cv.value: + value_display = f'"{cv.raw_value}" → {cv.value}' + + lines.append( + f" {i}. 
{status_icon} {cv.source_name}: {value_display} {status_text}" + ) + + if self.default_used: + lines.append(f" Default value used: {self.final_value}") + + return "\n".join(lines) + + def to_dict(self) -> dict: + """Convert to dictionary for JSON serialization/export.""" + return { + "key": self.key, + "final_value": self.final_value, + "default_used": self.default_used, + "sources_consulted": self.sources_consulted, + "entries": [ + { + "source": entry.config_value.source_name, + "value": entry.config_value.value, + "raw_value": entry.config_value.raw_value, + "priority": entry.config_value.priority.name, + "was_used": entry.was_used, + "overridden_by": entry.overridden_by, + "timestamp": entry.timestamp.isoformat(), + } + for entry in self.entries + ], + } diff --git a/tests/config_ng/__init__.py b/tests/config_ng/__init__.py new file mode 100644 index 0000000000..c64cc0301a --- /dev/null +++ b/tests/config_ng/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Tests for the Enhanced Configuration System (config_ng). +""" diff --git a/tests/config_ng/test_config_value.py b/tests/config_ng/test_config_value.py new file mode 100644 index 0000000000..ef650be4c3 --- /dev/null +++ b/tests/config_ng/test_config_value.py @@ -0,0 +1,309 @@ +# Copyright (c) 2024 Snowflake Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for ConfigValue dataclass. + +Tests verify: +- Immutability (frozen dataclass) +- Field values and types +- Raw value preservation +- Type conversions +- Representation formatting +""" + +import pytest +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority + + +class TestConfigValue: + """Test suite for ConfigValue dataclass.""" + + def test_create_basic_config_value(self): + """Should create a basic ConfigValue with required fields.""" + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + assert cv.key == "account" + assert cv.value == "my_account" + assert cv.source_name == "cli_arguments" + assert cv.priority == SourcePriority.CLI_ARGUMENT + assert cv.raw_value is None + + def test_create_config_value_with_raw_value(self): + """Should create ConfigValue with raw value preservation.""" + cv = ConfigValue( + key="port", + value=443, + source_name="snowflake_cli_env", + priority=SourcePriority.ENVIRONMENT, + raw_value="443", + ) + + assert cv.key == "port" + assert cv.value == 443 + assert cv.raw_value == "443" + assert isinstance(cv.value, int) + assert isinstance(cv.raw_value, str) + + def test_config_value_is_immutable(self): + """ConfigValue should be immutable (frozen dataclass).""" + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + 
priority=SourcePriority.CLI_ARGUMENT, + ) + + with pytest.raises(Exception): + cv.key = "new_key" + + with pytest.raises(Exception): + cv.value = "new_value" + + with pytest.raises(Exception): + cv.source_name = "new_source" + + def test_config_value_equality(self): + """ConfigValue instances with same data should be equal.""" + cv1 = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + cv2 = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + assert cv1 == cv2 + + def test_config_value_inequality(self): + """ConfigValue instances with different data should not be equal.""" + cv1 = ConfigValue( + key="account", + value="account1", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + cv2 = ConfigValue( + key="account", + value="account2", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + assert cv1 != cv2 + + def test_repr_without_conversion(self): + """__repr__ should show value only when no conversion occurred.""" + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + repr_str = repr(cv) + assert "account=my_account" in repr_str + assert "cli_arguments" in repr_str + assert "→" not in repr_str + + def test_repr_with_conversion(self): + """__repr__ should show conversion when raw_value differs from value.""" + cv = ConfigValue( + key="port", + value=443, + source_name="snowflake_cli_env", + priority=SourcePriority.ENVIRONMENT, + raw_value="443", + ) + + repr_str = repr(cv) + assert "port" in repr_str + assert "443" in repr_str + assert "→" in repr_str + assert "snowflake_cli_env" in repr_str + + def test_repr_with_same_raw_and_parsed_value(self): + """__repr__ should not show conversion when values are the same.""" + cv = ConfigValue( + key="account", + value="my_account", + 
source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + raw_value="my_account", + ) + + repr_str = repr(cv) + assert "→" not in repr_str + + def test_boolean_conversion_example(self): + """Should handle boolean conversion from string.""" + cv = ConfigValue( + key="enable_diag", + value=True, + source_name="snowflake_cli_env", + priority=SourcePriority.ENVIRONMENT, + raw_value="true", + ) + + assert cv.value is True + assert cv.raw_value == "true" + assert isinstance(cv.value, bool) + assert isinstance(cv.raw_value, str) + + def test_integer_conversion_example(self): + """Should handle integer conversion from string.""" + cv = ConfigValue( + key="timeout", + value=30, + source_name="snowflake_cli_env", + priority=SourcePriority.ENVIRONMENT, + raw_value="30", + ) + + assert cv.value == 30 + assert cv.raw_value == "30" + assert isinstance(cv.value, int) + assert isinstance(cv.raw_value, str) + + def test_snowsql_key_mapping_example(self): + """Should preserve original SnowSQL key in raw_value.""" + cv = ConfigValue( + key="account", + value="my_account", + source_name="snowsql_config", + priority=SourcePriority.FILE, + raw_value="accountname=my_account", + ) + + assert cv.key == "account" + assert cv.value == "my_account" + assert cv.raw_value == "accountname=my_account" + + def test_none_value(self): + """Should handle None as a value.""" + cv = ConfigValue( + key="optional_field", + value=None, + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + assert cv.value is None + assert cv.key == "optional_field" + + def test_complex_value_types(self): + """Should handle complex value types like lists and dicts.""" + cv_list = ConfigValue( + key="tags", + value=["tag1", "tag2"], + source_name="toml:connections", + priority=SourcePriority.FILE, + ) + + cv_dict = ConfigValue( + key="metadata", + value={"key1": "value1", "key2": "value2"}, + source_name="toml:connections", + priority=SourcePriority.FILE, + ) + + assert cv_list.value == 
["tag1", "tag2"] + assert cv_dict.value == {"key1": "value1", "key2": "value2"} + + def test_all_priority_levels(self): + """Should work with all priority levels.""" + cv_cli = ConfigValue( + key="account", + value="cli_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + cv_env = ConfigValue( + key="account", + value="env_account", + source_name="snowflake_cli_env", + priority=SourcePriority.ENVIRONMENT, + ) + + cv_file = ConfigValue( + key="account", + value="file_account", + source_name="toml:connections", + priority=SourcePriority.FILE, + ) + + assert cv_cli.priority == SourcePriority.CLI_ARGUMENT + assert cv_env.priority == SourcePriority.ENVIRONMENT + assert cv_file.priority == SourcePriority.FILE + + def test_priority_comparison(self): + """Should be able to compare priorities.""" + cv_high = ConfigValue( + key="account", + value="high", + source_name="cli", + priority=SourcePriority.CLI_ARGUMENT, + ) + + cv_low = ConfigValue( + key="account", + value="low", + source_name="file", + priority=SourcePriority.FILE, + ) + + assert cv_high.priority.value < cv_low.priority.value + + def test_config_value_hash(self): + """ConfigValue should be hashable (frozen dataclass).""" + cv1 = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + cv2 = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + assert hash(cv1) == hash(cv2) + + config_set = {cv1, cv2} + assert len(config_set) == 1 + + def test_config_value_can_be_dict_key(self): + """ConfigValue should be usable as dictionary key.""" + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + test_dict = {cv: "some_data"} + assert test_dict[cv] == "some_data" diff --git a/tests/config_ng/test_resolution_history.py 
# Copyright (c) 2024 Snowflake Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Unit tests for Resolution History tracking.

Tests verify:
- ResolutionEntry immutability and fields
- ResolutionHistory creation and properties
- Resolution chain formatting
- History export to dictionary
- Timestamp tracking
"""

from datetime import datetime

import pytest
from snowflake.cli.api.config_ng.core import (
    ConfigValue,
    ResolutionEntry,
    ResolutionHistory,
    SourcePriority,
)

# Sentinel distinguishing "argument not supplied" from an explicit None,
# so the factories only forward kwargs the individual test actually sets
# and otherwise rely on the dataclasses' own defaults.
_UNSET = object()


def _cv(
    key="account",
    value="my_account",
    source="cli_arguments",
    priority=SourcePriority.CLI_ARGUMENT,
    raw=_UNSET,
):
    """Build a ConfigValue with common test defaults.

    ``raw`` is only forwarded as ``raw_value`` when explicitly given, so
    tests that omit it exercise the dataclass default.
    """
    kwargs = dict(key=key, value=value, source_name=source, priority=priority)
    if raw is not _UNSET:
        kwargs["raw_value"] = raw
    return ConfigValue(**kwargs)


def _entry(config_value, used, overridden=_UNSET, ts=None):
    """Build a ResolutionEntry around *config_value*.

    ``overridden`` is only forwarded as ``overridden_by`` when given;
    ``ts`` defaults to "now" like the inline fixtures it replaces.
    """
    kwargs = dict(
        config_value=config_value,
        timestamp=ts if ts is not None else datetime.now(),
        was_used=used,
    )
    if overridden is not _UNSET:
        kwargs["overridden_by"] = overridden
    return ResolutionEntry(**kwargs)


def _file_cv(value="file_account", raw=_UNSET):
    """ConfigValue coming from the lowest-priority (file) source."""
    return _cv(value=value, source="toml:connections", priority=SourcePriority.FILE, raw=raw)


class TestResolutionEntry:
    """Test suite for ResolutionEntry dataclass."""

    def test_create_resolution_entry(self):
        """Should create a ResolutionEntry with all fields."""
        config_value = _cv()
        timestamp = datetime.now()
        entry = _entry(config_value, used=True, ts=timestamp)

        assert entry.config_value == config_value
        assert entry.timestamp == timestamp
        assert entry.was_used is True
        assert entry.overridden_by is None

    def test_create_entry_with_override(self):
        """Should create entry with overridden_by information."""
        entry = _entry(_file_cv(), used=False, overridden="cli_arguments")

        assert entry.was_used is False
        assert entry.overridden_by == "cli_arguments"

    def test_resolution_entry_is_immutable(self):
        """ResolutionEntry should be immutable (frozen dataclass)."""
        entry = _entry(_cv(), used=True)

        with pytest.raises(Exception):
            entry.was_used = False

        with pytest.raises(Exception):
            entry.overridden_by = "someone"

    def test_resolution_entry_equality(self):
        """ResolutionEntry instances with same data should be equal."""
        config_value = _cv()
        timestamp = datetime.now()

        assert _entry(config_value, used=True, ts=timestamp) == _entry(
            config_value, used=True, ts=timestamp
        )


class TestResolutionHistory:
    """Test suite for ResolutionHistory dataclass."""

    def test_create_empty_resolution_history(self):
        """Should create an empty ResolutionHistory."""
        history = ResolutionHistory(key="account")

        assert history.key == "account"
        assert len(history.entries) == 0
        assert history.final_value is None
        assert history.default_used is False

    def test_create_resolution_history_with_entries(self):
        """Should create ResolutionHistory with entries."""
        history = ResolutionHistory(
            key="account",
            entries=[_entry(_cv(), used=True)],
            final_value="my_account",
        )

        assert len(history.entries) == 1
        assert history.final_value == "my_account"

    def test_sources_consulted_property(self):
        """Should return list of all source names consulted."""
        entries = [
            _entry(_file_cv(), used=False, overridden="cli_arguments"),
            _entry(_cv(value="cli_account"), used=True),
        ]
        history = ResolutionHistory(key="account", entries=entries)

        sources = history.sources_consulted
        assert len(sources) == 2
        assert "toml:connections" in sources
        assert "cli_arguments" in sources

    def test_values_considered_property(self):
        """Should return list of all values considered."""
        entries = [
            _entry(_file_cv(), used=False),
            _entry(_cv(value="cli_account"), used=True),
        ]
        history = ResolutionHistory(key="account", entries=entries)

        values = history.values_considered
        assert len(values) == 2
        assert "file_account" in values
        assert "cli_account" in values

    def test_selected_entry_property(self):
        """Should return the entry that was selected."""
        losing = _entry(_file_cv(), used=False, overridden="cli_arguments")
        winning = _entry(_cv(value="cli_account"), used=True)
        history = ResolutionHistory(key="account", entries=[losing, winning])

        selected = history.selected_entry
        assert selected == winning
        assert selected.config_value.value == "cli_account"

    def test_selected_entry_returns_none_when_no_selection(self):
        """Should return None when no entry was selected."""
        history = ResolutionHistory(
            key="account", entries=[_entry(_file_cv(), used=False)]
        )

        assert history.selected_entry is None

    def test_overridden_entries_property(self):
        """Should return all entries that were overridden."""
        entry1 = _entry(_file_cv(), used=False, overridden="cli_arguments")
        entry2 = _entry(
            _cv(
                value="env_account",
                source="snowflake_cli_env",
                priority=SourcePriority.ENVIRONMENT,
            ),
            used=False,
            overridden="cli_arguments",
        )
        entry3 = _entry(_cv(value="cli_account"), used=True)
        history = ResolutionHistory(key="account", entries=[entry1, entry2, entry3])

        overridden = history.overridden_entries
        assert len(overridden) == 2
        assert entry1 in overridden
        assert entry2 in overridden
        assert entry3 not in overridden

    def test_format_chain_simple(self):
        """Should format a simple resolution chain."""
        history = ResolutionHistory(
            key="account",
            entries=[_entry(_cv(), used=True)],
            final_value="my_account",
        )

        chain = history.format_chain()

        assert "account resolution chain (1 sources)" in chain
        assert "cli_arguments" in chain
        assert "my_account" in chain
        assert "(SELECTED)" in chain
        assert "✅" in chain

    def test_format_chain_with_override(self):
        """Should format resolution chain showing override."""
        history = ResolutionHistory(
            key="account",
            entries=[
                _entry(_file_cv(), used=False, overridden="cli_arguments"),
                _entry(_cv(value="cli_account"), used=True),
            ],
            final_value="cli_account",
        )

        chain = history.format_chain()

        assert "account resolution chain (2 sources)" in chain
        assert "toml:connections" in chain
        assert "cli_arguments" in chain
        assert "overridden by cli_arguments" in chain
        assert "(SELECTED)" in chain
        assert "❌" in chain
        assert "✅" in chain

    def test_format_chain_with_conversion(self):
        """Should show conversion in formatted chain."""
        converted = _entry(
            _cv(
                key="port",
                value=443,
                source="snowflake_cli_env",
                priority=SourcePriority.ENVIRONMENT,
                raw="443",
            ),
            used=True,
        )
        history = ResolutionHistory(key="port", entries=[converted], final_value=443)

        chain = history.format_chain()

        assert "port resolution chain" in chain
        assert "→" in chain
        assert "443" in chain

    def test_format_chain_with_default(self):
        """Should show default value in formatted chain."""
        history = ResolutionHistory(
            key="account",
            entries=[],
            final_value="default_account",
            default_used=True,
        )

        chain = history.format_chain()

        assert "account resolution chain (0 sources)" in chain
        assert "Default value used: default_account" in chain

    def test_to_dict_conversion(self):
        """Should convert history to dictionary for JSON export."""
        history = ResolutionHistory(
            key="account",
            entries=[_entry(_cv(), used=True)],
            final_value="my_account",
        )

        data = history.to_dict()

        assert data["key"] == "account"
        assert data["final_value"] == "my_account"
        assert data["default_used"] is False
        assert "cli_arguments" in data["sources_consulted"]
        assert len(data["entries"]) == 1

        entry_data = data["entries"][0]
        assert entry_data["source"] == "cli_arguments"
        assert entry_data["value"] == "my_account"
        assert entry_data["priority"] == "CLI_ARGUMENT"
        assert entry_data["was_used"] is True

    def test_to_dict_with_multiple_entries(self):
        """Should convert complex history to dictionary."""
        entries = [
            _entry(_file_cv(raw="file_account"), used=False, overridden="cli_arguments"),
            _entry(_cv(value="cli_account"), used=True),
        ]
        history = ResolutionHistory(
            key="account", entries=entries, final_value="cli_account"
        )

        data = history.to_dict()

        assert len(data["entries"]) == 2
        assert data["entries"][0]["overridden_by"] == "cli_arguments"
        assert data["entries"][1]["was_used"] is True

    def test_resolution_history_is_mutable(self):
        """ResolutionHistory should be mutable (not frozen)."""
        history = ResolutionHistory(key="account")

        history.entries.append(_entry(_cv(), used=True))
        history.final_value = "my_account"

        assert len(history.entries) == 1
        assert history.final_value == "my_account"

    def test_empty_history_properties(self):
        """Empty history should return empty lists for properties."""
        history = ResolutionHistory(key="account")

        assert history.sources_consulted == []
        assert history.values_considered == []
        assert history.selected_entry is None
        assert history.overridden_entries == []
# Copyright (c) 2024 Snowflake Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Unit tests for SourcePriority enum.

Tests verify:
- Enum values are correctly defined
- Priority ordering is correct (lower value = higher priority)
- Enum members have expected attributes
"""

import pytest
from snowflake.cli.api.config_ng.core import SourcePriority

# Canonical (member name, numeric value) pairs, highest priority first.
_EXPECTED = (
    ("CLI_ARGUMENT", 1),
    ("ENVIRONMENT", 2),
    ("FILE", 3),
)


class TestSourcePriority:
    """Test suite for SourcePriority enum."""

    def test_enum_members_exist(self):
        """All required enum members should exist."""
        for member_name, _ in _EXPECTED:
            assert hasattr(SourcePriority, member_name)

    def test_enum_values_are_integers(self):
        """All enum values should be integers."""
        for member_name, _ in _EXPECTED:
            assert isinstance(getattr(SourcePriority, member_name).value, int)

    def test_cli_argument_has_highest_priority(self):
        """CLI_ARGUMENT should have the lowest numeric value (highest priority)."""
        assert SourcePriority.CLI_ARGUMENT.value == 1

    def test_environment_has_medium_priority(self):
        """ENVIRONMENT should have medium numeric value (medium priority)."""
        assert SourcePriority.ENVIRONMENT.value == 2

    def test_file_has_lowest_priority(self):
        """FILE should have the highest numeric value (lowest priority)."""
        assert SourcePriority.FILE.value == 3

    def test_priority_ordering(self):
        """Lower numeric value should mean higher priority."""
        assert (
            SourcePriority.CLI_ARGUMENT.value
            < SourcePriority.ENVIRONMENT.value
            < SourcePriority.FILE.value
        )

    def test_enum_comparison(self):
        """Enum members should be comparable by value."""
        shuffled = [
            SourcePriority.FILE,
            SourcePriority.CLI_ARGUMENT,
            SourcePriority.ENVIRONMENT,
        ]

        ordered = sorted(shuffled, key=lambda member: member.value)

        assert ordered == [
            SourcePriority.CLI_ARGUMENT,
            SourcePriority.ENVIRONMENT,
            SourcePriority.FILE,
        ]

    def test_enum_equality(self):
        """Enum members should be equal to themselves."""
        for member_name, _ in _EXPECTED:
            member = getattr(SourcePriority, member_name)
            assert member == member

    def test_enum_inequality(self):
        """Different enum members should not be equal."""
        members = [getattr(SourcePriority, name) for name, _ in _EXPECTED]
        for index, left in enumerate(members):
            for right in members[index + 1 :]:
                assert left != right

    def test_enum_has_name_attribute(self):
        """Enum members should have a name attribute."""
        for member_name, _ in _EXPECTED:
            assert getattr(SourcePriority, member_name).name == member_name

    def test_enum_is_iterable(self):
        """Should be able to iterate over enum members."""
        members = list(SourcePriority)

        assert len(members) == 3
        for member_name, _ in _EXPECTED:
            assert getattr(SourcePriority, member_name) in members

    def test_enum_can_be_accessed_by_name(self):
        """Should be able to access enum members by name."""
        for member_name, _ in _EXPECTED:
            assert SourcePriority[member_name] == getattr(SourcePriority, member_name)

    def test_enum_can_be_accessed_by_value(self):
        """Should be able to access enum members by value."""
        for member_name, numeric in _EXPECTED:
            assert SourcePriority(numeric) == getattr(SourcePriority, member_name)

    def test_invalid_value_raises_error(self):
        """Accessing enum with invalid value should raise ValueError."""
        with pytest.raises(ValueError):
            SourcePriority(99)

    def test_invalid_name_raises_error(self):
        """Accessing enum with invalid name should raise KeyError."""
        with pytest.raises(KeyError):
            SourcePriority["INVALID"]

    def test_enum_repr(self):
        """Enum members should have a readable representation."""
        for member_name, _ in _EXPECTED:
            member = getattr(SourcePriority, member_name)
            assert f"SourcePriority.{member_name}" in repr(member)

    def test_enum_str(self):
        """Enum members should have a readable string representation."""
        for member_name, _ in _EXPECTED:
            member = getattr(SourcePriority, member_name)
            assert f"SourcePriority.{member_name}" in str(member)
"""
Unit tests for ValueSource interface.

Tests verify:
- Abstract interface cannot be instantiated
- All abstract methods must be implemented
- Concrete implementations work correctly
- Common protocol is enforced
"""

import pytest
from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority, ValueSource

# A complete member set satisfying ValueSource. The "must implement X"
# tests drop one member at a time to prove each abstract member is
# individually enforced by the ABC machinery, instead of hand-writing a
# near-identical partial subclass per test.
_FULL_MEMBERS = {
    "source_name": property(lambda self: "test"),
    "priority": property(lambda self: SourcePriority.FILE),
    "discover": lambda self, key=None: {},
    "supports_key": lambda self, key: True,
}


def _make_source_class(*, omit=()):
    """Create a ValueSource subclass implementing every member except those in *omit*."""
    members = {name: impl for name, impl in _FULL_MEMBERS.items() if name not in omit}
    return type("GeneratedSource", (ValueSource,), members)


def _const_source(name, priority_member):
    """Instantiate a trivial source with a fixed name and priority."""
    cls = type(
        "ConstSource",
        (ValueSource,),
        {
            "source_name": property(lambda self: name),
            "priority": property(lambda self: priority_member),
            "discover": lambda self, key=None: {},
            "supports_key": lambda self, key: False,
        },
    )
    return cls()


class TestValueSourceInterface:
    """Test suite for ValueSource abstract interface."""

    def test_cannot_instantiate_abstract_class(self):
        """Should not be able to instantiate ValueSource directly."""
        with pytest.raises(TypeError):
            ValueSource()

    def test_must_implement_source_name(self):
        """Concrete implementations must implement source_name property."""
        with pytest.raises(TypeError):
            _make_source_class(omit=("source_name",))()

    def test_must_implement_priority(self):
        """Concrete implementations must implement priority property."""
        with pytest.raises(TypeError):
            _make_source_class(omit=("priority",))()

    def test_must_implement_discover(self):
        """Concrete implementations must implement discover method."""
        with pytest.raises(TypeError):
            _make_source_class(omit=("discover",))()

    def test_must_implement_supports_key(self):
        """Concrete implementations must implement supports_key method."""
        with pytest.raises(TypeError):
            _make_source_class(omit=("supports_key",))()

    def test_complete_implementation(self):
        """Should be able to instantiate with all methods implemented."""
        source = _make_source_class()()
        assert source.source_name == "test"
        assert source.priority == SourcePriority.FILE


class TestValueSourceConcreteImplementation:
    """Test a concrete implementation of ValueSource."""

    class MockSource(ValueSource):
        """Mock source for testing."""

        def __init__(self, data: dict):
            self._data = data

        @property
        def source_name(self) -> str:
            return "mock_source"

        @property
        def priority(self) -> SourcePriority:
            return SourcePriority.FILE

        def discover(self, key=None):
            # Unified branch logic: select which keys to expose, then build
            # ConfigValue wrappers in one place.
            if key is None:
                selected = list(self._data)
            elif key in self._data:
                selected = [key]
            else:
                selected = []
            return {
                k: ConfigValue(
                    key=k,
                    value=self._data[k],
                    source_name=self.source_name,
                    priority=self.priority,
                )
                for k in selected
            }

        def supports_key(self, key: str) -> bool:
            return key in self._data

    def test_discover_all_values(self):
        """Should discover all values when key is None."""
        source = self.MockSource({"account": "test_account", "user": "test_user"})

        values = source.discover()

        assert len(values) == 2
        assert "account" in values
        assert "user" in values
        assert values["account"].value == "test_account"
        assert values["user"].value == "test_user"

    def test_discover_specific_key(self):
        """Should discover specific key when provided."""
        source = self.MockSource({"account": "test_account", "user": "test_user"})

        values = source.discover(key="account")

        assert len(values) == 1
        assert "account" in values
        assert values["account"].value == "test_account"

    def test_discover_nonexistent_key(self):
        """Should return empty dict for nonexistent key."""
        source = self.MockSource({"account": "test_account"})

        values = source.discover(key="nonexistent")

        assert len(values) == 0

    def test_supports_existing_key(self):
        """Should return True for existing key."""
        source = self.MockSource({"account": "test_account"})

        assert source.supports_key("account") is True

    def test_supports_nonexistent_key(self):
        """Should return False for nonexistent key."""
        source = self.MockSource({"account": "test_account"})

        assert source.supports_key("nonexistent") is False

    def test_source_name_is_accessible(self):
        """Should be able to access source_name property."""
        assert self.MockSource({}).source_name == "mock_source"

    def test_priority_is_accessible(self):
        """Should be able to access priority property."""
        assert self.MockSource({}).priority == SourcePriority.FILE

    def test_discovered_values_have_correct_metadata(self):
        """Discovered values should have correct metadata."""
        source = self.MockSource({"account": "test_account"})

        config_value = source.discover(key="account")["account"]

        assert config_value.source_name == "mock_source"
        assert config_value.priority == SourcePriority.FILE
        assert config_value.key == "account"
        assert config_value.value == "test_account"

    def test_discover_returns_dict_of_config_values(self):
        """discover() should return Dict[str, ConfigValue]."""
        values = self.MockSource({"account": "test_account"}).discover()

        assert isinstance(values, dict)
        for key, value in values.items():
            assert isinstance(key, str)
            assert isinstance(value, ConfigValue)

    def test_empty_source_discover(self):
        """Should handle empty source gracefully."""
        values = self.MockSource({}).discover()

        assert len(values) == 0
        assert isinstance(values, dict)

    def test_multiple_sources_with_different_priorities(self):
        """Should be able to create sources with different priorities."""
        high = _const_source("high_priority", SourcePriority.CLI_ARGUMENT)
        low = _const_source("low_priority", SourcePriority.FILE)

        assert high.priority.value < low.priority.value
100644 tests/config_ng/test_environment_source.py create mode 100644 tests/config_ng/test_file_source.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index 5f6fd28f69..e31b54da80 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -29,9 +29,19 @@ SourcePriority, ValueSource, ) +from snowflake.cli.api.config_ng.sources import ( + CliArgumentSource, + ConfigurationSource, + EnvironmentSource, + FileSource, +) __all__ = [ + "CliArgumentSource", + "ConfigurationSource", "ConfigValue", + "EnvironmentSource", + "FileSource", "ResolutionEntry", "ResolutionHistory", "SourcePriority", diff --git a/src/snowflake/cli/api/config_ng/handlers.py b/src/snowflake/cli/api/config_ng/handlers.py new file mode 100644 index 0000000000..bb00398727 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/handlers.py @@ -0,0 +1,84 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Configuration handlers for specific formats and schemas. + +This module will implement specific handlers for: +- Environment variables (SNOWFLAKE_*, SNOWSQL_*) +- File formats (TOML, SnowSQL config, JSON, YAML) + +To be implemented in Phase 3-4. 
+""" + +from __future__ import annotations + +from abc import abstractmethod +from pathlib import Path +from typing import Dict, Optional + +from snowflake.cli.api.config_ng.core import ConfigValue, ValueSource + + +class SourceHandler(ValueSource): + """ + Specific handler for a configuration format or schema. + Examples: TOML files, SnowSQL config, SNOWFLAKE_* env vars, etc. + """ + + @property + @abstractmethod + def handler_type(self) -> str: + """ + Type identifier for this handler. + Examples: 'toml', 'json', 'snowsql_env', 'snowsql_config' + """ + ... + + @abstractmethod + def can_handle(self) -> bool: + """ + Check if this handler is applicable/available. + + Returns: + True if handler can be used, False otherwise + """ + ... + + def can_handle_file(self, file_path: Path) -> bool: + """ + Check if this handler can process the given file. + + Args: + file_path: Path to file to check + + Returns: + True if handler can process this file, False otherwise + """ + return False + + def discover_from_file( + self, file_path: Path, key: Optional[str] = None + ) -> Dict[str, ConfigValue]: + """ + Discover values from a file. + + Args: + file_path: Path to file to read + key: Specific key to discover, or None for all + + Returns: + Dictionary of discovered values + """ + return {} diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py new file mode 100644 index 0000000000..cd7f6a9a14 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -0,0 +1,332 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Top-level configuration sources. + +This module implements the top-level configuration sources that orchestrate +handlers and provide configuration values according to precedence rules. +""" + +from __future__ import annotations + +import logging +from abc import abstractmethod +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority, ValueSource + +if TYPE_CHECKING: + from snowflake.cli.api.config_ng.handlers import SourceHandler + +log = logging.getLogger(__name__) + + +class ConfigurationSource(ValueSource): + """ + Base class for top-level sources that may delegate to handlers. + Handlers are tried IN ORDER - first handler with value wins. + """ + + def __init__(self, handlers: Optional[List["SourceHandler"]] = None): + """ + Initialize with ordered list of sub-handlers. + + Args: + handlers: List of handlers in priority order (first = highest) + """ + self._handlers = handlers or [] + + @abstractmethod + def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Discover values directly from this source (without handlers). + Direct values always take precedence over handler values. + + Returns: + Dictionary of directly discovered values + """ + ... + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Discover values from handlers and direct sources. + + Precedence within this source: + 1. Direct values (highest) + 2. First handler with value + 3. Second handler with value + 4. ... 
and so on + + Args: + key: Specific key to discover, or None for all + + Returns: + Dictionary of all discovered values with precedence applied + """ + discovered: Dict[str, ConfigValue] = {} + + # Process handlers in ORDER (first wins for same key) + for handler in self._handlers: + try: + handler_values = handler.discover(key) + for k, v in handler_values.items(): + if k not in discovered: # First handler wins + discovered[k] = v + except Exception as e: + log.debug("Handler %s failed: %s", handler.source_name, e) + + # Direct values override all handlers + direct_values = self.discover_direct(key) + discovered.update(direct_values) + + return discovered + + def add_handler(self, handler: "SourceHandler", position: int = -1) -> None: + """ + Add handler at specific position. + + Args: + handler: Handler to add + position: Insert position (-1 = append, 0 = prepend) + """ + if position == -1: + self._handlers.append(handler) + else: + self._handlers.insert(position, handler) + + def set_handlers(self, handlers: List["SourceHandler"]) -> None: + """Replace all handlers with new ordered list.""" + self._handlers = handlers + + def get_handlers(self) -> List["SourceHandler"]: + """Get current handler list (for inspection/reordering).""" + return self._handlers.copy() + + +class CliArgumentSource(ConfigurationSource): + """ + Source for command-line arguments. + Highest priority source with no sub-handlers. + Values come directly from parsed CLI arguments. + """ + + def __init__(self, cli_context: Optional[Dict[str, Any]] = None): + """ + Initialize with CLI context containing parsed arguments. 
+ + Args: + cli_context: Dictionary of CLI arguments (key -> value) + """ + super().__init__(handlers=[]) # No handlers needed + self._cli_context = cli_context or {} + + @property + def source_name(self) -> str: + return "cli_arguments" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.CLI_ARGUMENT + + def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Extract non-None values from CLI context. + CLI arguments are already parsed by Typer/Click. + """ + values = {} + + if key is not None: + # Discover specific key + if key in self._cli_context and self._cli_context[key] is not None: + values[key] = ConfigValue( + key=key, + value=self._cli_context[key], + source_name=self.source_name, + priority=self.priority, + raw_value=self._cli_context[key], + ) + else: + # Discover all present values + for k, v in self._cli_context.items(): + if v is not None: + values[k] = ConfigValue( + key=k, + value=v, + source_name=self.source_name, + priority=self.priority, + raw_value=v, + ) + + return values + + def supports_key(self, key: str) -> bool: + """Check if key is present in CLI context.""" + return key in self._cli_context + + +class EnvironmentSource(ConfigurationSource): + """ + Source for environment variables with handler precedence. + + Default Handler Order (supports migration): + 1. SnowCliEnvHandler (SNOWFLAKE_*) ← Check first + 2. SnowSqlEnvHandler (SNOWSQL_*) ← Fallback for legacy + + This allows users to: + - Start with only SNOWSQL_* vars (works) + - Add SNOWFLAKE_* vars (automatically override SNOWSQL_*) + - Gradually migrate without breaking anything + """ + + def __init__(self, handlers: Optional[List["SourceHandler"]] = None): + """ + Initialize with ordered handlers. 
+ + Args: + handlers: Custom handler list, or None for default + """ + super().__init__(handlers=handlers or []) + + @property + def source_name(self) -> str: + return "environment" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.ENVIRONMENT + + def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Environment source has no direct values. + All values come from handlers. + """ + return {} + + def supports_key(self, key: str) -> bool: + """Check if any handler supports this key.""" + return any(h.supports_key(key) for h in self._handlers) + + +class FileSource(ConfigurationSource): + """ + Source for configuration files with handler precedence. + + Default Handler Order (supports migration): + 1. SnowCLI TOML handlers (config.toml, connections.toml) ← Check first + 2. SnowSQL config handler (~/.snowsql/config) ← Fallback + + File Path Order: + - Earlier paths take precedence over later ones + - Allows user-specific configs to override system configs + """ + + def __init__( + self, + file_paths: Optional[List[Path]] = None, + handlers: Optional[List["SourceHandler"]] = None, + ): + """ + Initialize with file paths and handlers. + + Args: + file_paths: Ordered list of file paths (first = highest precedence) + handlers: Ordered list of format handlers (first = highest precedence) + """ + super().__init__(handlers=handlers or []) + self._file_paths = file_paths or [] + + @property + def source_name(self) -> str: + return "configuration_files" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + File source has no direct values. + All values come from file handlers. + """ + return {} + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Try each file path with each handler. + + Precedence: + 1. First file path with value + a. 
First handler that can read it with value + 2. Second file path with value + a. First handler that can read it with value + ... + + Args: + key: Specific key to discover, or None for all + + Returns: + Dictionary of discovered values with precedence applied + """ + discovered: Dict[str, ConfigValue] = {} + + for file_path in self._file_paths: + if not file_path.exists(): + continue + + for handler in self._handlers: + if not handler.can_handle_file(file_path): + continue + + try: + handler_values = handler.discover_from_file(file_path, key) + # First file+handler combination wins + for k, v in handler_values.items(): + if k not in discovered: + discovered[k] = v + except Exception as e: + log.debug( + "Handler %s failed for %s: %s", + handler.source_name, + file_path, + e, + ) + + return discovered + + def supports_key(self, key: str) -> bool: + """Check if any handler supports this key.""" + return any(h.supports_key(key) for h in self._handlers) + + def get_file_paths(self) -> List[Path]: + """Get current file paths list (for inspection).""" + return self._file_paths.copy() + + def add_file_path(self, file_path: Path, position: int = -1) -> None: + """ + Add file path at specific position. + + Args: + file_path: Path to add + position: Insert position (-1 = append, 0 = prepend) + """ + if position == -1: + self._file_paths.append(file_path) + else: + self._file_paths.insert(position, file_path) + + def set_file_paths(self, file_paths: List[Path]) -> None: + """Replace all file paths with new ordered list.""" + self._file_paths = file_paths diff --git a/tests/config_ng/test_cli_argument_source.py b/tests/config_ng/test_cli_argument_source.py new file mode 100644 index 0000000000..92e6de46b1 --- /dev/null +++ b/tests/config_ng/test_cli_argument_source.py @@ -0,0 +1,207 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for CliArgumentSource. + +Tests verify: +- Highest priority source (CLI_ARGUMENT) +- Direct value discovery from CLI context +- None value filtering +- No handler support +- Source identification +""" + +from snowflake.cli.api.config_ng.core import SourcePriority +from snowflake.cli.api.config_ng.sources import CliArgumentSource + + +class TestCliArgumentSource: + """Test suite for CliArgumentSource.""" + + def test_create_with_empty_context(self): + """Should create source with empty context.""" + source = CliArgumentSource() + + assert source.source_name == "cli_arguments" + assert source.priority == SourcePriority.CLI_ARGUMENT + + def test_create_with_context(self): + """Should create source with provided context.""" + context = {"account": "my_account", "user": "my_user"} + source = CliArgumentSource(cli_context=context) + + values = source.discover() + assert len(values) == 2 + assert values["account"].value == "my_account" + assert values["user"].value == "my_user" + + def test_has_highest_priority(self): + """Should have CLI_ARGUMENT priority (highest).""" + source = CliArgumentSource() + assert source.priority == SourcePriority.CLI_ARGUMENT + assert source.priority.value == 1 + + def test_discover_all_values(self): + """Should discover all non-None values when key is None.""" + context = {"account": "my_account", "user": "my_user", "port": 443} + source = CliArgumentSource(cli_context=context) + + values = source.discover() + + assert len(values) == 3 + assert values["account"].value == "my_account" + assert values["user"].value 
== "my_user" + assert values["port"].value == 443 + + def test_discover_specific_key(self): + """Should discover specific key when provided.""" + context = {"account": "my_account", "user": "my_user"} + source = CliArgumentSource(cli_context=context) + + values = source.discover(key="account") + + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "my_account" + + def test_discover_nonexistent_key(self): + """Should return empty dict for nonexistent key.""" + context = {"account": "my_account"} + source = CliArgumentSource(cli_context=context) + + values = source.discover(key="nonexistent") + + assert len(values) == 0 + + def test_filters_none_values(self): + """Should not include None values in discovery.""" + context = {"account": "my_account", "user": None, "password": None} + source = CliArgumentSource(cli_context=context) + + values = source.discover() + + assert len(values) == 1 + assert "account" in values + assert "user" not in values + assert "password" not in values + + def test_filters_none_for_specific_key(self): + """Should return empty dict if specific key has None value.""" + context = {"account": None} + source = CliArgumentSource(cli_context=context) + + values = source.discover(key="account") + + assert len(values) == 0 + + def test_values_have_correct_metadata(self): + """Discovered values should have correct metadata.""" + context = {"account": "my_account"} + source = CliArgumentSource(cli_context=context) + + values = source.discover(key="account") + config_value = values["account"] + + assert config_value.key == "account" + assert config_value.value == "my_account" + assert config_value.source_name == "cli_arguments" + assert config_value.priority == SourcePriority.CLI_ARGUMENT + assert config_value.raw_value == "my_account" + + def test_supports_existing_key(self): + """Should return True for keys present in context.""" + context = {"account": "my_account"} + source = 
CliArgumentSource(cli_context=context) + + assert source.supports_key("account") is True + + def test_supports_nonexistent_key(self): + """Should return False for keys not in context.""" + context = {"account": "my_account"} + source = CliArgumentSource(cli_context=context) + + assert source.supports_key("nonexistent") is False + + def test_supports_key_with_none_value(self): + """Should still support key even if value is None.""" + context = {"account": None} + source = CliArgumentSource(cli_context=context) + + assert source.supports_key("account") is True + + def test_no_handlers(self): + """CLI source should not have any handlers.""" + source = CliArgumentSource() + + handlers = source.get_handlers() + assert len(handlers) == 0 + + def test_discover_direct_returns_same_as_discover(self): + """discover_direct should return same values as discover.""" + context = {"account": "my_account", "user": "my_user"} + source = CliArgumentSource(cli_context=context) + + direct_values = source.discover_direct() + discovered_values = source.discover() + + assert direct_values == discovered_values + + def test_handles_various_value_types(self): + """Should handle different value types correctly.""" + context = { + "string_val": "text", + "int_val": 42, + "bool_val": True, + "list_val": [1, 2, 3], + "dict_val": {"key": "value"}, + } + source = CliArgumentSource(cli_context=context) + + values = source.discover() + + assert len(values) == 5 + assert values["string_val"].value == "text" + assert values["int_val"].value == 42 + assert values["bool_val"].value is True + assert values["list_val"].value == [1, 2, 3] + assert values["dict_val"].value == {"key": "value"} + + def test_empty_context_returns_empty_dict(self): + """Empty context should return empty discovery result.""" + source = CliArgumentSource(cli_context={}) + + values = source.discover() + + assert len(values) == 0 + + def test_raw_value_equals_parsed_value(self): + """For CLI arguments, raw_value should equal 
parsed value.""" + context = {"account": "my_account"} + source = CliArgumentSource(cli_context=context) + + values = source.discover(key="account") + config_value = values["account"] + + assert config_value.raw_value == config_value.value + + def test_multiple_discover_calls_consistent(self): + """Multiple discover calls should return consistent results.""" + context = {"account": "my_account"} + source = CliArgumentSource(cli_context=context) + + values1 = source.discover() + values2 = source.discover() + + assert values1 == values2 diff --git a/tests/config_ng/test_configuration_source.py b/tests/config_ng/test_configuration_source.py new file mode 100644 index 0000000000..6f5800db16 --- /dev/null +++ b/tests/config_ng/test_configuration_source.py @@ -0,0 +1,363 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for ConfigurationSource abstract base class. 
+ +Tests verify: +- Abstract class cannot be instantiated without implementing abstract methods +- Handler ordering and precedence +- Handler management (add, set, get) +- Direct value precedence over handler values +- Handler failure handling +""" + +from typing import Any, Dict, Optional + +import pytest +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority +from snowflake.cli.api.config_ng.handlers import SourceHandler +from snowflake.cli.api.config_ng.sources import ConfigurationSource + + +class MockHandler(SourceHandler): + """Mock handler for testing.""" + + def __init__(self, data: Dict[str, Any], name: str = "mock_handler"): + self._data = data + self._name = name + + @property + def source_name(self) -> str: + return self._name + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + @property + def handler_type(self) -> str: + return "mock" + + def can_handle(self) -> bool: + return True + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + if key is None: + return { + k: ConfigValue( + key=k, + value=v, + source_name=self.source_name, + priority=self.priority, + ) + for k, v in self._data.items() + } + elif key in self._data: + return { + key: ConfigValue( + key=key, + value=self._data[key], + source_name=self.source_name, + priority=self.priority, + ) + } + return {} + + def supports_key(self, key: str) -> bool: + return key in self._data + + +class TestConfigurationSourceInterface: + """Test suite for ConfigurationSource abstract base class.""" + + def test_cannot_instantiate_abstract_class(self): + """Should not be able to instantiate ConfigurationSource directly.""" + with pytest.raises(TypeError): + ConfigurationSource() + + def test_must_implement_discover_direct(self): + """Concrete implementations must implement discover_direct method.""" + + class IncompleteSource(ConfigurationSource): + @property + def source_name(self) -> str: + return "test" + + @property + def 
priority(self) -> SourcePriority: + return SourcePriority.FILE + + def supports_key(self, key: str) -> bool: + return True + + with pytest.raises(TypeError): + IncompleteSource() + + def test_complete_implementation(self): + """Should be able to instantiate with all methods implemented.""" + + class CompleteSource(ConfigurationSource): + @property + def source_name(self) -> str: + return "test_source" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + def discover_direct(self, key=None) -> Dict[str, ConfigValue]: + return {} + + def supports_key(self, key: str) -> bool: + return True + + source = CompleteSource() + assert source.source_name == "test_source" + assert source.priority == SourcePriority.FILE + + +class TestConfigurationSourceHandlers: + """Test handler management in ConfigurationSource.""" + + class TestSource(ConfigurationSource): + """Test implementation of ConfigurationSource.""" + + def __init__(self, direct_values=None, handlers=None): + super().__init__(handlers=handlers) + self._direct_values = direct_values or {} + + @property + def source_name(self) -> str: + return "test_source" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + def discover_direct(self, key=None) -> Dict[str, ConfigValue]: + if key is None: + return { + k: ConfigValue( + key=k, + value=v, + source_name=self.source_name, + priority=self.priority, + ) + for k, v in self._direct_values.items() + } + elif key in self._direct_values: + return { + key: ConfigValue( + key=key, + value=self._direct_values[key], + source_name=self.source_name, + priority=self.priority, + ) + } + return {} + + def supports_key(self, key: str) -> bool: + return key in self._direct_values or any( + h.supports_key(key) for h in self._handlers + ) + + def test_initialize_with_no_handlers(self): + """Should initialize with empty handler list.""" + source = self.TestSource() + assert len(source.get_handlers()) == 0 + + def 
test_initialize_with_handlers(self): + """Should initialize with provided handlers.""" + handler1 = MockHandler({"key1": "value1"}, "handler1") + handler2 = MockHandler({"key2": "value2"}, "handler2") + + source = self.TestSource(handlers=[handler1, handler2]) + handlers = source.get_handlers() + + assert len(handlers) == 2 + assert handlers[0] == handler1 + assert handlers[1] == handler2 + + def test_handler_ordering_first_wins(self): + """First handler with value should win for same key.""" + handler1 = MockHandler({"account": "handler1_account"}, "handler1") + handler2 = MockHandler({"account": "handler2_account"}, "handler2") + + source = self.TestSource(handlers=[handler1, handler2]) + values = source.discover(key="account") + + assert values["account"].value == "handler1_account" + assert values["account"].source_name == "handler1" + + def test_handlers_complement_each_other(self): + """Handlers should provide different keys.""" + handler1 = MockHandler({"key1": "value1"}, "handler1") + handler2 = MockHandler({"key2": "value2"}, "handler2") + + source = self.TestSource(handlers=[handler1, handler2]) + values = source.discover() + + assert len(values) == 2 + assert values["key1"].value == "value1" + assert values["key2"].value == "value2" + + def test_direct_values_override_handlers(self): + """Direct values should take precedence over handler values.""" + handler = MockHandler({"account": "handler_account"}, "handler") + direct_values = {"account": "direct_account"} + + source = self.TestSource(direct_values=direct_values, handlers=[handler]) + values = source.discover(key="account") + + assert values["account"].value == "direct_account" + assert values["account"].source_name == "test_source" + + def test_discover_all_values_from_handlers(self): + """Should discover all values when key is None.""" + handler1 = MockHandler({"key1": "value1", "key2": "value2"}, "handler1") + handler2 = MockHandler({"key3": "value3"}, "handler2") + + source = 
self.TestSource(handlers=[handler1, handler2]) + values = source.discover() + + assert len(values) == 3 + assert "key1" in values + assert "key2" in values + assert "key3" in values + + def test_discover_specific_key_from_handlers(self): + """Should discover specific key when provided.""" + handler = MockHandler({"key1": "value1", "key2": "value2"}, "handler") + + source = self.TestSource(handlers=[handler]) + values = source.discover(key="key1") + + assert len(values) == 1 + assert "key1" in values + assert values["key1"].value == "value1" + + def test_handler_failure_does_not_break_discovery(self): + """Failed handler should not prevent other handlers from working.""" + + class FailingHandler(SourceHandler): + @property + def source_name(self) -> str: + return "failing" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + @property + def handler_type(self) -> str: + return "failing" + + def can_handle(self) -> bool: + return True + + def discover(self, key=None): + raise RuntimeError("Handler failed") + + def supports_key(self, key: str) -> bool: + return True + + failing = FailingHandler() + working = MockHandler({"key1": "value1"}, "working") + + source = self.TestSource(handlers=[failing, working]) + values = source.discover() + + # Should still get value from working handler + assert len(values) == 1 + assert values["key1"].value == "value1" + + def test_add_handler_append(self): + """Should append handler to end of list.""" + handler1 = MockHandler({"key1": "value1"}, "handler1") + handler2 = MockHandler({"key2": "value2"}, "handler2") + + source = self.TestSource(handlers=[handler1]) + source.add_handler(handler2) + + handlers = source.get_handlers() + assert len(handlers) == 2 + assert handlers[1] == handler2 + + def test_add_handler_prepend(self): + """Should prepend handler to beginning of list.""" + handler1 = MockHandler({"key1": "value1"}, "handler1") + handler2 = MockHandler({"key2": "value2"}, "handler2") + + source 
= self.TestSource(handlers=[handler1]) + source.add_handler(handler2, position=0) + + handlers = source.get_handlers() + assert len(handlers) == 2 + assert handlers[0] == handler2 + + def test_add_handler_at_position(self): + """Should insert handler at specific position.""" + handler1 = MockHandler({"key1": "value1"}, "handler1") + handler2 = MockHandler({"key2": "value2"}, "handler2") + handler3 = MockHandler({"key3": "value3"}, "handler3") + + source = self.TestSource(handlers=[handler1, handler3]) + source.add_handler(handler2, position=1) + + handlers = source.get_handlers() + assert len(handlers) == 3 + assert handlers[1] == handler2 + + def test_set_handlers(self): + """Should replace all handlers with new list.""" + handler1 = MockHandler({"key1": "value1"}, "handler1") + handler2 = MockHandler({"key2": "value2"}, "handler2") + handler3 = MockHandler({"key3": "value3"}, "handler3") + + source = self.TestSource(handlers=[handler1, handler2]) + source.set_handlers([handler3]) + + handlers = source.get_handlers() + assert len(handlers) == 1 + assert handlers[0] == handler3 + + def test_get_handlers_returns_copy(self): + """get_handlers should return a copy, not the original list.""" + handler = MockHandler({"key1": "value1"}, "handler1") + source = self.TestSource(handlers=[handler]) + + handlers = source.get_handlers() + handlers.clear() + + # Original list should be unchanged + assert len(source.get_handlers()) == 1 + + def test_empty_handlers_returns_direct_values_only(self): + """With no handlers, should return only direct values.""" + direct_values = {"account": "direct_account"} + source = self.TestSource(direct_values=direct_values, handlers=[]) + + values = source.discover() + + assert len(values) == 1 + assert values["account"].value == "direct_account" + + def test_supports_key_checks_handlers(self): + """supports_key should check handlers.""" + handler = MockHandler({"key1": "value1"}, "handler") + source = self.TestSource(handlers=[handler]) + + 
assert source.supports_key("key1") is True + assert source.supports_key("nonexistent") is False diff --git a/tests/config_ng/test_environment_source.py b/tests/config_ng/test_environment_source.py new file mode 100644 index 0000000000..3d69174d8c --- /dev/null +++ b/tests/config_ng/test_environment_source.py @@ -0,0 +1,300 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for EnvironmentSource. + +Tests verify: +- Medium priority source (ENVIRONMENT) +- Handler-based discovery (no direct values) +- Handler ordering for migration support +- Multiple handler support +""" + +from typing import Any, Dict, Optional + +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority +from snowflake.cli.api.config_ng.handlers import SourceHandler +from snowflake.cli.api.config_ng.sources import EnvironmentSource + + +class MockEnvHandler(SourceHandler): + """Mock environment variable handler for testing.""" + + def __init__(self, data: Dict[str, Any], name: str = "mock_env_handler"): + self._data = data + self._name = name + + @property + def source_name(self) -> str: + return self._name + + @property + def priority(self) -> SourcePriority: + return SourcePriority.ENVIRONMENT + + @property + def handler_type(self) -> str: + return "mock_env" + + def can_handle(self) -> bool: + return len(self._data) > 0 + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + if key is None: + return { + k: 
ConfigValue( + key=k, + value=v, + source_name=self.source_name, + priority=self.priority, + ) + for k, v in self._data.items() + } + elif key in self._data: + return { + key: ConfigValue( + key=key, + value=self._data[key], + source_name=self.source_name, + priority=self.priority, + ) + } + return {} + + def supports_key(self, key: str) -> bool: + return key in self._data + + +class TestEnvironmentSource: + """Test suite for EnvironmentSource.""" + + def test_create_with_no_handlers(self): + """Should create source with empty handler list.""" + source = EnvironmentSource() + + assert source.source_name == "environment" + assert source.priority == SourcePriority.ENVIRONMENT + assert len(source.get_handlers()) == 0 + + def test_create_with_handlers(self): + """Should create source with provided handlers.""" + handler1 = MockEnvHandler({"key1": "value1"}, "handler1") + handler2 = MockEnvHandler({"key2": "value2"}, "handler2") + + source = EnvironmentSource(handlers=[handler1, handler2]) + + handlers = source.get_handlers() + assert len(handlers) == 2 + + def test_has_environment_priority(self): + """Should have ENVIRONMENT priority (medium).""" + source = EnvironmentSource() + + assert source.priority == SourcePriority.ENVIRONMENT + assert source.priority.value == 2 + + def test_discover_direct_returns_empty(self): + """Environment source should have no direct values.""" + handler = MockEnvHandler({"key1": "value1"}, "handler") + source = EnvironmentSource(handlers=[handler]) + + direct_values = source.discover_direct() + + assert len(direct_values) == 0 + + def test_discover_from_single_handler(self): + """Should discover values from single handler.""" + handler = MockEnvHandler( + {"account": "my_account", "user": "my_user"}, "handler" + ) + source = EnvironmentSource(handlers=[handler]) + + values = source.discover() + + assert len(values) == 2 + assert values["account"].value == "my_account" + assert values["user"].value == "my_user" + + def 
test_discover_from_multiple_handlers(self): + """Should discover values from multiple handlers.""" + handler1 = MockEnvHandler({"key1": "value1"}, "handler1") + handler2 = MockEnvHandler({"key2": "value2"}, "handler2") + + source = EnvironmentSource(handlers=[handler1, handler2]) + values = source.discover() + + assert len(values) == 2 + assert values["key1"].value == "value1" + assert values["key2"].value == "value2" + + def test_handler_ordering_first_wins(self): + """First handler with value should win for same key.""" + handler1 = MockEnvHandler({"account": "handler1_account"}, "snowflake_cli_env") + handler2 = MockEnvHandler({"account": "handler2_account"}, "snowsql_env") + + source = EnvironmentSource(handlers=[handler1, handler2]) + values = source.discover(key="account") + + assert values["account"].value == "handler1_account" + assert values["account"].source_name == "snowflake_cli_env" + + def test_migration_scenario_snowflake_overrides_snowsql(self): + """ + Migration scenario: SNOWFLAKE_* vars should override SNOWSQL_* vars. + Simulates handler ordering for migration support. 
+ """ + # Handler order: SnowCLI first (higher priority), SnowSQL second (fallback) + snowflake_handler = MockEnvHandler( + {"account": "new_account", "user": "new_user"}, "snowflake_cli_env" + ) + snowsql_handler = MockEnvHandler( + {"account": "old_account", "user": "old_user", "password": "old_password"}, + "snowsql_env", + ) + + source = EnvironmentSource(handlers=[snowflake_handler, snowsql_handler]) + values = source.discover() + + # New values should win + assert values["account"].value == "new_account" + assert values["account"].source_name == "snowflake_cli_env" + assert values["user"].value == "new_user" + assert values["user"].source_name == "snowflake_cli_env" + + # Fallback to legacy for unmigrated keys + assert values["password"].value == "old_password" + assert values["password"].source_name == "snowsql_env" + + def test_discover_specific_key(self): + """Should discover specific key when provided.""" + handler = MockEnvHandler( + {"account": "my_account", "user": "my_user"}, "handler" + ) + source = EnvironmentSource(handlers=[handler]) + + values = source.discover(key="account") + + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "my_account" + + def test_discover_nonexistent_key(self): + """Should return empty dict for nonexistent key.""" + handler = MockEnvHandler({"account": "my_account"}, "handler") + source = EnvironmentSource(handlers=[handler]) + + values = source.discover(key="nonexistent") + + assert len(values) == 0 + + def test_supports_key_from_any_handler(self): + """Should return True if any handler supports the key.""" + handler1 = MockEnvHandler({"key1": "value1"}, "handler1") + handler2 = MockEnvHandler({"key2": "value2"}, "handler2") + + source = EnvironmentSource(handlers=[handler1, handler2]) + + assert source.supports_key("key1") is True + assert source.supports_key("key2") is True + assert source.supports_key("nonexistent") is False + + def test_no_handlers_returns_empty(self): + """With 
no handlers, should return empty dict.""" + source = EnvironmentSource(handlers=[]) + + values = source.discover() + + assert len(values) == 0 + + def test_values_have_correct_priority(self): + """All values should have ENVIRONMENT priority.""" + handler = MockEnvHandler({"account": "my_account"}, "handler") + source = EnvironmentSource(handlers=[handler]) + + values = source.discover() + + assert values["account"].priority == SourcePriority.ENVIRONMENT + + def test_add_handler_dynamically(self): + """Should be able to add handlers after creation.""" + source = EnvironmentSource(handlers=[]) + handler = MockEnvHandler({"account": "my_account"}, "handler") + + source.add_handler(handler) + values = source.discover() + + assert len(values) == 1 + assert values["account"].value == "my_account" + + def test_set_handlers_replaces_all(self): + """Should replace all handlers with new list.""" + handler1 = MockEnvHandler({"key1": "value1"}, "handler1") + handler2 = MockEnvHandler({"key2": "value2"}, "handler2") + handler3 = MockEnvHandler({"key3": "value3"}, "handler3") + + source = EnvironmentSource(handlers=[handler1, handler2]) + source.set_handlers([handler3]) + + values = source.discover() + + assert len(values) == 1 + assert "key3" in values + assert "key1" not in values + + def test_handler_failure_does_not_break_discovery(self): + """Failed handler should not prevent other handlers from working.""" + + class FailingHandler(SourceHandler): + @property + def source_name(self) -> str: + return "failing" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.ENVIRONMENT + + @property + def handler_type(self) -> str: + return "failing" + + def can_handle(self) -> bool: + return True + + def discover(self, key=None): + raise RuntimeError("Handler failed") + + def supports_key(self, key: str) -> bool: + return True + + failing = FailingHandler() + working = MockEnvHandler({"account": "my_account"}, "working") + + source = 
EnvironmentSource(handlers=[failing, working]) + values = source.discover() + + # Should still get value from working handler + assert len(values) == 1 + assert values["account"].value == "my_account" + + def test_empty_handler_returns_no_values(self): + """Handler with no data should contribute no values.""" + empty_handler = MockEnvHandler({}, "empty") + full_handler = MockEnvHandler({"account": "my_account"}, "full") + + source = EnvironmentSource(handlers=[empty_handler, full_handler]) + values = source.discover() + + assert len(values) == 1 + assert values["account"].value == "my_account" diff --git a/tests/config_ng/test_file_source.py b/tests/config_ng/test_file_source.py new file mode 100644 index 0000000000..0a2d1da8d3 --- /dev/null +++ b/tests/config_ng/test_file_source.py @@ -0,0 +1,463 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for FileSource. 
+ +Tests verify: +- Lowest priority source (FILE) +- File-based discovery with handlers +- File path ordering for precedence +- Handler ordering within files +- File existence handling +""" + +from pathlib import Path +from typing import Any, Dict, Optional + +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority +from snowflake.cli.api.config_ng.handlers import SourceHandler +from snowflake.cli.api.config_ng.sources import FileSource + + +class MockFileHandler(SourceHandler): + """Mock file handler for testing.""" + + def __init__( + self, + data: Dict[Path, Dict[str, Any]], + name: str = "mock_file_handler", + file_extensions: Optional[list] = None, + ): + self._data = data # Path -> {key: value} + self._name = name + self._file_extensions = file_extensions or [".toml", ".conf"] + + @property + def source_name(self) -> str: + return self._name + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + @property + def handler_type(self) -> str: + return "mock_file" + + def can_handle(self) -> bool: + return len(self._data) > 0 + + def can_handle_file(self, file_path: Path) -> bool: + return file_path.suffix in self._file_extensions + + def discover_from_file( + self, file_path: Path, key: Optional[str] = None + ) -> Dict[str, ConfigValue]: + if file_path not in self._data: + return {} + + file_data = self._data[file_path] + + if key is None: + return { + k: ConfigValue( + key=k, + value=v, + source_name=self.source_name, + priority=self.priority, + ) + for k, v in file_data.items() + } + elif key in file_data: + return { + key: ConfigValue( + key=key, + value=file_data[key], + source_name=self.source_name, + priority=self.priority, + ) + } + return {} + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + # Not used in FileSource - discover_from_file is called instead + return {} + + def supports_key(self, key: str) -> bool: + # Check if key exists in any file + return any(key in file_data for 
file_data in self._data.values()) + + +class TestFileSource: + """Test suite for FileSource.""" + + def test_create_with_no_paths_or_handlers(self): + """Should create source with empty file paths and handlers.""" + source = FileSource() + + assert source.source_name == "configuration_files" + assert source.priority == SourcePriority.FILE + assert len(source.get_file_paths()) == 0 + assert len(source.get_handlers()) == 0 + + def test_create_with_file_paths(self, tmp_path): + """Should create source with provided file paths.""" + file1 = tmp_path / "config1.toml" + file2 = tmp_path / "config2.toml" + file1.touch() + file2.touch() + + source = FileSource(file_paths=[file1, file2]) + + paths = source.get_file_paths() + assert len(paths) == 2 + assert file1 in paths + assert file2 in paths + + def test_has_file_priority(self): + """Should have FILE priority (lowest).""" + source = FileSource() + + assert source.priority == SourcePriority.FILE + assert source.priority.value == 3 + + def test_discover_direct_returns_empty(self): + """File source should have no direct values.""" + source = FileSource() + + direct_values = source.discover_direct() + + assert len(direct_values) == 0 + + def test_discover_from_single_file(self, tmp_path): + """Should discover values from single file.""" + file_path = tmp_path / "config.toml" + file_path.touch() + + handler = MockFileHandler( + {file_path: {"account": "my_account", "user": "my_user"}}, "toml_handler" + ) + + source = FileSource(file_paths=[file_path], handlers=[handler]) + values = source.discover() + + assert len(values) == 2 + assert values["account"].value == "my_account" + assert values["user"].value == "my_user" + + def test_discover_from_multiple_files(self, tmp_path): + """Should discover values from multiple files.""" + file1 = tmp_path / "config1.toml" + file2 = tmp_path / "config2.toml" + file1.touch() + file2.touch() + + handler = MockFileHandler( + { + file1: {"key1": "value1"}, + file2: {"key2": "value2"}, + }, + 
"toml_handler", + ) + + source = FileSource(file_paths=[file1, file2], handlers=[handler]) + values = source.discover() + + assert len(values) == 2 + assert values["key1"].value == "value1" + assert values["key2"].value == "value2" + + def test_file_path_ordering_first_wins(self, tmp_path): + """First file path with value should win for same key.""" + file1 = tmp_path / "config1.toml" + file2 = tmp_path / "config2.toml" + file1.touch() + file2.touch() + + handler = MockFileHandler( + { + file1: {"account": "account_from_file1"}, + file2: {"account": "account_from_file2"}, + }, + "toml_handler", + ) + + source = FileSource(file_paths=[file1, file2], handlers=[handler]) + values = source.discover(key="account") + + assert values["account"].value == "account_from_file1" + + def test_handler_ordering_first_wins(self, tmp_path): + """First handler that can read file should win for same key.""" + file_path = tmp_path / "config.toml" + file_path.touch() + + handler1 = MockFileHandler( + {file_path: {"account": "handler1_account"}}, "snowcli_toml" + ) + handler2 = MockFileHandler( + {file_path: {"account": "handler2_account"}}, "legacy_toml" + ) + + source = FileSource(file_paths=[file_path], handlers=[handler1, handler2]) + values = source.discover(key="account") + + assert values["account"].value == "handler1_account" + assert values["account"].source_name == "snowcli_toml" + + def test_skips_nonexistent_files(self, tmp_path): + """Should skip files that don't exist.""" + existing_file = tmp_path / "exists.toml" + nonexistent_file = tmp_path / "does_not_exist.toml" + existing_file.touch() + + handler = MockFileHandler( + { + existing_file: {"key1": "value1"}, + nonexistent_file: {"key2": "value2"}, + }, + "handler", + ) + + source = FileSource( + file_paths=[nonexistent_file, existing_file], handlers=[handler] + ) + values = source.discover() + + # Should only get value from existing file + assert len(values) == 1 + assert "key1" in values + assert "key2" not in values + 
+ def test_skips_files_handler_cannot_handle(self, tmp_path): + """Should skip files that handler cannot handle.""" + toml_file = tmp_path / "config.toml" + json_file = tmp_path / "config.json" + toml_file.touch() + json_file.touch() + + # Handler only handles .toml files + handler = MockFileHandler( + { + toml_file: {"key1": "value1"}, + json_file: {"key2": "value2"}, + }, + "toml_handler", + file_extensions=[".toml"], + ) + + source = FileSource(file_paths=[toml_file, json_file], handlers=[handler]) + values = source.discover() + + # Should only get value from .toml file + assert len(values) == 1 + assert "key1" in values + assert "key2" not in values + + def test_migration_scenario_snowcli_overrides_snowsql(self, tmp_path): + """ + Migration scenario: SnowCLI files should override SnowSQL files. + Simulates file ordering for migration support. + """ + snowcli_file = tmp_path / "connections.toml" + snowsql_file = tmp_path / "snowsql_config" + snowcli_file.touch() + snowsql_file.touch() + + # SnowCLI handler only handles .toml files + snowcli_handler = MockFileHandler( + {snowcli_file: {"account": "new_account", "user": "new_user"}}, + "snowcli_toml", + file_extensions=[".toml"], + ) + # SnowSQL handler handles files without extension + snowsql_handler = MockFileHandler( + { + snowsql_file: { + "account": "old_account", + "user": "old_user", + "password": "old_password", + } + }, + "snowsql_config", + file_extensions=[""], # No extension + ) + + # SnowCLI file comes first (higher precedence) + source = FileSource( + file_paths=[snowcli_file, snowsql_file], + handlers=[snowcli_handler, snowsql_handler], + ) + values = source.discover() + + # New values from SnowCLI should win + assert values["account"].value == "new_account" + assert values["account"].source_name == "snowcli_toml" + assert values["user"].value == "new_user" + + # Fallback to SnowSQL for unmigrated keys + assert values["password"].value == "old_password" + assert values["password"].source_name == 
"snowsql_config" + + def test_discover_specific_key(self, tmp_path): + """Should discover specific key when provided.""" + file_path = tmp_path / "config.toml" + file_path.touch() + + handler = MockFileHandler( + {file_path: {"account": "my_account", "user": "my_user"}}, "handler" + ) + + source = FileSource(file_paths=[file_path], handlers=[handler]) + values = source.discover(key="account") + + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "my_account" + + def test_discover_nonexistent_key(self, tmp_path): + """Should return empty dict for nonexistent key.""" + file_path = tmp_path / "config.toml" + file_path.touch() + + handler = MockFileHandler({file_path: {"account": "my_account"}}, "handler") + + source = FileSource(file_paths=[file_path], handlers=[handler]) + values = source.discover(key="nonexistent") + + assert len(values) == 0 + + def test_supports_key_from_any_handler(self, tmp_path): + """Should return True if any handler supports the key.""" + handler1 = MockFileHandler({tmp_path / "f1": {"key1": "value1"}}, "handler1") + handler2 = MockFileHandler({tmp_path / "f2": {"key2": "value2"}}, "handler2") + + source = FileSource(handlers=[handler1, handler2]) + + assert source.supports_key("key1") is True + assert source.supports_key("key2") is True + assert source.supports_key("nonexistent") is False + + def test_handler_failure_does_not_break_discovery(self, tmp_path): + """Failed handler should not prevent other handlers from working.""" + file_path = tmp_path / "config.toml" + file_path.touch() + + class FailingHandler(SourceHandler): + @property + def source_name(self) -> str: + return "failing" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + @property + def handler_type(self) -> str: + return "failing" + + def can_handle(self) -> bool: + return True + + def can_handle_file(self, file_path: Path) -> bool: + return True + + def discover_from_file(self, file_path: Path, 
key=None): + raise RuntimeError("Handler failed") + + def discover(self, key=None): + return {} + + def supports_key(self, key: str) -> bool: + return True + + failing = FailingHandler() + working = MockFileHandler({file_path: {"account": "my_account"}}, "working") + + source = FileSource(file_paths=[file_path], handlers=[failing, working]) + values = source.discover() + + # Should still get value from working handler + assert len(values) == 1 + assert values["account"].value == "my_account" + + def test_add_file_path_append(self, tmp_path): + """Should append file path to end of list.""" + file1 = tmp_path / "config1.toml" + file2 = tmp_path / "config2.toml" + + source = FileSource(file_paths=[file1]) + source.add_file_path(file2) + + paths = source.get_file_paths() + assert len(paths) == 2 + assert paths[1] == file2 + + def test_add_file_path_prepend(self, tmp_path): + """Should prepend file path to beginning of list.""" + file1 = tmp_path / "config1.toml" + file2 = tmp_path / "config2.toml" + + source = FileSource(file_paths=[file1]) + source.add_file_path(file2, position=0) + + paths = source.get_file_paths() + assert len(paths) == 2 + assert paths[0] == file2 + + def test_set_file_paths(self, tmp_path): + """Should replace all file paths with new list.""" + file1 = tmp_path / "config1.toml" + file2 = tmp_path / "config2.toml" + file3 = tmp_path / "config3.toml" + + source = FileSource(file_paths=[file1, file2]) + source.set_file_paths([file3]) + + paths = source.get_file_paths() + assert len(paths) == 1 + assert paths[0] == file3 + + def test_get_file_paths_returns_copy(self, tmp_path): + """get_file_paths should return a copy, not the original list.""" + file1 = tmp_path / "config.toml" + source = FileSource(file_paths=[file1]) + + paths = source.get_file_paths() + paths.clear() + + # Original list should be unchanged + assert len(source.get_file_paths()) == 1 + + def test_no_files_returns_empty(self): + """With no file paths, should return empty dict.""" + 
handler = MockFileHandler({}, "handler") + source = FileSource(file_paths=[], handlers=[handler]) + + values = source.discover() + + assert len(values) == 0 + + def test_values_have_correct_priority(self, tmp_path): + """All values should have FILE priority.""" + file_path = tmp_path / "config.toml" + file_path.touch() + + handler = MockFileHandler({file_path: {"account": "my_account"}}, "handler") + source = FileSource(file_paths=[file_path], handlers=[handler]) + + values = source.discover() + + assert values["account"].priority == SourcePriority.FILE From 0fb6eb6256db94bd0f4dbd737c49b9dacb0f4a78 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 6 Oct 2025 10:07:11 +0200 Subject: [PATCH 05/78] SNOW-2306184: config refactory - env vars discovery --- src/snowflake/cli/api/config_ng/__init__.py | 6 + .../cli/api/config_ng/env_handlers.py | 218 ++++++++++++ tests/config_ng/test_env_handler_migration.py | 321 ++++++++++++++++++ tests/config_ng/test_snowcli_env_handler.py | 287 ++++++++++++++++ tests/config_ng/test_snowsql_env_handler.py | 309 +++++++++++++++++ 5 files changed, 1141 insertions(+) create mode 100644 src/snowflake/cli/api/config_ng/env_handlers.py create mode 100644 tests/config_ng/test_env_handler_migration.py create mode 100644 tests/config_ng/test_snowcli_env_handler.py create mode 100644 tests/config_ng/test_snowsql_env_handler.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index e31b54da80..fe6dfdaed1 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -29,6 +29,10 @@ SourcePriority, ValueSource, ) +from snowflake.cli.api.config_ng.env_handlers import ( + SnowCliEnvHandler, + SnowSqlEnvHandler, +) from snowflake.cli.api.config_ng.sources import ( CliArgumentSource, ConfigurationSource, @@ -44,6 +48,8 @@ "FileSource", "ResolutionEntry", "ResolutionHistory", + "SnowCliEnvHandler", + "SnowSqlEnvHandler", "SourcePriority", 
"ValueSource", ] diff --git a/src/snowflake/cli/api/config_ng/env_handlers.py b/src/snowflake/cli/api/config_ng/env_handlers.py new file mode 100644 index 0000000000..6ec505306d --- /dev/null +++ b/src/snowflake/cli/api/config_ng/env_handlers.py @@ -0,0 +1,218 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Environment variable handlers for configuration system. + +This module implements handlers for: +- SNOWFLAKE_* environment variables (SnowCLI format) +- SNOWSQL_* environment variables (Legacy SnowSQL format with key mapping) +""" + +from __future__ import annotations + +import os +from typing import Any, Dict, Optional + +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority +from snowflake.cli.api.config_ng.handlers import SourceHandler + + +class SnowCliEnvHandler(SourceHandler): + """ + Handler for Snowflake CLI environment variables. 
+ Format: SNOWFLAKE_ → key + Example: SNOWFLAKE_ACCOUNT → account + """ + + PREFIX = "SNOWFLAKE_" + + @property + def source_name(self) -> str: + return "snowflake_cli_env" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.ENVIRONMENT + + @property + def handler_type(self) -> str: + return "snowflake_cli_env" + + def can_handle(self) -> bool: + """Check if any SNOWFLAKE_* env vars are set.""" + return any(k.startswith(self.PREFIX) for k in os.environ) + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """Discover values from SNOWFLAKE_* environment variables.""" + values = {} + + if key is not None: + # Discover specific key + env_key = f"{self.PREFIX}{key.upper()}" + if env_key in os.environ: + raw = os.environ[env_key] + values[key] = ConfigValue( + key=key, + value=self._parse_value(raw), + source_name=self.source_name, + priority=self.priority, + raw_value=raw, + ) + else: + # Discover all SNOWFLAKE_* variables + for env_key, env_value in os.environ.items(): + if env_key.startswith(self.PREFIX): + config_key = env_key[len(self.PREFIX) :].lower() + values[config_key] = ConfigValue( + key=config_key, + value=self._parse_value(env_value), + source_name=self.source_name, + priority=self.priority, + raw_value=env_value, + ) + + return values + + def supports_key(self, key: str) -> bool: + """Any string key can be represented as SNOWFLAKE_* env var.""" + return isinstance(key, str) + + def _parse_value(self, value: str) -> Any: + """ + Parse string value to appropriate type. + Supports: boolean, integer, string + """ + # Boolean - case-insensitive + lower_val = value.lower() + if lower_val in ("true", "1", "yes", "on"): + return True + if lower_val in ("false", "0", "no", "off"): + return False + + # Integer + try: + return int(value) + except ValueError: + pass + + # String (default) + return value + + +class SnowSqlEnvHandler(SourceHandler): + """ + Handler for SnowSQL-compatible environment variables. 
+ Format: SNOWSQL_ → key + Supports key mappings for SnowSQL-specific naming. + + Key Mappings (SnowSQL → SnowCLI): + - PWD → password + - All other keys map directly (ACCOUNT → account, USER → user, etc.) + """ + + PREFIX = "SNOWSQL_" + + # Key mappings from SnowSQL to SnowCLI + # SnowSQL uses PWD, but SnowCLI uses PASSWORD + KEY_MAPPINGS: Dict[str, str] = { + "pwd": "password", + } + + @property + def source_name(self) -> str: + return "snowsql_env" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.ENVIRONMENT + + @property + def handler_type(self) -> str: + return "snowsql_env" + + def can_handle(self) -> bool: + """Check if any SNOWSQL_* env vars are set.""" + return any(k.startswith(self.PREFIX) for k in os.environ) + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Discover values from SNOWSQL_* environment variables. + Applies key mappings for compatibility. + """ + values = {} + + if key is not None: + # Reverse lookup: find SnowSQL key for CLI key + snowsql_key = self.get_snowsql_key(key) + env_key = f"{self.PREFIX}{snowsql_key.upper()}" + + if env_key in os.environ: + raw = os.environ[env_key] + values[key] = ConfigValue( + key=key, # Normalized SnowCLI key + value=self._parse_value(raw), + source_name=self.source_name, + priority=self.priority, + raw_value=raw, + ) + else: + # Discover all SNOWSQL_* variables + for env_key, env_value in os.environ.items(): + if env_key.startswith(self.PREFIX): + snowsql_key = env_key[len(self.PREFIX) :].lower() + # Map to SnowCLI key + config_key = self.KEY_MAPPINGS.get(snowsql_key, snowsql_key) + + values[config_key] = ConfigValue( + key=config_key, + value=self._parse_value(env_value), + source_name=self.source_name, + priority=self.priority, + raw_value=env_value, + ) + + return values + + def supports_key(self, key: str) -> bool: + """Any string key can be represented as SNOWSQL_* env var.""" + return isinstance(key, str) + + def get_snowsql_key(self, 
cli_key: str) -> str: + """Reverse mapping: CLI key → SnowSQL key.""" + for snowsql_key, cli_mapped_key in self.KEY_MAPPINGS.items(): + if cli_mapped_key == cli_key: + return snowsql_key + return cli_key + + def _parse_value(self, value: str) -> Any: + """ + Parse string value to appropriate type. + Supports: boolean, integer, string + """ + # Boolean - case-insensitive + lower_val = value.lower() + if lower_val in ("true", "1", "yes", "on"): + return True + if lower_val in ("false", "0", "no", "off"): + return False + + # Integer + try: + return int(value) + except ValueError: + pass + + # String (default) + return value diff --git a/tests/config_ng/test_env_handler_migration.py b/tests/config_ng/test_env_handler_migration.py new file mode 100644 index 0000000000..4e3fcc29c6 --- /dev/null +++ b/tests/config_ng/test_env_handler_migration.py @@ -0,0 +1,321 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Integration tests for environment variable handler migration scenarios. 
+
+Tests verify:
+- Migration from SnowSQL to SnowCLI environment variables
+- Handler ordering (SNOWFLAKE_* overrides SNOWSQL_*)
+- Fallback behavior for unmigrated keys
+- Complete migration scenarios
+"""
+
+import os
+from unittest.mock import patch
+
+from snowflake.cli.api.config_ng.env_handlers import (
+    SnowCliEnvHandler,
+    SnowSqlEnvHandler,
+)
+from snowflake.cli.api.config_ng.sources import EnvironmentSource
+
+
+class TestEnvironmentHandlerMigration:
+    """Test suite for environment variable migration scenarios."""
+
+    # NOTE: every test patches os.environ with clear=True so the ambient
+    # environment of the test runner cannot leak into the assertions.
+
+    def test_pure_snowsql_environment(self):
+        """Scenario: User has only SNOWSQL_* environment variables."""
+        env_vars = {
+            "SNOWSQL_ACCOUNT": "old_account",
+            "SNOWSQL_USER": "old_user",
+            "SNOWSQL_PWD": "old_password",
+            "SNOWSQL_WAREHOUSE": "old_warehouse",
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            # Setup source with both handlers (SnowCLI first, SnowSQL second)
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+
+            # All values should come from SnowSQL handler
+            assert len(values) == 4
+            assert values["account"].value == "old_account"
+            assert values["account"].source_name == "snowsql_env"
+            assert values["user"].value == "old_user"
+            assert values["password"].value == "old_password"  # Mapped from PWD
+            assert values["warehouse"].value == "old_warehouse"
+
+    def test_pure_snowflake_cli_environment(self):
+        """Scenario: User has migrated to SNOWFLAKE_* environment variables."""
+        env_vars = {
+            "SNOWFLAKE_ACCOUNT": "new_account",
+            "SNOWFLAKE_USER": "new_user",
+            "SNOWFLAKE_PASSWORD": "new_password",
+            "SNOWFLAKE_WAREHOUSE": "new_warehouse",
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+
+            # All values should come from SnowCLI handler
+            assert len(values) == 4
+            assert values["account"].value == "new_account"
+            assert values["account"].source_name == "snowflake_cli_env"
+            assert values["user"].value == "new_user"
+            assert values["password"].value == "new_password"
+            assert values["warehouse"].value == "new_warehouse"
+
+    def test_partial_migration_snowflake_overrides_snowsql(self):
+        """
+        Scenario: User is migrating - some SNOWFLAKE_* vars override SNOWSQL_*.
+        This is the key migration scenario.
+        """
+        env_vars = {
+            # Legacy SnowSQL vars (complete set)
+            "SNOWSQL_ACCOUNT": "old_account",
+            "SNOWSQL_USER": "old_user",
+            "SNOWSQL_PWD": "old_password",
+            "SNOWSQL_WAREHOUSE": "old_warehouse",
+            "SNOWSQL_DATABASE": "old_database",
+            # New SnowCLI vars (partial migration)
+            "SNOWFLAKE_ACCOUNT": "new_account",
+            "SNOWFLAKE_USER": "new_user",
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            # Handler order: SnowCLI first (higher priority), SnowSQL second (fallback)
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+
+            # Migrated keys should use SNOWFLAKE_* values
+            assert values["account"].value == "new_account"
+            assert values["account"].source_name == "snowflake_cli_env"
+            assert values["user"].value == "new_user"
+            assert values["user"].source_name == "snowflake_cli_env"
+
+            # Unmigrated keys should fallback to SNOWSQL_* values
+            assert values["password"].value == "old_password"
+            assert values["password"].source_name == "snowsql_env"
+            assert values["warehouse"].value == "old_warehouse"
+            assert values["warehouse"].source_name == "snowsql_env"
+            assert values["database"].value == "old_database"
+            assert values["database"].source_name == "snowsql_env"
+
+    def test_migration_with_pwd_to_password_mapping(self):
+        """
+        Scenario: User migrates from SNOWSQL_PWD to SNOWFLAKE_PASSWORD.
+        Tests the key mapping during migration.
+        """
+        env_vars = {
+            "SNOWSQL_PWD": "old_password",
+            "SNOWFLAKE_PASSWORD": "new_password",
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+
+            # SNOWFLAKE_PASSWORD should override SNOWSQL_PWD
+            assert len(values) == 1  # Only "password" key
+            assert values["password"].value == "new_password"
+            assert values["password"].source_name == "snowflake_cli_env"
+
+    def test_migration_only_pwd_remains_in_snowsql(self):
+        """
+        Scenario: User has migrated everything except password.
+        SNOWSQL_PWD should still work as fallback.
+        """
+        env_vars = {
+            "SNOWFLAKE_ACCOUNT": "new_account",
+            "SNOWFLAKE_USER": "new_user",
+            "SNOWSQL_PWD": "old_password",  # Not yet migrated
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+
+            assert values["account"].value == "new_account"
+            assert values["account"].source_name == "snowflake_cli_env"
+            assert values["user"].value == "new_user"
+            assert values["user"].source_name == "snowflake_cli_env"
+            # Password from SnowSQL (mapped from PWD)
+            assert values["password"].value == "old_password"
+            assert values["password"].source_name == "snowsql_env"
+
+    def test_both_handlers_provide_different_keys(self):
+        """
+        Scenario: Each handler provides unique keys that don't overlap.
+        """
+        env_vars = {
+            "SNOWFLAKE_ACCOUNT": "new_account",
+            "SNOWSQL_WAREHOUSE": "old_warehouse",
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+
+            assert len(values) == 2
+            assert values["account"].source_name == "snowflake_cli_env"
+            assert values["warehouse"].source_name == "snowsql_env"
+
+    def test_handler_order_matters(self):
+        """
+        Verify that handler order determines precedence.
+        First handler with value wins.
+        """
+        env_vars = {
+            "SNOWFLAKE_ACCOUNT": "snowflake_value",
+            "SNOWSQL_ACCOUNT": "snowsql_value",
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            # Test SnowCLI first (correct order for migration)
+            source_snowcli_first = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+            values = source_snowcli_first.discover()
+            assert values["account"].value == "snowflake_value"
+
+            # Test SnowSQL first (wrong order, but tests the mechanism)
+            source_snowsql_first = EnvironmentSource(
+                handlers=[SnowSqlEnvHandler(), SnowCliEnvHandler()]
+            )
+            values = source_snowsql_first.discover()
+            assert values["account"].value == "snowsql_value"
+
+    def test_discover_specific_key_with_both_handlers(self):
+        """Should discover specific key considering both handlers."""
+        env_vars = {
+            "SNOWFLAKE_ACCOUNT": "new_account",
+            "SNOWSQL_USER": "old_user",
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            # Discover account - should get from SnowCLI
+            values = source.discover(key="account")
+            assert values["account"].value == "new_account"
+
+            # Discover user - should get from SnowSQL
+            values = source.discover(key="user")
+            assert values["user"].value == "old_user"
+
+    def test_empty_environment_both_handlers(self):
+        """With no environment variables, both handlers should return nothing."""
+        with patch.dict(os.environ, {}, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+            assert len(values) == 0
+
+    def test_complete_migration_timeline(self):
+        """
+        Simulates a complete migration timeline from Step 1 to Step 4.
+        """
+        # Step 1: Pure SnowSQL user
+        env_step1 = {"SNOWSQL_ACCOUNT": "account"}
+        with patch.dict(os.environ, env_step1, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+            values = source.discover()
+            assert values["account"].value == "account"
+            assert values["account"].source_name == "snowsql_env"
+
+        # Step 2: Start migration - add SNOWFLAKE_ACCOUNT
+        env_step2 = {
+            "SNOWSQL_ACCOUNT": "old_account",
+            "SNOWFLAKE_ACCOUNT": "new_account",
+        }
+        with patch.dict(os.environ, env_step2, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+            values = source.discover()
+            # SNOWFLAKE_* should win
+            assert values["account"].value == "new_account"
+            assert values["account"].source_name == "snowflake_cli_env"
+
+        # Step 3: SNOWSQL_ACCOUNT still present but ignored
+        env_step3 = {
+            "SNOWSQL_ACCOUNT": "old_account",  # Still set but ignored
+            "SNOWFLAKE_ACCOUNT": "new_account",
+        }
+        with patch.dict(os.environ, env_step3, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+            values = source.discover()
+            # Still uses SNOWFLAKE_*
+            assert values["account"].value == "new_account"
+            assert values["account"].source_name == "snowflake_cli_env"
+
+        # Step 4: Complete migration - remove SNOWSQL_ACCOUNT
+        env_step4 = {"SNOWFLAKE_ACCOUNT": "new_account"}
+        with patch.dict(os.environ, env_step4, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+            values = source.discover()
+            # Uses SNOWFLAKE_* (no change in behavior from step 3)
+            assert values["account"].value == "new_account"
+            assert values["account"].source_name == "snowflake_cli_env"
+
+    def test_mixed_types_from_both_handlers(self):
+        """Should handle different value types from both handlers."""
+        env_vars = {
+            "SNOWFLAKE_ACCOUNT": "my_account",  # String
+            "SNOWFLAKE_PORT": "443",  # Integer
+            "SNOWSQL_ENABLE_DIAG": "true",  # Boolean
+            "SNOWSQL_TIMEOUT": "30",  # Integer from SnowSQL
+        }
+
+        with patch.dict(os.environ, env_vars, clear=True):
+            source = EnvironmentSource(
+                handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()]
+            )
+
+            values = source.discover()
+
+            assert isinstance(values["account"].value, str)
+            assert isinstance(values["port"].value, int)
+            assert isinstance(values["enable_diag"].value, bool)
+            assert isinstance(values["timeout"].value, int)
diff --git a/tests/config_ng/test_snowcli_env_handler.py b/tests/config_ng/test_snowcli_env_handler.py
new file mode 100644
index 0000000000..23b1bf93eb
--- /dev/null
+++ b/tests/config_ng/test_snowcli_env_handler.py
+# Copyright (c) 2024 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Unit tests for SnowCliEnvHandler.
+ +Tests verify: +- SNOWFLAKE_* environment variable discovery +- Value type parsing (string, int, bool) +- Case handling (env vars are uppercase, keys are lowercase) +- Raw value preservation +- Priority and metadata +""" + +import os +from unittest.mock import patch + +from snowflake.cli.api.config_ng.core import SourcePriority +from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler + + +class TestSnowCliEnvHandler: + """Test suite for SnowCliEnvHandler.""" + + def test_create_handler(self): + """Should create handler with correct properties.""" + handler = SnowCliEnvHandler() + + assert handler.source_name == "snowflake_cli_env" + assert handler.priority == SourcePriority.ENVIRONMENT + assert handler.handler_type == "snowflake_cli_env" + + def test_can_handle_with_no_env_vars(self): + """Should return False when no SNOWFLAKE_* vars are set.""" + with patch.dict(os.environ, {}, clear=True): + handler = SnowCliEnvHandler() + assert handler.can_handle() is False + + def test_can_handle_with_env_vars(self): + """Should return True when SNOWFLAKE_* vars are present.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "test_account"}): + handler = SnowCliEnvHandler() + assert handler.can_handle() is True + + def test_discover_single_string_value(self): + """Should discover single string value.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "my_account" + assert values["account"].key == "account" + + def test_discover_multiple_values(self): + """Should discover multiple environment variables.""" + env_vars = { + "SNOWFLAKE_ACCOUNT": "my_account", + "SNOWFLAKE_USER": "my_user", + "SNOWFLAKE_WAREHOUSE": "my_warehouse", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert len(values) == 3 + 
assert values["account"].value == "my_account" + assert values["user"].value == "my_user" + assert values["warehouse"].value == "my_warehouse" + + def test_discover_specific_key(self): + """Should discover specific key when provided.""" + env_vars = { + "SNOWFLAKE_ACCOUNT": "my_account", + "SNOWFLAKE_USER": "my_user", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover(key="account") + + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "my_account" + + def test_discover_nonexistent_key(self): + """Should return empty dict for nonexistent key.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover(key="nonexistent") + + assert len(values) == 0 + + def test_case_conversion(self): + """Should convert UPPERCASE env var names to lowercase config keys.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "test"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert "account" in values # lowercase key + assert "ACCOUNT" not in values + + def test_parse_string_value(self): + """Should parse string values as-is.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert values["account"].value == "my_account" + assert isinstance(values["account"].value, str) + + def test_parse_integer_value(self): + """Should parse integer strings as integers.""" + with patch.dict(os.environ, {"SNOWFLAKE_PORT": "443"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert values["port"].value == 443 + assert isinstance(values["port"].value, int) + + def test_parse_boolean_true_values(self): + """Should parse various true representations as boolean True.""" + true_values = ["true", "True", "TRUE", "1", "yes", "Yes", "on", "On"] + + for 
true_val in true_values: + with patch.dict( + os.environ, {"SNOWFLAKE_ENABLE_DIAG": true_val}, clear=True + ): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert values["enable_diag"].value is True, f"Failed for {true_val}" + assert isinstance(values["enable_diag"].value, bool) + + def test_parse_boolean_false_values(self): + """Should parse various false representations as boolean False.""" + false_values = ["false", "False", "FALSE", "0", "no", "No", "off", "Off"] + + for false_val in false_values: + with patch.dict( + os.environ, {"SNOWFLAKE_ENABLE_DIAG": false_val}, clear=True + ): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert values["enable_diag"].value is False, f"Failed for {false_val}" + assert isinstance(values["enable_diag"].value, bool) + + def test_raw_value_preservation(self): + """Should preserve raw string value in raw_value field.""" + with patch.dict(os.environ, {"SNOWFLAKE_PORT": "443"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + config_value = values["port"] + assert config_value.value == 443 # Parsed as int + assert config_value.raw_value == "443" # Original string + + def test_values_have_correct_metadata(self): + """Discovered values should have correct metadata.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + config_value = values["account"] + assert config_value.source_name == "snowflake_cli_env" + assert config_value.priority == SourcePriority.ENVIRONMENT + assert config_value.key == "account" + + def test_supports_any_string_key(self): + """Should support any string key.""" + handler = SnowCliEnvHandler() + + assert handler.supports_key("account") is True + assert handler.supports_key("user") is True + assert handler.supports_key("any_key") is True + assert handler.supports_key("") is True + + def test_ignores_non_snowflake_env_vars(self): + """Should 
ignore environment variables without SNOWFLAKE_ prefix.""" + env_vars = { + "SNOWFLAKE_ACCOUNT": "snowflake_account", + "SNOWSQL_ACCOUNT": "snowsql_account", + "ACCOUNT": "plain_account", + "PATH": "/usr/bin", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + # Should only get SNOWFLAKE_* variables + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "snowflake_account" + + def test_empty_string_value(self): + """Should handle empty string values.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": ""}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert values["account"].value == "" + assert isinstance(values["account"].value, str) + + def test_special_characters_in_value(self): + """Should handle special characters in values.""" + with patch.dict( + os.environ, + {"SNOWFLAKE_PASSWORD": "p@ss!w0rd#123"}, + clear=True, + ): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert values["password"].value == "p@ss!w0rd#123" + + def test_whitespace_in_value(self): + """Should preserve whitespace in values.""" + with patch.dict( + os.environ, + {"SNOWFLAKE_DESCRIPTION": " spaced value "}, + clear=True, + ): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert values["description"].value == " spaced value " + + def test_numeric_string_not_parsed_as_int(self): + """Should handle strings that look numeric but shouldn't be parsed.""" + # Account identifier that looks like a number + with patch.dict(os.environ, {"SNOWFLAKE_SESSION_ID": "12345abc"}, clear=True): + handler = SnowCliEnvHandler() + values = handler.discover() + + # Should remain string because "abc" makes it non-numeric + assert values["session_id"].value == "12345abc" + assert isinstance(values["session_id"].value, str) + + def test_underscore_in_key_preserved(self): + """Should preserve underscores in environment variable 
keys.""" + with patch.dict( + os.environ, {"SNOWFLAKE_PRIVATE_KEY_PATH": "/path/to/key"}, clear=True + ): + handler = SnowCliEnvHandler() + values = handler.discover() + + assert "private_key_path" in values + assert values["private_key_path"].value == "/path/to/key" + + def test_multiple_discover_calls_consistent(self): + """Multiple discover calls should return consistent results.""" + with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): + handler = SnowCliEnvHandler() + + values1 = handler.discover() + values2 = handler.discover() + + assert values1 == values2 + + def test_discover_with_mixed_case_produces_lowercase_keys(self): + """All config keys should be lowercase regardless of env var case.""" + with patch.dict( + os.environ, + { + "SNOWFLAKE_ACCOUNT": "test1", + "SNOWFLAKE_User": "test2", # Mixed case shouldn't happen, but test anyway + }, + clear=True, + ): + handler = SnowCliEnvHandler() + values = handler.discover() + + # All keys should be lowercase + for key in values.keys(): + assert key == key.lower() diff --git a/tests/config_ng/test_snowsql_env_handler.py b/tests/config_ng/test_snowsql_env_handler.py new file mode 100644 index 0000000000..e02312e665 --- /dev/null +++ b/tests/config_ng/test_snowsql_env_handler.py @@ -0,0 +1,309 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for SnowSqlEnvHandler. 
+ +Tests verify: +- SNOWSQL_* environment variable discovery +- Key mapping (PWD → password) +- Value type parsing (string, int, bool) +- Case handling +- Raw value preservation +- Migration support +""" + +import os +from unittest.mock import patch + +from snowflake.cli.api.config_ng.core import SourcePriority +from snowflake.cli.api.config_ng.env_handlers import SnowSqlEnvHandler + + +class TestSnowSqlEnvHandler: + """Test suite for SnowSqlEnvHandler.""" + + def test_create_handler(self): + """Should create handler with correct properties.""" + handler = SnowSqlEnvHandler() + + assert handler.source_name == "snowsql_env" + assert handler.priority == SourcePriority.ENVIRONMENT + assert handler.handler_type == "snowsql_env" + + def test_can_handle_with_no_env_vars(self): + """Should return False when no SNOWSQL_* vars are set.""" + with patch.dict(os.environ, {}, clear=True): + handler = SnowSqlEnvHandler() + assert handler.can_handle() is False + + def test_can_handle_with_env_vars(self): + """Should return True when SNOWSQL_* vars are present.""" + with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "test_account"}): + handler = SnowSqlEnvHandler() + assert handler.can_handle() is True + + def test_discover_single_string_value(self): + """Should discover single string value.""" + with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "my_account" + + def test_key_mapping_pwd_to_password(self): + """Should map SNOWSQL_PWD to 'password' key.""" + with patch.dict(os.environ, {"SNOWSQL_PWD": "secret123"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert len(values) == 1 + assert "password" in values # Mapped key + assert "pwd" not in values # Original key should not appear + assert values["password"].value == "secret123" + + def 
test_discover_multiple_values_with_mapping(self): + """Should discover multiple values with key mapping applied.""" + env_vars = { + "SNOWSQL_ACCOUNT": "my_account", + "SNOWSQL_USER": "my_user", + "SNOWSQL_PWD": "my_password", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert len(values) == 3 + assert values["account"].value == "my_account" + assert values["user"].value == "my_user" + assert values["password"].value == "my_password" # Mapped from PWD + + def test_discover_specific_key_direct(self): + """Should discover specific key that doesn't require mapping.""" + env_vars = { + "SNOWSQL_ACCOUNT": "my_account", + "SNOWSQL_USER": "my_user", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover(key="account") + + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "my_account" + + def test_discover_specific_key_with_mapping(self): + """Should discover specific key using reverse mapping.""" + with patch.dict(os.environ, {"SNOWSQL_PWD": "secret123"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover(key="password") + + assert len(values) == 1 + assert "password" in values + assert values["password"].value == "secret123" + + def test_discover_nonexistent_key(self): + """Should return empty dict for nonexistent key.""" + with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover(key="nonexistent") + + assert len(values) == 0 + + def test_case_conversion(self): + """Should convert UPPERCASE env var names to lowercase config keys.""" + with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "test"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert "account" in values # lowercase key + assert "ACCOUNT" not in values + + def test_parse_value_types_same_as_snowcli(self): 
+ """Should parse values the same way as SnowCliEnvHandler.""" + env_vars = { + "SNOWSQL_ACCOUNT": "my_account", # String + "SNOWSQL_PORT": "443", # Integer + "SNOWSQL_ENABLE_DIAG": "true", # Boolean + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert values["account"].value == "my_account" + assert isinstance(values["account"].value, str) + + assert values["port"].value == 443 + assert isinstance(values["port"].value, int) + + assert values["enable_diag"].value is True + assert isinstance(values["enable_diag"].value, bool) + + def test_parse_boolean_values(self): + """Should parse various boolean representations.""" + for true_val in ["true", "TRUE", "1", "yes", "on"]: + with patch.dict(os.environ, {"SNOWSQL_ENABLE_DIAG": true_val}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + assert values["enable_diag"].value is True + + for false_val in ["false", "FALSE", "0", "no", "off"]: + with patch.dict(os.environ, {"SNOWSQL_ENABLE_DIAG": false_val}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + assert values["enable_diag"].value is False + + def test_raw_value_preservation(self): + """Should preserve raw string value in raw_value field.""" + with patch.dict(os.environ, {"SNOWSQL_PORT": "443"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + config_value = values["port"] + assert config_value.value == 443 # Parsed as int + assert config_value.raw_value == "443" # Original string + + def test_values_have_correct_metadata(self): + """Discovered values should have correct metadata.""" + with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + config_value = values["account"] + assert config_value.source_name == "snowsql_env" + assert config_value.priority == SourcePriority.ENVIRONMENT + assert config_value.key == "account" 
+ + def test_supports_any_string_key(self): + """Should support any string key.""" + handler = SnowSqlEnvHandler() + + assert handler.supports_key("account") is True + assert handler.supports_key("password") is True + assert handler.supports_key("any_key") is True + + def test_ignores_non_snowsql_env_vars(self): + """Should ignore environment variables without SNOWSQL_ prefix.""" + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql_account", + "SNOWFLAKE_ACCOUNT": "snowflake_account", + "ACCOUNT": "plain_account", + "PATH": "/usr/bin", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + # Should only get SNOWSQL_* variables + assert len(values) == 1 + assert "account" in values + assert values["account"].value == "snowsql_account" + + def test_reverse_mapping_lookup(self): + """Should correctly perform reverse lookup for mapped keys.""" + handler = SnowSqlEnvHandler() + + # Test reverse mapping: password -> pwd + snowsql_key = handler.get_snowsql_key("password") + assert snowsql_key == "pwd" + + # Test non-mapped key returns itself + snowsql_key = handler.get_snowsql_key("account") + assert snowsql_key == "account" + + def test_migration_scenario_all_snowsql_vars(self): + """Simulates user with only SnowSQL environment variables.""" + env_vars = { + "SNOWSQL_ACCOUNT": "legacy_account", + "SNOWSQL_USER": "legacy_user", + "SNOWSQL_PWD": "legacy_password", + "SNOWSQL_WAREHOUSE": "legacy_warehouse", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert len(values) == 4 + assert values["account"].value == "legacy_account" + assert values["user"].value == "legacy_user" + assert values["password"].value == "legacy_password" + assert values["warehouse"].value == "legacy_warehouse" + + def test_common_snowsql_variables(self): + """Should handle common SnowSQL environment variables.""" + env_vars = { + "SNOWSQL_ACCOUNT": "my_account", + 
"SNOWSQL_USER": "my_user", + "SNOWSQL_PWD": "my_password", + "SNOWSQL_DATABASE": "my_database", + "SNOWSQL_SCHEMA": "my_schema", + "SNOWSQL_WAREHOUSE": "my_warehouse", + "SNOWSQL_ROLE": "my_role", + } + with patch.dict(os.environ, env_vars, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert len(values) == 7 + assert all( + key in values + for key in [ + "account", + "user", + "password", + "database", + "schema", + "warehouse", + "role", + ] + ) + + def test_empty_string_value(self): + """Should handle empty string values.""" + with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": ""}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert values["account"].value == "" + + def test_special_characters_in_value(self): + """Should handle special characters in values.""" + with patch.dict(os.environ, {"SNOWSQL_PWD": "p@ss!w0rd#123"}, clear=True): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert values["password"].value == "p@ss!w0rd#123" + + def test_underscore_in_key_preserved(self): + """Should preserve underscores in environment variable keys.""" + with patch.dict( + os.environ, {"SNOWSQL_PRIVATE_KEY_PATH": "/path/to/key"}, clear=True + ): + handler = SnowSqlEnvHandler() + values = handler.discover() + + assert "private_key_path" in values + assert values["private_key_path"].value == "/path/to/key" + + def test_multiple_discover_calls_consistent(self): + """Multiple discover calls should return consistent results.""" + with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): + handler = SnowSqlEnvHandler() + + values1 = handler.discover() + values2 = handler.discover() + + assert values1 == values2 From 4749dd9baa6b5d24fc7a6bf54ac80421c58da976 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 6 Oct 2025 10:14:11 +0200 Subject: [PATCH 06/78] SNOW-2306184: config refactory - move key mappings to module level --- src/snowflake/cli/api/config_ng/__init__.py | 2 ++ 
src/snowflake/cli/api/config_ng/env_handlers.py | 13 ++++++++----- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index fe6dfdaed1..7ff265874b 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -30,6 +30,7 @@ ValueSource, ) from snowflake.cli.api.config_ng.env_handlers import ( + SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS, SnowCliEnvHandler, SnowSqlEnvHandler, ) @@ -49,6 +50,7 @@ "ResolutionEntry", "ResolutionHistory", "SnowCliEnvHandler", + "SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS", "SnowSqlEnvHandler", "SourcePriority", "ValueSource", diff --git a/src/snowflake/cli/api/config_ng/env_handlers.py b/src/snowflake/cli/api/config_ng/env_handlers.py index 6ec505306d..9daad47117 100644 --- a/src/snowflake/cli/api/config_ng/env_handlers.py +++ b/src/snowflake/cli/api/config_ng/env_handlers.py @@ -28,6 +28,12 @@ from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority from snowflake.cli.api.config_ng.handlers import SourceHandler +# Key mappings from SnowSQL to SnowCLI naming conventions +# These mappings are used by handlers that need to support SnowSQL compatibility +SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS: Dict[str, str] = { + "pwd": "password", +} + class SnowCliEnvHandler(SourceHandler): """ @@ -124,11 +130,8 @@ class SnowSqlEnvHandler(SourceHandler): PREFIX = "SNOWSQL_" - # Key mappings from SnowSQL to SnowCLI - # SnowSQL uses PWD, but SnowCLI uses PASSWORD - KEY_MAPPINGS: Dict[str, str] = { - "pwd": "password", - } + # Reference to module-level key mappings + KEY_MAPPINGS = SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS @property def source_name(self) -> str: From 2a7385f7423335e19500824b4d7b8d5d968ffc4d Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 6 Oct 2025 11:01:31 +0200 Subject: [PATCH 07/78] SNOW-2306184: config refactory - SnowSQL config files behaviour --- src/snowflake/cli/api/config_ng/__init__.py | 8 + 
.../cli/api/config_ng/file_handlers.py | 392 +++++++++++++++++ .../config_ng/test_file_handler_migration.py | 382 ++++++++++++++++ .../config_ng/test_snowsql_config_handler.py | 406 ++++++++++++++++++ tests/config_ng/test_snowsql_config_paths.py | 233 ++++++++++ tests/config_ng/test_toml_file_handler.py | 308 +++++++++++++ 6 files changed, 1729 insertions(+) create mode 100644 src/snowflake/cli/api/config_ng/file_handlers.py create mode 100644 tests/config_ng/test_file_handler_migration.py create mode 100644 tests/config_ng/test_snowsql_config_handler.py create mode 100644 tests/config_ng/test_snowsql_config_paths.py create mode 100644 tests/config_ng/test_toml_file_handler.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index 7ff265874b..e49a1b5cf9 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -34,6 +34,11 @@ SnowCliEnvHandler, SnowSqlEnvHandler, ) +from snowflake.cli.api.config_ng.file_handlers import ( + SnowSqlConfigHandler, + TomlFileHandler, + get_snowsql_config_paths, +) from snowflake.cli.api.config_ng.sources import ( CliArgumentSource, ConfigurationSource, @@ -47,11 +52,14 @@ "ConfigValue", "EnvironmentSource", "FileSource", + "get_snowsql_config_paths", "ResolutionEntry", "ResolutionHistory", "SnowCliEnvHandler", + "SnowSqlConfigHandler", "SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS", "SnowSqlEnvHandler", "SourcePriority", + "TomlFileHandler", "ValueSource", ] diff --git a/src/snowflake/cli/api/config_ng/file_handlers.py b/src/snowflake/cli/api/config_ng/file_handlers.py new file mode 100644 index 0000000000..8ab8496b2e --- /dev/null +++ b/src/snowflake/cli/api/config_ng/file_handlers.py @@ -0,0 +1,392 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +File format handlers for configuration system. + +This module implements handlers for: +- TOML configuration files (SnowCLI format) +- SnowSQL configuration files (Legacy format with key mapping) +""" + +from __future__ import annotations + +from pathlib import Path +from typing import Dict, List, Optional + +import tomlkit +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority +from snowflake.cli.api.config_ng.env_handlers import SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS +from snowflake.cli.api.config_ng.handlers import SourceHandler + + +def get_snowsql_config_paths() -> List[Path]: + """ + Get standard SnowSQL configuration file paths in FileSource precedence order. + + SnowSQL reads config files where "last one wins" (later files override earlier ones). + Our FileSource uses "first one wins" (earlier files override later ones). + + This function returns paths in REVERSE order of SnowSQL's CNF_FILES to maintain + compatibility with SnowSQL's precedence behavior. + + SnowSQL precedence (lowest to highest): + 1. Bundled default config + 2. System-wide configs (/etc/snowsql.cnf, /etc/snowflake/snowsql.cnf, /usr/local/etc/snowsql.cnf) + 3. User home config (~/.snowsql.cnf) + 4. User .snowsql directory config (~/.snowsql/config) + 5. RPM config (/usr/lib64/snowflake/snowsql/config) - if exists + + Returns: + List of Path objects in FileSource precedence order (highest to lowest priority). + Only includes paths that exist on the filesystem. 
+ """ + home_dir = Path.home() + + # Define paths in FileSource order (first = highest priority) + # This is REVERSE of SnowSQL's order to maintain same effective precedence + paths_to_check = [ + # Highest priority in both systems + home_dir / ".snowsql" / "config", # User .snowsql directory config + home_dir / ".snowsql.cnf", # User home config (legacy) + Path("/usr/local/etc/snowsql.cnf"), # Local system config + Path("/etc/snowflake/snowsql.cnf"), # Alternative system config + Path("/etc/snowsql.cnf"), # System-wide config + # Bundled default config would go here but we typically don't ship one + # Lowest priority in both systems + ] + + # Check for RPM config (highest priority in SnowSQL if it exists) + rpm_config = Path("/usr/lib64/snowflake/snowsql/config") + if rpm_config.exists(): + paths_to_check.insert(0, rpm_config) # Add as highest priority + + # Return only paths that exist + return [p for p in paths_to_check if p.exists()] + + +class TomlFileHandler(SourceHandler): + """ + Handler for TOML configuration files. + Supports section navigation for nested configurations. + + Example: + # Config file: ~/.snowflake/connections.toml + [default] + account = "my_account" + user = "my_user" + + # With section_path=["default"] + TomlFileHandler(section_path=["default"]).discover_from_file(path) + # Returns: {"account": "my_account", "user": "my_user"} + """ + + def __init__(self, section_path: Optional[List[str]] = None): + """ + Initialize with optional section path. 
+ + Args: + section_path: Path to section in TOML + Example: ["connections", "default"] for [connections.default] + None or [] means root level + """ + self._section_path = section_path or [] + self._cached_data: Optional[Dict] = None + self._cached_file: Optional[Path] = None + + @property + def source_name(self) -> str: + if self._section_path: + section = ".".join(self._section_path) + return f"toml:{section}" + return "toml:root" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + @property + def handler_type(self) -> str: + return "toml" + + def can_handle(self) -> bool: + """TOML handler is always available.""" + return True + + def can_handle_file(self, file_path: Path) -> bool: + """Check if file is TOML format.""" + return file_path.suffix.lower() in (".toml", ".tml") + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """Not directly called - file handlers use discover_from_file.""" + raise NotImplementedError( + "TomlFileHandler requires file_path. Use discover_from_file() instead." + ) + + def discover_from_file( + self, + file_path: Path, + key: Optional[str] = None, + ) -> Dict[str, ConfigValue]: + """ + Discover values from TOML file. 
+ + Args: + file_path: Path to TOML file + key: Specific key to discover, or None for all + + Returns: + Dictionary of discovered values + """ + # Load and cache file data + if self._cached_file != file_path: + try: + with open(file_path) as f: + self._cached_data = tomlkit.load(f) + self._cached_file = file_path + except (OSError, tomlkit.exceptions.TOMLKitError): + # File doesn't exist or invalid TOML + return {} + + # Navigate to section + data = self._cached_data + for section in self._section_path: + if isinstance(data, dict) and section in data: + data = data[section] + else: + return {} # Section doesn't exist + + # Ensure data is a dictionary + if not isinstance(data, dict): + return {} + + # Extract values + values = {} + if key is not None: + if key in data: + raw = data[key] + values[key] = ConfigValue( + key=key, + value=raw, # TOML already parsed + source_name=self.source_name, + priority=self.priority, + raw_value=str(raw) if raw is not None else None, + ) + else: + for k, v in data.items(): + if isinstance(k, str): # Only process string keys + values[k] = ConfigValue( + key=k, + value=v, + source_name=self.source_name, + priority=self.priority, + raw_value=str(v) if v is not None else None, + ) + + return values + + def supports_key(self, key: str) -> bool: + """TOML can handle any string key.""" + return isinstance(key, str) + + +class SnowSqlConfigHandler(SourceHandler): + """ + Handler for SnowSQL config files. + Format: INI-like TOML with SnowSQL-specific key naming. + + SnowSQL Multi-File Support: + SnowSQL reads from multiple config file locations (system-wide, user home, etc.) + where later files override earlier ones. To maintain this behavior with FileSource: + + 1. Use get_snowsql_config_paths() to get paths in correct precedence order + 2. 
FileSource will process them with "first wins" logic, which matches + SnowSQL's effective behavior due to reversed ordering + + Key Mappings (SnowSQL → SnowCLI): + - accountname → account + - username → user + - dbname/databasename → database + - schemaname → schema + - warehousename → warehouse + - rolename → role + - pwd → password + + Example SnowSQL config: + [connections.default] + accountname = my_account + username = my_user + password = secret123 + + Example usage with multiple files: + from snowflake.cli.api.config_ng import ( + FileSource, SnowSqlConfigHandler, get_snowsql_config_paths + ) + + source = FileSource( + file_paths=get_snowsql_config_paths(), + handlers=[SnowSqlConfigHandler()] + ) + """ + + # Key mappings from SnowSQL to SnowCLI (in addition to env mappings) + SNOWSQL_CONFIG_KEY_MAPPINGS: Dict[str, str] = { + **SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS, # Include env mappings (pwd → password) + "accountname": "account", + "username": "user", + "dbname": "database", + "databasename": "database", + "schemaname": "schema", + "warehousename": "warehouse", + "rolename": "role", + } + + def __init__(self, section_path: Optional[List[str]] = None): + """ + Initialize with optional section path. 
+ + Args: + section_path: Path to section in config file + Default: ["connections"] for SnowSQL compatibility + """ + self._section_path = section_path or ["connections"] + self._cached_data: Optional[Dict] = None + self._cached_file: Optional[Path] = None + + @property + def source_name(self) -> str: + return "snowsql_config" + + @property + def priority(self) -> SourcePriority: + return SourcePriority.FILE + + @property + def handler_type(self) -> str: + return "snowsql_config" + + def can_handle(self) -> bool: + """SnowSQL handler is always available.""" + return True + + def can_handle_file(self, file_path: Path) -> bool: + """Check if file is SnowSQL config or TOML file.""" + # SnowSQL config is typically ~/.snowsql/config (no extension) + # But for flexibility, also handle any TOML file + if file_path.parent.name == ".snowsql" and file_path.name == "config": + return True + # Also handle .toml files for testing and flexibility + return file_path.suffix.lower() in (".toml", ".tml") + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """Not directly called - file handlers use discover_from_file.""" + raise NotImplementedError( + "SnowSqlConfigHandler requires file_path. Use discover_from_file() instead." + ) + + def discover_from_file( + self, + file_path: Path, + key: Optional[str] = None, + ) -> Dict[str, ConfigValue]: + """ + Discover values from SnowSQL config with key mapping. 
+ + Args: + file_path: Path to SnowSQL config file + key: Specific key to discover (SnowCLI format), or None + + Returns: + Dictionary with normalized SnowCLI keys + """ + # Load and cache file data + if self._cached_file != file_path: + try: + with open(file_path) as f: + self._cached_data = tomlkit.load(f) + self._cached_file = file_path + except (OSError, tomlkit.exceptions.TOMLKitError): + return {} + + # Navigate to section + data = self._cached_data + for section in self._section_path: + if isinstance(data, dict) and section in data: + data = data[section] + else: + return {} # Section doesn't exist + + # Ensure data is a dictionary + if not isinstance(data, dict): + return {} + + # Extract and map keys + values = {} + + if key is not None: + # Reverse lookup: find SnowSQL key for CLI key + snowsql_key = self._get_snowsql_key(key) + if snowsql_key in data: + raw = data[snowsql_key] + values[key] = ConfigValue( + key=key, # Normalized SnowCLI key + value=raw, + source_name=self.source_name, + priority=self.priority, + raw_value=f"{snowsql_key}={raw}" + if snowsql_key != key + else str(raw), + ) + else: + for snowsql_key, value in data.items(): + if not isinstance(snowsql_key, str): + continue + + # Map to SnowCLI key (lowercase) + snowsql_key_lower = snowsql_key.lower() + cli_key = self.SNOWSQL_CONFIG_KEY_MAPPINGS.get( + snowsql_key_lower, snowsql_key_lower + ) + + values[cli_key] = ConfigValue( + key=cli_key, + value=value, + source_name=self.source_name, + priority=self.priority, + raw_value=( + f"{snowsql_key}={value}" + if snowsql_key_lower != cli_key + else str(value) + ), + ) + + return values + + def supports_key(self, key: str) -> bool: + """Any string key can be represented in SnowSQL config.""" + return isinstance(key, str) + + def _get_snowsql_key(self, cli_key: str) -> str: + """Reverse mapping: CLI key → SnowSQL key.""" + for snowsql_key, cli_mapped_key in self.SNOWSQL_CONFIG_KEY_MAPPINGS.items(): + if cli_mapped_key == cli_key: + return 
snowsql_key + return cli_key + + def get_cli_key(self, snowsql_key: str) -> str: + """Forward mapping: SnowSQL key → CLI key.""" + snowsql_key_lower = snowsql_key.lower() + return self.SNOWSQL_CONFIG_KEY_MAPPINGS.get( + snowsql_key_lower, snowsql_key_lower + ) diff --git a/tests/config_ng/test_file_handler_migration.py b/tests/config_ng/test_file_handler_migration.py new file mode 100644 index 0000000000..49286b4827 --- /dev/null +++ b/tests/config_ng/test_file_handler_migration.py @@ -0,0 +1,382 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Integration tests for file handler migration scenarios. 
+ +Tests verify: +- File path precedence (first file wins) +- Handler ordering (TOML > SnowSQL) +- Migration from SnowSQL to SnowCLI TOML +- Complete integration with FileSource +""" + +from pathlib import Path +from tempfile import NamedTemporaryFile + +from snowflake.cli.api.config_ng.file_handlers import ( + SnowSqlConfigHandler, + TomlFileHandler, +) +from snowflake.cli.api.config_ng.sources import FileSource + + +class TestFileHandlerMigration: + """Test suite for file handler migration scenarios.""" + + def test_pure_toml_configuration(self): + """Scenario: User has only SnowCLI TOML configuration.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[default]\naccount = "toml_account"\nuser = "toml_user"\n') + f.flush() + toml_path = Path(f.name) + + try: + source = FileSource( + file_paths=[toml_path], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + values = source.discover() + + assert len(values) == 2 + assert values["account"].value == "toml_account" + assert values["account"].source_name == "toml:default" + assert values["user"].value == "toml_user" + finally: + toml_path.unlink() + + def test_pure_snowsql_configuration(self): + """Scenario: User has only SnowSQL configuration.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write( + "[connections]\n" + 'accountname = "snowsql_account"\n' + 'username = "snowsql_user"\n' + ) + f.flush() + snowsql_path = Path(f.name) + + try: + source = FileSource( + file_paths=[snowsql_path], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + values = source.discover() + + # Values should come from SnowSQL with key mapping + assert len(values) == 2 + assert values["account"].value == "snowsql_account" + assert values["account"].source_name == "snowsql_config" + assert values["user"].value == "snowsql_user" + finally: + snowsql_path.unlink() + + def 
test_partial_migration_toml_overrides_snowsql(self): + """Scenario: User has both configs, TOML should override SnowSQL.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: + f1.write('[default]\naccount = "new_account"\n') + f1.flush() + toml_path = Path(f1.name) + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: + f2.write( + "[connections]\n" + 'accountname = "old_account"\n' + 'username = "old_user"\n' + 'databasename = "old_db"\n' + ) + f2.flush() + snowsql_path = Path(f2.name) + + try: + # First file path has highest precedence + source = FileSource( + file_paths=[toml_path, snowsql_path], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + values = source.discover() + + # account from TOML (first file), others from SnowSQL (second file) + assert values["account"].value == "new_account" + assert values["account"].source_name == "toml:default" + assert values["user"].value == "old_user" + assert values["user"].source_name == "snowsql_config" + assert values["database"].value == "old_db" + finally: + toml_path.unlink() + snowsql_path.unlink() + + def test_handler_ordering_within_same_file(self): + """Handler order matters when both can handle same file.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + # File has both TOML format AND connections section + f.write( + '[default]\naccount = "toml_format"\n' + '[connections]\naccountname = "snowsql_format"\n' + ) + f.flush() + temp_path = Path(f.name) + + try: + # TOML handler first + source = FileSource( + file_paths=[temp_path], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + values = source.discover() + + # TOML handler should win (first handler) + assert values["account"].value == "toml_format" + assert values["account"].source_name == "toml:default" + finally: + temp_path.unlink() + + def test_file_path_precedence_first_wins(self): + """First 
file path should take precedence over later ones.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: + f1.write('[default]\naccount = "file1_account"\n') + f1.flush() + file1_path = Path(f1.name) + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: + f2.write('[default]\naccount = "file2_account"\n') + f2.flush() + file2_path = Path(f2.name) + + try: + source = FileSource( + file_paths=[file1_path, file2_path], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + values = source.discover() + + # First file wins + assert values["account"].value == "file1_account" + finally: + file1_path.unlink() + file2_path.unlink() + + def test_nonexistent_files_skipped(self): + """Should skip nonexistent files gracefully.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[default]\naccount = "existing_account"\n') + f.flush() + existing_path = Path(f.name) + + nonexistent_path = Path("/nonexistent/file.toml") + + try: + source = FileSource( + file_paths=[nonexistent_path, existing_path], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + values = source.discover() + + # Should still get values from existing file + assert values["account"].value == "existing_account" + finally: + existing_path.unlink() + + def test_complete_migration_timeline(self): + """Simulates complete migration from SnowSQL to TOML.""" + # Step 1: Pure SnowSQL user + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccountname = "account"\nusername = "user"\n') + f.flush() + snowsql_path = Path(f.name) + + try: + source = FileSource( + file_paths=[snowsql_path], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + values = source.discover() + assert values["account"].value == "account" + assert values["account"].source_name == "snowsql_config" + finally: + snowsql_path.unlink() + + # Step 2: Start migration - 
create TOML file + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: + f1.write('[default]\naccount = "new_account"\n') + f1.flush() + toml_path = Path(f1.name) + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: + f2.write( + '[connections]\naccountname = "old_account"\nusername = "old_user"\n' + ) + f2.flush() + snowsql_path = Path(f2.name) + + try: + source = FileSource( + file_paths=[toml_path, snowsql_path], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + values = source.discover() + # TOML overrides account, SnowSQL provides user + assert values["account"].value == "new_account" + assert values["account"].source_name == "toml:default" + assert values["user"].value == "old_user" + assert values["user"].source_name == "snowsql_config" + finally: + toml_path.unlink() + snowsql_path.unlink() + + def test_multiple_toml_handlers_different_sections(self): + """Should handle multiple TOML handlers for different sections.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write( + '[default]\naccount = "default_account"\n' + '[prod]\naccount = "prod_account"\n' + ) + f.flush() + temp_path = Path(f.name) + + try: + # Handler for [default] section + source_default = FileSource( + file_paths=[temp_path], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + # Handler for [prod] section + source_prod = FileSource( + file_paths=[temp_path], + handlers=[TomlFileHandler(section_path=["prod"])], + ) + + values_default = source_default.discover() + values_prod = source_prod.discover() + + assert values_default["account"].value == "default_account" + assert values_prod["account"].value == "prod_account" + finally: + temp_path.unlink() + + def test_discover_specific_key_with_migration(self): + """Should handle specific key discovery with migration.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: + 
f1.write('[default]\naccount = "toml_account"\n') + f1.flush() + toml_path = Path(f1.name) + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: + f2.write( + '[connections]\naccountname = "snowsql_account"\nusername = "user"\n' + ) + f2.flush() + snowsql_path = Path(f2.name) + + try: + source = FileSource( + file_paths=[toml_path, snowsql_path], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + # Discover specific key + values = source.discover(key="account") + + # Should get from TOML (first file) + assert len(values) == 1 + assert values["account"].value == "toml_account" + + # Discover different key + values = source.discover(key="user") + + # Should get from SnowSQL (second file) + assert len(values) == 1 + assert values["user"].value == "user" + finally: + toml_path.unlink() + snowsql_path.unlink() + + def test_complex_configuration_with_all_features(self): + """Complex scenario with multiple files, handlers, and sections.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: + f1.write('[connections]\naccount = "connections_account"\n') + f1.flush() + connections_toml = Path(f1.name) + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: + f2.write("[cli]\nverbose = true\n") + f2.flush() + config_toml = Path(f2.name) + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f3: + f3.write( + "[connections]\n" + 'accountname = "legacy_account"\n' + 'username = "legacy_user"\n' + ) + f3.flush() + snowsql_config = Path(f3.name) + + try: + source = FileSource( + file_paths=[connections_toml, config_toml, snowsql_config], + handlers=[ + TomlFileHandler(section_path=["connections"]), + TomlFileHandler(section_path=["cli"]), + SnowSqlConfigHandler(), + ], + ) + + values = source.discover() + + # Should get account from connections.toml (first file, first handler) + assert values["account"].value == "connections_account" + assert 
values["account"].source_name == "toml:connections" + + # Should get verbose from config.toml (second file, second handler) + assert values["verbose"].value is True + assert values["verbose"].source_name == "toml:cli" + + # Should get user from snowsql config (third file, third handler) + assert values["user"].value == "legacy_user" + assert values["user"].source_name == "snowsql_config" + finally: + connections_toml.unlink() + config_toml.unlink() + snowsql_config.unlink() diff --git a/tests/config_ng/test_snowsql_config_handler.py b/tests/config_ng/test_snowsql_config_handler.py new file mode 100644 index 0000000000..0a2d705d59 --- /dev/null +++ b/tests/config_ng/test_snowsql_config_handler.py @@ -0,0 +1,406 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for SnowSqlConfigHandler. + +Tests verify: +- SnowSQL config file discovery +- Key mapping (accountname → account, username → user, etc.) 
+- Section navigation +- Migration support +- Raw value preservation showing original key names +""" + +from pathlib import Path +from tempfile import NamedTemporaryFile + +import pytest +from snowflake.cli.api.config_ng.core import SourcePriority +from snowflake.cli.api.config_ng.file_handlers import SnowSqlConfigHandler + + +class TestSnowSqlConfigHandler: + """Test suite for SnowSqlConfigHandler.""" + + def test_create_handler(self): + """Should create handler with correct properties.""" + handler = SnowSqlConfigHandler() + + assert handler.source_name == "snowsql_config" + assert handler.priority == SourcePriority.FILE + assert handler.handler_type == "snowsql_config" + + def test_default_section_path(self): + """Should default to connections section.""" + # Verify by testing that it can discover from [connections] section + from pathlib import Path + from tempfile import NamedTemporaryFile + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccount = "test"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + # Should find value in [connections] section + assert "account" in values + finally: + temp_path.unlink() + + def test_custom_section_path(self): + """Should allow custom section path.""" + from pathlib import Path + from tempfile import NamedTemporaryFile + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\n[connections.prod]\naccount = "prod_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler(section_path=["connections", "prod"]) + values = handler.discover_from_file(temp_path) + # Should find value in custom section path + assert values["account"].value == "prod_account" + finally: + temp_path.unlink() + + def test_can_handle_always_true(self): + """Should always return True.""" + handler = SnowSqlConfigHandler() + assert 
handler.can_handle() is True + + def test_can_handle_snowsql_config_files(self): + """Should detect SnowSQL config files.""" + handler = SnowSqlConfigHandler() + + # Typical SnowSQL config path + assert handler.can_handle_file(Path("~/.snowsql/config")) is True + assert handler.can_handle_file(Path("/home/user/.snowsql/config")) is True + + def test_can_handle_toml_files(self): + """Should also handle .toml files.""" + handler = SnowSqlConfigHandler() + + assert handler.can_handle_file(Path("config.toml")) is True + + def test_discover_raises_not_implemented(self): + """Should raise NotImplementedError for discover() without file_path.""" + handler = SnowSqlConfigHandler() + + with pytest.raises(NotImplementedError, match="requires file_path"): + handler.discover() + + def test_discover_from_nonexistent_file(self): + """Should return empty dict for nonexistent file.""" + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(Path("/nonexistent/config")) + + assert len(values) == 0 + + def test_key_mapping_accountname(self): + """Should map accountname → account.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccountname = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + assert len(values) == 1 + assert "account" in values + assert "accountname" not in values + assert values["account"].value == "my_account" + assert values["account"].raw_value == "accountname=my_account" + finally: + temp_path.unlink() + + def test_key_mapping_username(self): + """Should map username → user.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\nusername = "my_user"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + assert values["user"].value == "my_user" + assert 
values["user"].raw_value == "username=my_user" + finally: + temp_path.unlink() + + def test_key_mapping_multiple_database_keys(self): + """Should map both dbname and databasename → database.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\ndatabasename = "my_db"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + assert values["database"].value == "my_db" + finally: + temp_path.unlink() + + def test_key_mapping_warehouse_schema_role(self): + """Should map warehouse, schema, and role names.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write( + "[connections]\n" + 'warehousename = "my_wh"\n' + 'schemaname = "my_schema"\n' + 'rolename = "my_role"\n' + ) + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + assert values["warehouse"].value == "my_wh" + assert values["schema"].value == "my_schema" + assert values["role"].value == "my_role" + finally: + temp_path.unlink() + + def test_key_mapping_pwd_to_password(self): + """Should map pwd → password (from env mappings).""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\npwd = "secret123"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + assert "password" in values + assert "pwd" not in values + assert values["password"].value == "secret123" + finally: + temp_path.unlink() + + def test_unmapped_keys_passthrough(self): + """Keys without mappings should pass through.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\ncustom_key = "custom_value"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + 
assert values["custom_key"].value == "custom_value" + finally: + temp_path.unlink() + + def test_discover_all_common_keys(self): + """Should discover all common SnowSQL keys with mapping.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write( + "[connections]\n" + 'accountname = "my_account"\n' + 'username = "my_user"\n' + 'pwd = "my_password"\n' + 'databasename = "my_db"\n' + 'schemaname = "my_schema"\n' + 'warehousename = "my_wh"\n' + 'rolename = "my_role"\n' + ) + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + assert len(values) == 7 + assert all( + key in values + for key in [ + "account", + "user", + "password", + "database", + "schema", + "warehouse", + "role", + ] + ) + finally: + temp_path.unlink() + + def test_discover_specific_key(self): + """Should discover specific key with mapping.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccountname = "my_account"\nusername = "my_user"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path, key="account") + + assert len(values) == 1 + assert "account" in values + assert "user" not in values + finally: + temp_path.unlink() + + def test_discover_nonexistent_key(self): + """Should return empty dict for nonexistent key.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccountname = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path, key="nonexistent") + + assert len(values) == 0 + finally: + temp_path.unlink() + + def test_discover_nonexistent_section(self): + """Should return empty dict for nonexistent section.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('accountname = 
"my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() # Default section: connections + values = handler.discover_from_file(temp_path) + + assert len(values) == 0 + finally: + temp_path.unlink() + + def test_values_have_correct_metadata(self): + """Discovered values should have correct metadata.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccountname = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + config_value = values["account"] + assert config_value.source_name == "snowsql_config" + assert config_value.priority == SourcePriority.FILE + assert config_value.key == "account" + assert config_value.value == "my_account" + # Raw value shows original SnowSQL key + assert config_value.raw_value == "accountname=my_account" + finally: + temp_path.unlink() + + def test_supports_any_string_key(self): + """Should support any string key.""" + handler = SnowSqlConfigHandler() + + assert handler.supports_key("account") is True + assert handler.supports_key("any_key") is True + + def test_reverse_mapping_for_specific_key_query(self): + """Should use reverse mapping when querying specific key.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccountname = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + # Query for "account" should find "accountname" + values = handler.discover_from_file(temp_path, key="account") + + assert len(values) == 1 + assert values["account"].value == "my_account" + finally: + temp_path.unlink() + + def test_get_cli_key_method(self): + """Should convert SnowSQL keys to CLI keys.""" + handler = SnowSqlConfigHandler() + + assert handler.get_cli_key("accountname") == "account" + assert handler.get_cli_key("username") == "user" + assert 
handler.get_cli_key("pwd") == "password" + assert handler.get_cli_key("unmapped") == "unmapped" + + def test_case_insensitive_key_mapping(self): + """Key mappings should be case-insensitive.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\nAccountName = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + # Should still map to "account" + assert "account" in values + assert values["account"].value == "my_account" + finally: + temp_path.unlink() + + def test_invalid_toml_returns_empty(self): + """Should handle invalid TOML gracefully.""" + with NamedTemporaryFile(mode="w", delete=False) as f: + f.write("invalid toml content [[[") + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + values = handler.discover_from_file(temp_path) + + assert len(values) == 0 + finally: + temp_path.unlink() + + def test_caching_behavior(self): + """Should cache file data for performance.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\naccountname = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = SnowSqlConfigHandler() + + # First call loads file + values1 = handler.discover_from_file(temp_path) + # Second call uses cache + values2 = handler.discover_from_file(temp_path) + + assert values1 == values2 + # Verify caching by checking results are consistent + finally: + temp_path.unlink() diff --git a/tests/config_ng/test_snowsql_config_paths.py b/tests/config_ng/test_snowsql_config_paths.py new file mode 100644 index 0000000000..dc26c771a5 --- /dev/null +++ b/tests/config_ng/test_snowsql_config_paths.py @@ -0,0 +1,233 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for get_snowsql_config_paths() helper function. + +Tests verify: +- Returns paths in correct precedence order (highest to lowest) +- Order is reversed from SnowSQL's CNF_FILES to match FileSource's "first wins" logic +- Only returns paths that exist +- Handles RPM config precedence correctly +""" + +from pathlib import Path +from unittest.mock import patch + +from snowflake.cli.api.config_ng.file_handlers import get_snowsql_config_paths + + +class TestGetSnowSqlConfigPaths: + """Test suite for get_snowsql_config_paths() function.""" + + def test_returns_list_of_paths(self): + """Should return a list of Path objects.""" + paths = get_snowsql_config_paths() + + assert isinstance(paths, list) + assert all(isinstance(p, Path) for p in paths) + + def test_only_returns_existing_paths(self, tmp_path): + """Should only return paths that exist on the filesystem.""" + with patch("pathlib.Path.home", return_value=tmp_path): + # Create only one of the expected files + snowsql_dir = tmp_path / ".snowsql" + snowsql_dir.mkdir() + config_file = snowsql_dir / "config" + config_file.touch() + + paths = get_snowsql_config_paths() + + # Should only return the one file that exists + assert len(paths) == 1 + assert paths[0] == config_file + + def test_user_config_has_highest_priority(self, tmp_path): + """User config should come first in the list (highest priority).""" + with patch("pathlib.Path.home", return_value=tmp_path): + # Create user .snowsql directory config + snowsql_dir = tmp_path / ".snowsql" + snowsql_dir.mkdir() + user_config = snowsql_dir / 
"config" + user_config.touch() + + # Create legacy user config + legacy_config = tmp_path / ".snowsql.cnf" + legacy_config.touch() + + paths = get_snowsql_config_paths() + + # User .snowsql/config should come before .snowsql.cnf + assert len(paths) == 2 + assert paths[0] == user_config + assert paths[1] == legacy_config + + def test_rpm_config_has_highest_priority_if_exists(self, tmp_path): + """RPM config should be first if it exists (concept test).""" + # This test verifies the logic conceptually + # In reality, RPM config path is unlikely to exist in test environment + # The important part is that IF it exists, it gets inserted at position 0 + + with patch("pathlib.Path.home", return_value=tmp_path): + # Create user config + snowsql_dir = tmp_path / ".snowsql" + snowsql_dir.mkdir() + user_config = snowsql_dir / "config" + user_config.touch() + + paths = get_snowsql_config_paths() + + # User config should be first (RPM likely doesn't exist) + assert len(paths) >= 1 + assert paths[0] == user_config + + # Verify that the logic in get_snowsql_config_paths checks for RPM + # This is validated by code inspection - the function checks rpm_config.exists() + + def test_precedence_order_matches_snowsql_behavior(self, tmp_path): + """ + Test that the returned order matches SnowSQL's effective precedence. 
+ + SnowSQL reads files where "last one wins", so: + - bundled config (read first, lowest priority) + - system configs + - user configs (read last, highest priority) + + FileSource uses "first one wins", so we reverse the order: + - user configs (first in list, highest priority) + - system configs + - bundled config (last in list, lowest priority) + """ + with patch("pathlib.Path.home", return_value=tmp_path): + # Create all user config files + snowsql_dir = tmp_path / ".snowsql" + snowsql_dir.mkdir() + user_snowsql_config = snowsql_dir / "config" + user_snowsql_config.touch() + + user_legacy_config = tmp_path / ".snowsql.cnf" + user_legacy_config.touch() + + paths = get_snowsql_config_paths() + + # Verify order: most specific (user) configs first + assert len(paths) == 2 + assert paths[0] == user_snowsql_config # Highest priority + assert paths[1] == user_legacy_config # Second priority + + def test_handles_missing_home_directory_gracefully(self): + """Should handle case where home directory doesn't exist.""" + with patch("pathlib.Path.home", return_value=Path("/nonexistent")): + paths = get_snowsql_config_paths() + + # Should return empty list or only system paths that exist + assert isinstance(paths, list) + + def test_returns_empty_list_when_no_configs_exist(self, tmp_path): + """Should return empty list if no config files exist.""" + with patch("pathlib.Path.home", return_value=tmp_path): + paths = get_snowsql_config_paths() + + assert paths == [] + + def test_system_configs_have_lower_priority_than_user(self, tmp_path): + """System configs should appear after user configs in the list.""" + # This test verifies the concept even if system paths don't exist in test env + with patch("pathlib.Path.home", return_value=tmp_path): + snowsql_dir = tmp_path / ".snowsql" + snowsql_dir.mkdir() + user_config = snowsql_dir / "config" + user_config.touch() + + paths = get_snowsql_config_paths() + + # User config should be first (if any paths are returned) + if len(paths) > 
0: + assert paths[0] == user_config + + +class TestSnowSqlConfigPathsIntegration: + """Integration tests with FileSource and SnowSqlConfigHandler.""" + + def test_paths_work_with_file_source(self, tmp_path): + """Paths should work correctly with FileSource.""" + from snowflake.cli.api.config_ng.file_handlers import SnowSqlConfigHandler + from snowflake.cli.api.config_ng.sources import FileSource + + with patch("pathlib.Path.home", return_value=tmp_path): + # Create a user config file + snowsql_dir = tmp_path / ".snowsql" + snowsql_dir.mkdir() + user_config = snowsql_dir / "config" + user_config.write_text( + '[connections]\naccountname = "user_account"\nusername = "user"\n' + ) + + # Get paths using helper + paths = get_snowsql_config_paths() + + # Create FileSource with these paths + source = FileSource(file_paths=paths, handlers=[SnowSqlConfigHandler()]) + + values = source.discover() + + # Should discover values from user config + assert values["account"].value == "user_account" + assert values["user"].value == "user" + + def test_file_precedence_with_multiple_configs(self, tmp_path): + """ + Test that file precedence matches SnowSQL behavior. 
+ + In SnowSQL: later files override earlier ones + In FileSource: earlier files override later ones + With reversed order: same effective behavior + """ + from snowflake.cli.api.config_ng.file_handlers import SnowSqlConfigHandler + from snowflake.cli.api.config_ng.sources import FileSource + + with patch("pathlib.Path.home", return_value=tmp_path): + # Create user .snowsql/config (should have highest priority) + snowsql_dir = tmp_path / ".snowsql" + snowsql_dir.mkdir() + user_config = snowsql_dir / "config" + user_config.write_text( + "[connections]\n" + 'accountname = "priority1_account"\n' + 'username = "priority1_user"\n' + ) + + # Create another config in snowsql dir (should have lower priority) + # Using .toml extension so handler can process it + legacy_config = snowsql_dir / "legacy.toml" + legacy_config.write_text( + "[connections]\n" + 'accountname = "priority2_account"\n' + 'username = "priority2_user"\n' + 'databasename = "priority2_db"\n' + ) + + # Manually specify paths to test precedence + paths = [user_config, legacy_config] + + source = FileSource(file_paths=paths, handlers=[SnowSqlConfigHandler()]) + + values = source.discover() + + # Values from user_config should win (it's first in the list) + assert values["account"].value == "priority1_account" + assert values["user"].value == "priority1_user" + + # Database only exists in legacy config, so it should be found + assert values["database"].value == "priority2_db" diff --git a/tests/config_ng/test_toml_file_handler.py b/tests/config_ng/test_toml_file_handler.py new file mode 100644 index 0000000000..2cc183d8f2 --- /dev/null +++ b/tests/config_ng/test_toml_file_handler.py @@ -0,0 +1,308 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for TomlFileHandler. + +Tests verify: +- TOML file discovery +- Section navigation +- Caching behavior +- File format detection +- Value metadata +""" + +from pathlib import Path +from tempfile import NamedTemporaryFile + +import pytest +from snowflake.cli.api.config_ng.core import SourcePriority +from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler + + +class TestTomlFileHandler: + """Test suite for TomlFileHandler.""" + + def test_create_handler(self): + """Should create handler with correct properties.""" + handler = TomlFileHandler() + + assert handler.source_name == "toml:root" + assert handler.priority == SourcePriority.FILE + assert handler.handler_type == "toml" + + def test_create_handler_with_section_path(self): + """Should create handler with section path.""" + handler = TomlFileHandler(section_path=["connections", "default"]) + + assert handler.source_name == "toml:connections.default" + assert handler.priority == SourcePriority.FILE + + def test_can_handle_always_true(self): + """Should always return True.""" + handler = TomlFileHandler() + assert handler.can_handle() is True + + def test_can_handle_toml_files(self): + """Should detect TOML files by extension.""" + handler = TomlFileHandler() + + assert handler.can_handle_file(Path("config.toml")) is True + assert handler.can_handle_file(Path("connections.toml")) is True + assert handler.can_handle_file(Path("file.tml")) is True + + def test_cannot_handle_non_toml_files(self): + """Should reject non-TOML files.""" + handler = TomlFileHandler() + + assert 
handler.can_handle_file(Path("config.json")) is False + assert handler.can_handle_file(Path("config.yaml")) is False + assert handler.can_handle_file(Path("config")) is False + + def test_discover_raises_not_implemented(self): + """Should raise NotImplementedError for discover() without file_path.""" + handler = TomlFileHandler() + + with pytest.raises(NotImplementedError, match="requires file_path"): + handler.discover() + + def test_discover_from_nonexistent_file(self): + """Should return empty dict for nonexistent file.""" + handler = TomlFileHandler() + values = handler.discover_from_file(Path("/nonexistent/file.toml")) + + assert len(values) == 0 + + def test_discover_from_simple_toml(self): + """Should discover values from simple TOML file.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[default]\naccount = "my_account"\nuser = "my_user"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler(section_path=["default"]) + values = handler.discover_from_file(temp_path) + + assert len(values) == 2 + assert values["account"].value == "my_account" + assert values["user"].value == "my_user" + finally: + temp_path.unlink() + + def test_discover_root_level(self): + """Should discover values at root level.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('account = "my_account"\nuser = "my_user"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler() # No section path + values = handler.discover_from_file(temp_path) + + assert len(values) == 2 + assert values["account"].value == "my_account" + finally: + temp_path.unlink() + + def test_discover_nested_section(self): + """Should navigate to nested sections.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[connections]\n[connections.default]\naccount = "test"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = 
TomlFileHandler(section_path=["connections", "default"]) + values = handler.discover_from_file(temp_path) + + assert len(values) == 1 + assert values["account"].value == "test" + finally: + temp_path.unlink() + + def test_discover_nonexistent_section(self): + """Should return empty dict for nonexistent section.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('account = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler(section_path=["nonexistent"]) + values = handler.discover_from_file(temp_path) + + assert len(values) == 0 + finally: + temp_path.unlink() + + def test_discover_specific_key(self): + """Should discover only specific key.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('account = "my_account"\nuser = "my_user"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler() + values = handler.discover_from_file(temp_path, key="account") + + assert len(values) == 1 + assert "account" in values + assert "user" not in values + finally: + temp_path.unlink() + + def test_discover_nonexistent_key(self): + """Should return empty dict for nonexistent key.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('account = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler() + values = handler.discover_from_file(temp_path, key="nonexistent") + + assert len(values) == 0 + finally: + temp_path.unlink() + + def test_values_have_correct_metadata(self): + """Discovered values should have correct metadata.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('[default]\naccount = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler(section_path=["default"]) + values = handler.discover_from_file(temp_path) + + config_value = values["account"] + assert config_value.source_name == "toml:default" + 
assert config_value.priority == SourcePriority.FILE + assert config_value.key == "account" + assert config_value.value == "my_account" + assert config_value.raw_value == "my_account" + finally: + temp_path.unlink() + + def test_handles_various_value_types(self): + """Should handle different TOML value types.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write( + 'string_val = "text"\n' + "int_val = 42\n" + "bool_val = true\n" + 'list_val = ["a", "b"]\n' + ) + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler() + values = handler.discover_from_file(temp_path) + + assert values["string_val"].value == "text" + assert values["int_val"].value == 42 + assert values["bool_val"].value is True + assert values["list_val"].value == ["a", "b"] + finally: + temp_path.unlink() + + def test_caching_behavior(self): + """Should cache file data for performance.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('account = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler() + + # First call loads file + values1 = handler.discover_from_file(temp_path) + # Second call uses cache + values2 = handler.discover_from_file(temp_path) + + assert values1 == values2 + # Verify caching by checking results are consistent + finally: + temp_path.unlink() + + def test_cache_invalidation_on_different_file(self): + """Should invalidate cache when file changes.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: + f1.write('account = "account1"\n') + f1.flush() + temp_path1 = Path(f1.name) + + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: + f2.write('account = "account2"\n') + f2.flush() + temp_path2 = Path(f2.name) + + try: + handler = TomlFileHandler() + + values1 = handler.discover_from_file(temp_path1) + values2 = handler.discover_from_file(temp_path2) + + assert values1["account"].value == "account1" + assert 
values2["account"].value == "account2" + finally: + temp_path1.unlink() + temp_path2.unlink() + + def test_supports_any_string_key(self): + """Should support any string key.""" + handler = TomlFileHandler() + + assert handler.supports_key("account") is True + assert handler.supports_key("any_key") is True + assert handler.supports_key("") is True + + def test_invalid_toml_returns_empty(self): + """Should handle invalid TOML gracefully.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write("invalid toml content [[[") + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler() + values = handler.discover_from_file(temp_path) + + assert len(values) == 0 + finally: + temp_path.unlink() + + def test_multiple_discover_calls_consistent(self): + """Multiple discover calls should return consistent results.""" + with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + f.write('account = "my_account"\n') + f.flush() + temp_path = Path(f.name) + + try: + handler = TomlFileHandler() + + values1 = handler.discover_from_file(temp_path) + values2 = handler.discover_from_file(temp_path) + + assert values1 == values2 + finally: + temp_path.unlink() From 64d715bf8dd79dd25eb447e77a9bd9b3f46fd7f4 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 6 Oct 2025 16:35:18 +0200 Subject: [PATCH 08/78] SNOW-2306184: config refactory - resolver history --- src/snowflake/cli/api/config_ng/__init__.py | 6 + src/snowflake/cli/api/config_ng/resolver.py | 611 ++++++++++++++++++ .../config_ng/test_configuration_resolver.py | 441 +++++++++++++ .../test_resolution_history_tracker.py | 431 ++++++++++++ tests/config_ng/test_resolver_integration.py | 377 +++++++++++ 5 files changed, 1866 insertions(+) create mode 100644 src/snowflake/cli/api/config_ng/resolver.py create mode 100644 tests/config_ng/test_configuration_resolver.py create mode 100644 tests/config_ng/test_resolution_history_tracker.py create mode 100644 
tests/config_ng/test_resolver_integration.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index e49a1b5cf9..b8b416d8e4 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -39,6 +39,10 @@ TomlFileHandler, get_snowsql_config_paths, ) +from snowflake.cli.api.config_ng.resolver import ( + ConfigurationResolver, + ResolutionHistoryTracker, +) from snowflake.cli.api.config_ng.sources import ( CliArgumentSource, ConfigurationSource, @@ -48,6 +52,7 @@ __all__ = [ "CliArgumentSource", + "ConfigurationResolver", "ConfigurationSource", "ConfigValue", "EnvironmentSource", @@ -55,6 +60,7 @@ "get_snowsql_config_paths", "ResolutionEntry", "ResolutionHistory", + "ResolutionHistoryTracker", "SnowCliEnvHandler", "SnowSqlConfigHandler", "SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS", diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py new file mode 100644 index 0000000000..c5f247e4d2 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -0,0 +1,611 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Configuration resolver with resolution history tracking. 
+ +This module implements: +- ResolutionHistoryTracker: Tracks configuration value discoveries and precedence +- ConfigurationResolver: Orchestrates sources and resolves configuration values +""" + +from __future__ import annotations + +import json +import logging +from collections import defaultdict +from datetime import datetime +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +from snowflake.cli.api.config_ng.core import ( + ConfigValue, + ResolutionEntry, + ResolutionHistory, +) +from snowflake.cli.api.console import cli_console + +if TYPE_CHECKING: + from snowflake.cli.api.config_ng.sources import ConfigurationSource + +log = logging.getLogger(__name__) + +# Sensitive configuration keys that should be masked when displayed +SENSITIVE_KEYS = { + "password", + "pwd", + "oauth_client_secret", + "token", + "session_token", + "master_token", + "mfa_passcode", + "private_key", # Private key content (not path) + "passphrase", + "secret", +} + +# Keys that contain file paths (paths are OK to display, but not file contents) +PATH_KEYS = { + "private_key_file", + "private_key_path", + "token_file_path", +} + + +def _should_mask_value(key: str) -> bool: + """ + Determine if a configuration value should be masked for security. + + Args: + key: Configuration key name + + Returns: + True if the value should be masked, False if it can be displayed + """ + key_lower = key.lower() + + # Check if it's a path key (paths are OK to display) + if any(path_key in key_lower for path_key in PATH_KEYS): + return False + + # Check if it contains sensitive keywords + return any(sensitive_key in key_lower for sensitive_key in SENSITIVE_KEYS) + + +def _mask_sensitive_value(key: str, value: Any) -> str: + """ + Mask sensitive configuration values for display. 
+ + Args: + key: Configuration key name + value: Configuration value + + Returns: + Masked representation of the value + """ + if _should_mask_value(key): + return "****" + return str(value) + + +class ResolutionHistoryTracker: + """ + Tracks the complete resolution process for all configuration keys. + + This class records: + - Every value discovered from every source + - The order in which values were considered + - Which value was ultimately selected + - Which values were overridden and by what + + Provides debugging utilities and export functionality. + """ + + def __init__(self): + """Initialize empty history tracker.""" + self._histories: Dict[str, ResolutionHistory] = {} + self._discoveries: Dict[str, List[tuple[ConfigValue, datetime]]] = defaultdict( + list + ) + self._enabled = True + + def enable(self) -> None: + """Enable history tracking.""" + self._enabled = True + + def disable(self) -> None: + """Disable history tracking for performance.""" + self._enabled = False + + def is_enabled(self) -> bool: + """Check if history tracking is enabled.""" + return self._enabled + + def clear(self) -> None: + """Clear all recorded history.""" + self._histories.clear() + self._discoveries.clear() + + def record_discovery(self, key: str, config_value: ConfigValue) -> None: + """ + Record a value discovery from a source. + + Args: + key: Configuration key + config_value: The discovered ConfigValue with metadata + """ + if not self._enabled: + return + + timestamp = datetime.now() + self._discoveries[key].append((config_value, timestamp)) + + def mark_selected(self, key: str, source_name: str) -> None: + """ + Mark which source's value was selected for a key. 
+ + Args: + key: Configuration key + source_name: Name of the source whose value was selected + """ + if not self._enabled or key not in self._discoveries: + return + + # Build resolution history for this key + entries: List[ResolutionEntry] = [] + selected_value = None + + for config_value, timestamp in self._discoveries[key]: + was_selected = config_value.source_name == source_name + overridden_by = source_name if not was_selected else None + + entry = ResolutionEntry( + config_value=config_value, + timestamp=timestamp, + was_used=was_selected, + overridden_by=overridden_by, + ) + entries.append(entry) + + if was_selected: + selected_value = config_value.value + + self._histories[key] = ResolutionHistory( + key=key, entries=entries, final_value=selected_value, default_used=False + ) + + def mark_default_used(self, key: str, default_value: Any) -> None: + """ + Mark that a default value was used for a key. + + Args: + key: Configuration key + default_value: The default value used + """ + if not self._enabled: + return + + # Create or update history to indicate default usage + if key in self._histories: + self._histories[key].default_used = True + self._histories[key].final_value = default_value + else: + # No discoveries, only default + self._histories[key] = ResolutionHistory( + key=key, entries=[], final_value=default_value, default_used=True + ) + + def get_history(self, key: str) -> Optional[ResolutionHistory]: + """ + Get resolution history for a specific key. + + Args: + key: Configuration key + + Returns: + ResolutionHistory object or None if key not tracked + """ + return self._histories.get(key) + + def get_all_histories(self) -> Dict[str, ResolutionHistory]: + """ + Get all resolution histories. + + Returns: + Dictionary mapping keys to their ResolutionHistory objects + """ + return self._histories.copy() + + def get_summary(self) -> dict: + """ + Get summary statistics about configuration resolution. 
+ + Returns: + Dictionary with statistics: + - total_keys_resolved: Number of keys resolved + - keys_with_overrides: Number of keys where values were overridden + - keys_using_defaults: Number of keys using default values + - source_usage: Dict of source_name -> count of values provided + - source_wins: Dict of source_name -> count of values selected + """ + total_keys = len(self._histories) + keys_with_overrides = sum( + 1 for h in self._histories.values() if len(h.overridden_entries) > 0 + ) + keys_using_defaults = sum(1 for h in self._histories.values() if h.default_used) + + source_usage: Dict[str, int] = defaultdict(int) + source_wins: Dict[str, int] = defaultdict(int) + + for history in self._histories.values(): + for entry in history.entries: + source_usage[entry.config_value.source_name] += 1 + if entry.was_used: + source_wins[entry.config_value.source_name] += 1 + + return { + "total_keys_resolved": total_keys, + "keys_with_overrides": keys_with_overrides, + "keys_using_defaults": keys_using_defaults, + "source_usage": dict(source_usage), + "source_wins": dict(source_wins), + } + + +class ConfigurationResolver: + """ + Orchestrates configuration sources with full resolution history tracking. + + This is the main entry point for configuration resolution. It: + - Manages multiple configuration sources (CLI, Environment, Files) + - Applies precedence rules (CLI > Env > Files) + - Tracks complete resolution history + - Provides debugging and export utilities + + Example: + resolver = ConfigurationResolver( + sources=[cli_source, env_source, file_source], + track_history=True + ) + + # Resolve all configuration + config = resolver.resolve() + + # Debug: where did 'account' come from? 
+ resolver.print_resolution_chain("account") + + # Export for support + resolver.export_history(Path("debug_config.json")) + """ + + def __init__( + self, + sources: Optional[List[ConfigurationSource]] = None, + track_history: bool = True, + ): + """ + Initialize resolver with sources and history tracking. + + Args: + sources: List of configuration sources (will be sorted by priority) + track_history: Enable resolution history tracking (default: True) + """ + self._sources = sources or [] + self._sort_sources() + self._history_tracker = ResolutionHistoryTracker() + + if not track_history: + self._history_tracker.disable() + + def _sort_sources(self) -> None: + """Sort sources by priority (highest first).""" + self._sources.sort(key=lambda s: s.priority.value) + + def add_source(self, source: ConfigurationSource) -> None: + """ + Add a configuration source and re-sort. + + Args: + source: ConfigurationSource to add + """ + self._sources.append(source) + self._sort_sources() + + def get_sources(self) -> List[ConfigurationSource]: + """Get list of all sources (for inspection).""" + return self._sources.copy() + + def enable_history(self) -> None: + """Enable resolution history tracking.""" + self._history_tracker.enable() + + def disable_history(self) -> None: + """Disable history tracking (for performance).""" + self._history_tracker.disable() + + def clear_history(self) -> None: + """Clear all resolution history.""" + self._history_tracker.clear() + + def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, Any]: + """ + Resolve configuration values from all sources with history tracking. + + Resolution Process: + 1. Query all sources for the key (lowest to highest priority) + 2. Record all discovered values in history + 3. Apply precedence rules (higher priority overwrites lower) + 4. Mark which value was selected + 5. 
Return final resolved values + + Args: + key: Specific key to resolve (None = all keys) + default: Default value if key not found + + Returns: + Dictionary of resolved values (key -> value) + """ + all_values: Dict[str, ConfigValue] = {} + + # Process sources in REVERSE priority order (lowest first) + # This way higher priority values will overwrite lower ones + for source in reversed(self._sources): + try: + source_values = source.discover(key) + + # Record discoveries in history + for k, config_value in source_values.items(): + self._history_tracker.record_discovery(k, config_value) + + # Update current values (higher priority overwrites) + all_values.update(source_values) + + except Exception as e: + log.warning("Error from source %s: %s", source.source_name, e) + + # Mark which values were selected in history + for k, config_value in all_values.items(): + self._history_tracker.mark_selected(k, config_value.source_name) + + # Convert ConfigValue objects to plain values + resolved = {k: v.value for k, v in all_values.items()} + + # Handle default for specific key + if key is not None and key not in resolved: + if default is not None: + resolved[key] = default + self._history_tracker.mark_default_used(key, default) + + return resolved + + def resolve_value(self, key: str, default: Any = None) -> Any: + """ + Resolve a single configuration value. + + Args: + key: Configuration key + default: Default value if not found + + Returns: + Resolved value or default + """ + resolved = self.resolve(key=key, default=default) + return resolved.get(key, default) + + def get_value_metadata(self, key: str) -> Optional[ConfigValue]: + """ + Get metadata for the selected value. 
+ + Args: + key: Configuration key + + Returns: + ConfigValue for the selected value, or None if not found + """ + history = self._history_tracker.get_history(key) + if history and history.selected_entry: + return history.selected_entry.config_value + + # Fallback to live query if history not available + for source in self._sources: + values = source.discover(key) + if key in values: + return values[key] + + return None + + def get_resolution_history(self, key: str) -> Optional[ResolutionHistory]: + """ + Get complete resolution history for a key. + + Args: + key: Configuration key + + Returns: + ResolutionHistory showing the full precedence chain + """ + return self._history_tracker.get_history(key) + + def get_all_histories(self) -> Dict[str, ResolutionHistory]: + """Get resolution histories for all keys.""" + return self._history_tracker.get_all_histories() + + def get_history_summary(self) -> dict: + """ + Get summary statistics about configuration resolution. + + Returns: + Dictionary with statistics: + - total_keys_resolved + - keys_with_overrides + - keys_using_defaults + - source_usage (how many values each source provided) + - source_wins (how many final values came from each source) + """ + return self._history_tracker.get_summary() + + def format_resolution_chain(self, key: str) -> str: + """ + Format the resolution chain for a key (debugging helper). + + Args: + key: Configuration key + + Returns: + Formatted resolution chain as a string + """ + history = self.get_resolution_history(key) + if history: + return history.format_chain() + return f"No resolution history found for key: {key}" + + def format_all_chains(self) -> str: + """ + Format resolution chains for all keys (debugging helper). 
+ + Returns: + Formatted resolution chains as a string + """ + histories = self.get_all_histories() + if not histories: + return "No resolution history available" + + lines = [ + f"\n{'=' * 80}", + f"Configuration Resolution History ({len(histories)} keys)", + f"{'=' * 80}\n", + ] + + for key in sorted(histories.keys()): + lines.append(histories[key].format_chain()) + lines.append("") + + return "\n".join(lines) + + def print_resolution_chain(self, key: str) -> None: + """ + Print the resolution chain for a key using cli_console formatting. + Sensitive values (passwords, tokens, etc.) are automatically masked. + + Args: + key: Configuration key + """ + history = self.get_resolution_history(key) + if not history: + cli_console.warning(f"No resolution history found for key: {key}") + return + + with cli_console.phase( + f"{key} resolution chain ({len(history.entries)} sources):" + ): + for i, entry in enumerate(history.entries, 1): + cv = entry.config_value + status_icon = "✅" if entry.was_used else "❌" + + if entry.was_used: + status_text = "(SELECTED)" + elif entry.overridden_by: + status_text = f"(overridden by {entry.overridden_by})" + else: + status_text = "(not used)" + + # Mask sensitive values + masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_raw = ( + _mask_sensitive_value(cv.key, cv.raw_value) + if cv.raw_value is not None + else None + ) + + # Show raw value if different from parsed value + value_display = f'"{masked_value}"' + if masked_raw is not None and cv.raw_value != cv.value: + value_display = f'"{masked_raw}" → {masked_value}' + + cli_console.step( + f"{i}. {status_icon} {cv.source_name}: {value_display} {status_text}" + ) + + if history.default_used: + masked_default = _mask_sensitive_value(key, history.final_value) + cli_console.step(f"Default value used: {masked_default}") + + def print_all_chains(self) -> None: + """ + Print resolution chains for all keys using cli_console formatting. + Sensitive values (passwords, tokens, etc.) 
are automatically masked. + """ + histories = self.get_all_histories() + if not histories: + cli_console.warning("No resolution history available") + return + + with cli_console.phase( + f"Configuration Resolution History ({len(histories)} keys)" + ): + for key in sorted(histories.keys()): + history = histories[key] + cli_console.message( + f"\n{key} resolution chain ({len(history.entries)} sources):" + ) + with cli_console.indented(): + for i, entry in enumerate(history.entries, 1): + cv = entry.config_value + status_icon = "✅" if entry.was_used else "❌" + + if entry.was_used: + status_text = "(SELECTED)" + elif entry.overridden_by: + status_text = f"(overridden by {entry.overridden_by})" + else: + status_text = "(not used)" + + # Mask sensitive values + masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_raw = ( + _mask_sensitive_value(cv.key, cv.raw_value) + if cv.raw_value is not None + else None + ) + + # Show raw value if different from parsed value + value_display = f'"{masked_value}"' + if masked_raw is not None and cv.raw_value != cv.value: + value_display = f'"{masked_raw}" → {masked_value}' + + cli_console.step( + f"{i}. {status_icon} {cv.source_name}: {value_display} {status_text}" + ) + + if history.default_used: + masked_default = _mask_sensitive_value(key, history.final_value) + cli_console.step(f"Default value used: {masked_default}") + + def export_history(self, filepath: Path) -> None: + """ + Export resolution history to JSON file. 
+ + Args: + filepath: Path to output file + """ + histories = self.get_all_histories() + data = { + "summary": self.get_history_summary(), + "histories": {key: history.to_dict() for key, history in histories.items()}, + } + + with open(filepath, "w") as f: + json.dump(data, f, indent=2) + + log.info("Resolution history exported to %s", filepath) diff --git a/tests/config_ng/test_configuration_resolver.py b/tests/config_ng/test_configuration_resolver.py new file mode 100644 index 0000000000..07e87efcb7 --- /dev/null +++ b/tests/config_ng/test_configuration_resolver.py @@ -0,0 +1,441 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for ConfigurationResolver. 
+ +Tests verify: +- Source orchestration +- Precedence rules (CLI > Env > Files) +- History tracking integration +- Resolution methods +- Debugging utilities +""" + +import json + +from snowflake.cli.api.config_ng.resolver import ConfigurationResolver +from snowflake.cli.api.config_ng.sources import ( + CliArgumentSource, + EnvironmentSource, + FileSource, +) + + +class TestConfigurationResolver: + """Test suite for ConfigurationResolver.""" + + def test_create_resolver_empty(self): + """Should create resolver with no sources.""" + resolver = ConfigurationResolver() + + assert len(resolver.get_sources()) == 0 + + def test_create_resolver_with_sources(self): + """Should create resolver with provided sources.""" + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + sources = resolver.get_sources() + assert len(sources) == 1 + + def test_sources_sorted_by_priority(self): + """Should sort sources by priority (highest first).""" + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = EnvironmentSource(handlers=[]) + file_source = FileSource(file_paths=[], handlers=[]) + + # Add in wrong order + resolver = ConfigurationResolver(sources=[file_source, cli_source, env_source]) + + sources = resolver.get_sources() + # Should be sorted: CLI (1), Env (2), File (3) + assert sources[0].priority.value == 1 # CLI + assert sources[1].priority.value == 2 # Env + assert sources[2].priority.value == 3 # File + + def test_add_source(self): + """Should add source and re-sort.""" + resolver = ConfigurationResolver() + + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + resolver.add_source(cli_source) + + assert len(resolver.get_sources()) == 1 + + def test_resolve_from_single_source(self): + """Should resolve values from single source.""" + cli_source = CliArgumentSource( + cli_context={"account": "my_account", "user": "my_user"} + ) + resolver = 
ConfigurationResolver(sources=[cli_source]) + + config = resolver.resolve() + + assert config["account"] == "my_account" + assert config["user"] == "my_user" + + def test_resolve_specific_key(self): + """Should resolve specific key only.""" + cli_source = CliArgumentSource( + cli_context={"account": "my_account", "user": "my_user"} + ) + resolver = ConfigurationResolver(sources=[cli_source]) + + config = resolver.resolve(key="account") + + assert len(config) == 1 + assert config["account"] == "my_account" + + def test_resolve_value_method(self): + """Should resolve single value.""" + cli_source = CliArgumentSource(cli_context={"account": "my_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + account = resolver.resolve_value("account") + + assert account == "my_account" + + def test_resolve_with_default(self): + """Should return default when key not found.""" + resolver = ConfigurationResolver() + + value = resolver.resolve_value("missing_key", default="default_value") + + assert value == "default_value" + + def test_cli_overrides_env(self, monkeypatch): + """CLI values should override environment values.""" + from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler + + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + + resolver = ConfigurationResolver(sources=[cli_source, env_source]) + + account = resolver.resolve_value("account") + + assert account == "cli_account" + + def test_env_overrides_file(self, tmp_path, monkeypatch): + """Environment values should override file values.""" + from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler + from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler + + # Create config file + config_file = tmp_path / "config.toml" + config_file.write_text('[default]\naccount = "file_account"\n') + + 
monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[env_source, file_source]) + + account = resolver.resolve_value("account") + + assert account == "env_account" + + def test_complete_precedence_chain(self, tmp_path, monkeypatch): + """Test complete precedence: CLI > Env > File.""" + from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler + from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler + + # Create config file + config_file = tmp_path / "config.toml" + config_file.write_text('[default]\naccount = "file_account"\n') + + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + account = resolver.resolve_value("account") + + # CLI should win + assert account == "cli_account" + + def test_fallback_to_lower_priority(self, tmp_path, monkeypatch): + """Should use lower priority source when higher doesn't have value.""" + from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler + from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler + + # Create config file + config_file = tmp_path / "config.toml" + config_file.write_text( + '[default]\naccount = "file_account"\nuser = "file_user"\n' + ) + + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + + # CLI doesn't have any values + cli_source = CliArgumentSource(cli_context={}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + 
file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + config = resolver.resolve() + + # Account from env, user from file + assert config["account"] == "env_account" + assert config["user"] == "file_user" + + def test_get_resolution_history(self): + """Should get resolution history for a key.""" + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + resolver.resolve() + + history = resolver.get_resolution_history("account") + + assert history is not None + assert history.key == "account" + assert history.final_value == "cli_account" + + def test_get_all_histories(self): + """Should get all resolution histories.""" + cli_source = CliArgumentSource( + cli_context={"account": "my_account", "user": "my_user"} + ) + resolver = ConfigurationResolver(sources=[cli_source]) + + resolver.resolve() + + histories = resolver.get_all_histories() + + assert len(histories) == 2 + assert "account" in histories + assert "user" in histories + + def test_get_value_metadata(self): + """Should get metadata for resolved value.""" + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + resolver.resolve() + + metadata = resolver.get_value_metadata("account") + + assert metadata is not None + assert metadata.key == "account" + assert metadata.value == "cli_account" + assert metadata.source_name == "cli_arguments" + + def test_get_history_summary(self, tmp_path, monkeypatch): + """Should get summary statistics.""" + from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler + from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler + + config_file = tmp_path / "config.toml" + config_file.write_text( + '[default]\naccount = "file_account"\nuser = "file_user"\n' + ) + + 
monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + + cli_source = CliArgumentSource(cli_context={}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + config = resolver.resolve() + + summary = resolver.get_history_summary() + + # Check that we resolved at least the expected keys + assert summary["total_keys_resolved"] >= 2 + assert summary["keys_with_overrides"] >= 1 # account overridden + assert ( + summary["source_wins"]["snowflake_cli_env"] >= 1 + ) # account (and possibly others) + assert summary["source_wins"]["toml:default"] >= 1 # user and possibly others + + def test_disable_enable_history(self): + """Should disable and enable history tracking.""" + cli_source = CliArgumentSource(cli_context={"account": "my_account"}) + resolver = ConfigurationResolver(sources=[cli_source], track_history=False) + + resolver.resolve() + + # No history tracked + histories = resolver.get_all_histories() + assert len(histories) == 0 + + # Enable and resolve again + resolver.enable_history() + resolver.resolve() + + histories = resolver.get_all_histories() + assert len(histories) == 1 + + def test_clear_history(self): + """Should clear resolution history.""" + cli_source = CliArgumentSource(cli_context={"account": "my_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + resolver.resolve() + assert len(resolver.get_all_histories()) == 1 + + resolver.clear_history() + assert len(resolver.get_all_histories()) == 0 + + def test_format_resolution_chain(self): + """Should format resolution chain.""" + cli_source = CliArgumentSource(cli_context={"account": "my_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + resolver.resolve() + formatted = resolver.format_resolution_chain("account") + + assert "account resolution chain" in 
formatted + assert "my_account" in formatted + assert "SELECTED" in formatted + + def test_format_resolution_chain_nonexistent_key(self): + """Should return message for nonexistent key.""" + resolver = ConfigurationResolver() + + formatted = resolver.format_resolution_chain("nonexistent") + + assert "No resolution history found" in formatted + + def test_format_all_chains(self): + """Should format all resolution chains.""" + cli_source = CliArgumentSource( + cli_context={"account": "my_account", "user": "my_user"} + ) + resolver = ConfigurationResolver(sources=[cli_source]) + + resolver.resolve() + formatted = resolver.format_all_chains() + + assert "Configuration Resolution History" in formatted + assert "account resolution chain" in formatted + assert "user resolution chain" in formatted + + def test_format_all_chains_when_empty(self): + """Should return message when no history available.""" + resolver = ConfigurationResolver(track_history=False) + + formatted = resolver.format_all_chains() + + assert "No resolution history available" in formatted + + def test_export_history(self, tmp_path): + """Should export history to JSON file.""" + cli_source = CliArgumentSource( + cli_context={"account": "my_account", "user": "my_user"} + ) + resolver = ConfigurationResolver(sources=[cli_source]) + + resolver.resolve() + + export_file = tmp_path / "debug_config.json" + resolver.export_history(export_file) + + assert export_file.exists() + + # Check JSON structure + with open(export_file) as f: + data = json.load(f) + + assert "summary" in data + assert "histories" in data + assert "account" in data["histories"] + assert "user" in data["histories"] + + def test_source_error_does_not_break_resolution(self): + """Should continue resolution if a source fails.""" + from snowflake.cli.api.config_ng.core import SourcePriority, ValueSource + + class FailingSource(ValueSource): + @property + def source_name(self) -> str: + return "failing_source" + + @property + def priority(self) 
-> SourcePriority: + return SourcePriority.ENVIRONMENT + + def discover(self, key=None): + raise RuntimeError("Source failed") + + def supports_key(self, key: str) -> bool: + return True + + failing_source = FailingSource() + cli_source = CliArgumentSource(cli_context={"account": "my_account"}) + + resolver = ConfigurationResolver(sources=[failing_source, cli_source]) + + # Should still get value from CLI source + account = resolver.resolve_value("account") + assert account == "my_account" + + def test_get_sources_returns_copy(self): + """get_sources should return a copy.""" + cli_source = CliArgumentSource(cli_context={"account": "my_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + sources = resolver.get_sources() + sources.clear() + + # Original sources should be unchanged + assert len(resolver.get_sources()) == 1 + + def test_resolve_with_no_sources(self): + """Should return empty dict when no sources configured.""" + resolver = ConfigurationResolver() + + config = resolver.resolve() + + assert config == {} + + def test_resolve_value_returns_default_when_not_found(self): + """Should return default value when key not found.""" + resolver = ConfigurationResolver() + + value = resolver.resolve_value("missing", default="default_value") + + assert value == "default_value" + + def test_multiple_resolve_calls_consistent(self): + """Multiple resolve calls should return consistent results.""" + cli_source = CliArgumentSource(cli_context={"account": "my_account"}) + resolver = ConfigurationResolver(sources=[cli_source]) + + config1 = resolver.resolve() + config2 = resolver.resolve() + + assert config1 == config2 diff --git a/tests/config_ng/test_resolution_history_tracker.py b/tests/config_ng/test_resolution_history_tracker.py new file mode 100644 index 0000000000..c7ca164de1 --- /dev/null +++ b/tests/config_ng/test_resolution_history_tracker.py @@ -0,0 +1,431 @@ +# Copyright (c) 2024 Snowflake Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit tests for ResolutionHistoryTracker. + +Tests verify: +- Discovery recording +- Selection marking +- Default value tracking +- History retrieval +- Summary statistics +""" + +from datetime import datetime + +from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority +from snowflake.cli.api.config_ng.resolver import ResolutionHistoryTracker + + +class TestResolutionHistoryTracker: + """Test suite for ResolutionHistoryTracker.""" + + def test_create_tracker(self): + """Should create empty tracker with tracking enabled.""" + tracker = ResolutionHistoryTracker() + + assert tracker.is_enabled() is True + assert len(tracker.get_all_histories()) == 0 + + def test_enable_disable_tracking(self): + """Should enable and disable tracking.""" + tracker = ResolutionHistoryTracker() + + tracker.disable() + assert tracker.is_enabled() is False + + tracker.enable() + assert tracker.is_enabled() is True + + def test_record_discovery(self): + """Should record value discoveries.""" + tracker = ResolutionHistoryTracker() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv) + + # Discovery recorded but history not finalized yet + assert len(tracker.get_all_histories()) == 0 + + def test_mark_selected_creates_history(self): + """Should create history when value is marked as selected.""" + 
tracker = ResolutionHistoryTracker() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv) + tracker.mark_selected("account", "cli_arguments") + + history = tracker.get_history("account") + assert history is not None + assert history.key == "account" + assert history.final_value == "my_account" + assert len(history.entries) == 1 + assert history.entries[0].was_used is True + + def test_multiple_discoveries_single_selection(self): + """Should track multiple discoveries with one selected.""" + tracker = ResolutionHistoryTracker() + + # Record discoveries from multiple sources + cv_file = ConfigValue( + key="account", + value="file_account", + source_name="toml:connections", + priority=SourcePriority.FILE, + ) + cv_env = ConfigValue( + key="account", + value="env_account", + source_name="snowflake_cli_env", + priority=SourcePriority.ENVIRONMENT, + ) + cv_cli = ConfigValue( + key="account", + value="cli_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv_file) + tracker.record_discovery("account", cv_env) + tracker.record_discovery("account", cv_cli) + + # Mark CLI as selected + tracker.mark_selected("account", "cli_arguments") + + history = tracker.get_history("account") + assert history is not None + assert len(history.entries) == 3 + assert history.final_value == "cli_account" + + # Check which was selected + selected = [e for e in history.entries if e.was_used] + assert len(selected) == 1 + assert selected[0].config_value.source_name == "cli_arguments" + + # Check overridden entries + overridden = [e for e in history.entries if not e.was_used] + assert len(overridden) == 2 + + def test_mark_default_used(self): + """Should mark when default value is used.""" + tracker = ResolutionHistoryTracker() + + tracker.mark_default_used("missing_key", "default_value") + + 
history = tracker.get_history("missing_key") + assert history is not None + assert history.default_used is True + assert history.final_value == "default_value" + assert len(history.entries) == 0 + + def test_mark_default_after_discoveries(self): + """Should update history when default is used after discoveries.""" + tracker = ResolutionHistoryTracker() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv) + tracker.mark_selected("account", "cli_arguments") + tracker.mark_default_used("account", "default_account") + + history = tracker.get_history("account") + assert history.default_used is True + assert history.final_value == "default_account" + + def test_get_history_nonexistent_key(self): + """Should return None for keys not tracked.""" + tracker = ResolutionHistoryTracker() + + history = tracker.get_history("nonexistent") + assert history is None + + def test_get_all_histories(self): + """Should return all tracked histories.""" + tracker = ResolutionHistoryTracker() + + cv1 = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + cv2 = ConfigValue( + key="user", + value="my_user", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv1) + tracker.mark_selected("account", "cli_arguments") + + tracker.record_discovery("user", cv2) + tracker.mark_selected("user", "cli_arguments") + + histories = tracker.get_all_histories() + assert len(histories) == 2 + assert "account" in histories + assert "user" in histories + + def test_clear_history(self): + """Should clear all recorded history.""" + tracker = ResolutionHistoryTracker() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv) + 
tracker.mark_selected("account", "cli_arguments") + + assert len(tracker.get_all_histories()) == 1 + + tracker.clear() + + assert len(tracker.get_all_histories()) == 0 + + def test_disabled_tracker_does_not_record(self): + """Should not record when tracking is disabled.""" + tracker = ResolutionHistoryTracker() + tracker.disable() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv) + tracker.mark_selected("account", "cli_arguments") + + assert len(tracker.get_all_histories()) == 0 + + def test_summary_with_no_histories(self): + """Should return empty summary when no histories exist.""" + tracker = ResolutionHistoryTracker() + + summary = tracker.get_summary() + + assert summary["total_keys_resolved"] == 0 + assert summary["keys_with_overrides"] == 0 + assert summary["keys_using_defaults"] == 0 + assert len(summary["source_usage"]) == 0 + assert len(summary["source_wins"]) == 0 + + def test_summary_with_single_source(self): + """Should calculate correct summary for single source.""" + tracker = ResolutionHistoryTracker() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv) + tracker.mark_selected("account", "cli_arguments") + + summary = tracker.get_summary() + + assert summary["total_keys_resolved"] == 1 + assert summary["keys_with_overrides"] == 0 + assert summary["source_usage"]["cli_arguments"] == 1 + assert summary["source_wins"]["cli_arguments"] == 1 + + def test_summary_with_multiple_sources(self): + """Should calculate correct summary with overrides.""" + tracker = ResolutionHistoryTracker() + + # File source provides account + cv_file = ConfigValue( + key="account", + value="file_account", + source_name="toml:connections", + priority=SourcePriority.FILE, + ) + # Env source overrides account + cv_env = 
ConfigValue( + key="account", + value="env_account", + source_name="snowflake_cli_env", + priority=SourcePriority.ENVIRONMENT, + ) + + tracker.record_discovery("account", cv_file) + tracker.record_discovery("account", cv_env) + tracker.mark_selected("account", "snowflake_cli_env") + + summary = tracker.get_summary() + + assert summary["total_keys_resolved"] == 1 + assert summary["keys_with_overrides"] == 1 + assert summary["source_usage"]["toml:connections"] == 1 + assert summary["source_usage"]["snowflake_cli_env"] == 1 + assert summary["source_wins"]["snowflake_cli_env"] == 1 + assert summary["source_wins"].get("toml:connections", 0) == 0 + + def test_summary_with_defaults(self): + """Should count keys using defaults.""" + tracker = ResolutionHistoryTracker() + + tracker.mark_default_used("missing_key", "default_value") + + summary = tracker.get_summary() + + assert summary["total_keys_resolved"] == 1 + assert summary["keys_using_defaults"] == 1 + + def test_entries_have_timestamps(self): + """Resolution entries should have timestamps.""" + tracker = ResolutionHistoryTracker() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + before = datetime.now() + tracker.record_discovery("account", cv) + tracker.mark_selected("account", "cli_arguments") + after = datetime.now() + + history = tracker.get_history("account") + entry_timestamp = history.entries[0].timestamp + + assert before <= entry_timestamp <= after + + def test_overridden_by_is_set_correctly(self): + """Should set overridden_by field correctly.""" + tracker = ResolutionHistoryTracker() + + cv_file = ConfigValue( + key="account", + value="file_account", + source_name="toml:connections", + priority=SourcePriority.FILE, + ) + cv_cli = ConfigValue( + key="account", + value="cli_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv_file) + 
tracker.record_discovery("account", cv_cli) + tracker.mark_selected("account", "cli_arguments") + + history = tracker.get_history("account") + + # File entry should be overridden by CLI + file_entry = [ + e + for e in history.entries + if e.config_value.source_name == "toml:connections" + ][0] + assert file_entry.was_used is False + assert file_entry.overridden_by == "cli_arguments" + + # CLI entry should be selected + cli_entry = [ + e for e in history.entries if e.config_value.source_name == "cli_arguments" + ][0] + assert cli_entry.was_used is True + assert cli_entry.overridden_by is None + + def test_get_all_histories_returns_copy(self): + """get_all_histories should return a copy.""" + tracker = ResolutionHistoryTracker() + + cv = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + tracker.record_discovery("account", cv) + tracker.mark_selected("account", "cli_arguments") + + histories1 = tracker.get_all_histories() + histories1.clear() + + histories2 = tracker.get_all_histories() + assert len(histories2) == 1 + + def test_multiple_keys_tracked_independently(self): + """Should track multiple keys independently.""" + tracker = ResolutionHistoryTracker() + + cv_account = ConfigValue( + key="account", + value="my_account", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + cv_user_file = ConfigValue( + key="user", + value="file_user", + source_name="toml:connections", + priority=SourcePriority.FILE, + ) + cv_user_cli = ConfigValue( + key="user", + value="cli_user", + source_name="cli_arguments", + priority=SourcePriority.CLI_ARGUMENT, + ) + + # Account from CLI only + tracker.record_discovery("account", cv_account) + tracker.mark_selected("account", "cli_arguments") + + # User from File and CLI + tracker.record_discovery("user", cv_user_file) + tracker.record_discovery("user", cv_user_cli) + tracker.mark_selected("user", "cli_arguments") + + # Check account 
history + account_history = tracker.get_history("account") + assert len(account_history.entries) == 1 + + # Check user history + user_history = tracker.get_history("user") + assert len(user_history.entries) == 2 diff --git a/tests/config_ng/test_resolver_integration.py b/tests/config_ng/test_resolver_integration.py new file mode 100644 index 0000000000..3096fdb81e --- /dev/null +++ b/tests/config_ng/test_resolver_integration.py @@ -0,0 +1,377 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +End-to-end integration tests for ConfigurationResolver. 
+ +Tests verify: +- Complete resolution workflow with all sources +- Real-world migration scenarios +- Complete precedence chains +- History tracking in production scenarios +""" + +from snowflake.cli.api.config_ng.env_handlers import ( + SnowCliEnvHandler, + SnowSqlEnvHandler, +) +from snowflake.cli.api.config_ng.file_handlers import ( + SnowSqlConfigHandler, + TomlFileHandler, +) +from snowflake.cli.api.config_ng.resolver import ConfigurationResolver +from snowflake.cli.api.config_ng.sources import ( + CliArgumentSource, + EnvironmentSource, + FileSource, +) + + +class TestResolverEndToEnd: + """End-to-end integration tests for complete resolution workflow.""" + + def test_production_configuration_setup(self, tmp_path, monkeypatch): + """Test production-like configuration setup.""" + # Create SnowCLI TOML config + snowcli_config = tmp_path / "connections.toml" + snowcli_config.write_text( + "[default]\n" + 'account = "toml_account"\n' + 'user = "toml_user"\n' + 'database = "toml_db"\n' + ) + + # Set environment variables + monkeypatch.setenv("SNOWFLAKE_WAREHOUSE", "env_warehouse") + + # CLI arguments + cli_context = {"account": "cli_account"} + + # Create sources + cli_source = CliArgumentSource(cli_context=cli_context) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[snowcli_config], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + # Create resolver + resolver = ConfigurationResolver( + sources=[cli_source, env_source, file_source], track_history=True + ) + + # Resolve + config = resolver.resolve() + + # Verify precedence + assert config["account"] == "cli_account" # CLI wins + assert config["warehouse"] == "env_warehouse" # From env + assert config["user"] == "toml_user" # From file + assert config["database"] == "toml_db" # From file + + # Verify history + account_history = resolver.get_resolution_history("account") + assert len(account_history.entries) == 2 # TOML and CLI + assert ( + 
account_history.selected_entry.config_value.source_name == "cli_arguments" + ) + + def test_snowsql_to_snowcli_migration(self, tmp_path, monkeypatch): + """Test complete SnowSQL to SnowCLI migration scenario.""" + # SnowSQL config (legacy) + snowsql_config = tmp_path / "snowsql.toml" + snowsql_config.write_text( + "[connections]\n" + 'accountname = "old_account"\n' + 'username = "old_user"\n' + 'databasename = "old_db"\n' + 'warehousename = "old_warehouse"\n' + ) + + # SnowCLI config (new, partial migration) + snowcli_config = tmp_path / "connections.toml" + snowcli_config.write_text( + '[default]\naccount = "new_account"\nuser = "new_user"\n' + ) + + # Environment variables (mixed) + monkeypatch.setenv("SNOWSQL_PWD", "env_password") + monkeypatch.setenv("SNOWFLAKE_WAREHOUSE", "env_warehouse") + + # Create sources + env_source = EnvironmentSource( + handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] + ) + file_source = FileSource( + file_paths=[snowcli_config, snowsql_config], + handlers=[ + TomlFileHandler(section_path=["default"]), + SnowSqlConfigHandler(), + ], + ) + + resolver = ConfigurationResolver(sources=[env_source, file_source]) + + config = resolver.resolve() + + # New values should win + assert config["account"] == "new_account" # From SnowCLI TOML + assert config["user"] == "new_user" # From SnowCLI TOML + assert config["warehouse"] == "env_warehouse" # From SnowCLI env + assert ( + config["password"] == "env_password" + ) # From SnowSQL env (mapped from PWD) + + # Legacy values as fallback + assert config["database"] == "old_db" # From SnowSQL config + + def test_debugging_complete_workflow(self, tmp_path, monkeypatch): + """Test complete debugging workflow.""" + # Setup multi-source config + config_file = tmp_path / "config.toml" + config_file.write_text('[default]\naccount = "file_account"\n') + + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = 
EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + # Resolve + resolver.resolve() + + # Format resolution chain + formatted = resolver.format_resolution_chain("account") + + # Verify chain shows all sources + assert "file_account" in formatted + assert "env_account" in formatted + assert "cli_account" in formatted + assert "SELECTED" in formatted + + def test_history_export_complete(self, tmp_path, monkeypatch): + """Test complete history export for debugging.""" + config_file = tmp_path / "config.toml" + config_file.write_text( + "[default]\n" + 'account = "file_account"\n' + 'user = "file_user"\n' + 'database = "file_db"\n' + ) + + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + monkeypatch.setenv("SNOWFLAKE_USER", "env_user") + + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + config = resolver.resolve() + + # Export history + export_file = tmp_path / "debug.json" + resolver.export_history(export_file) + + # Verify export contains all keys + import json + + with open(export_file) as f: + data = json.load(f) + + assert "account" in data["histories"] + assert "user" in data["histories"] + assert "database" in data["histories"] + + # Verify summary (may have more keys than expected from TOML) + assert data["summary"]["total_keys_resolved"] >= 3 + assert data["summary"]["keys_with_overrides"] >= 2 # account and user + + def test_cli_override_everything(self, tmp_path, monkeypatch): + """Test CLI arguments override all other sources.""" + # Setup all sources with same key 
+ config_file = tmp_path / "config.toml" + config_file.write_text('[default]\naccount = "file_account"\n') + + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + account = resolver.resolve_value("account") + + assert account == "cli_account" + + # Verify all sources were consulted + history = resolver.get_resolution_history("account") + assert len(history.entries) == 3 + assert len(history.overridden_entries) == 2 + + def test_layered_fallback(self, tmp_path, monkeypatch): + """Test layered fallback across multiple sources.""" + config_file = tmp_path / "config.toml" + config_file.write_text( + "[default]\n" + 'account = "file_account"\n' + 'user = "file_user"\n' + 'database = "file_db"\n' + 'warehouse = "file_warehouse"\n' + 'role = "file_role"\n' + ) + + # Env only provides some values + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + monkeypatch.setenv("SNOWFLAKE_USER", "env_user") + + # CLI only provides one value + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + config = resolver.resolve() + + # Verify layered fallback + assert config["account"] == "cli_account" # From CLI + assert config["user"] == "env_user" # From Env (CLI didn't have it) + assert config["database"] == "file_db" # From File (neither CLI nor Env had it) + assert config["warehouse"] == "file_warehouse" # From File + assert config["role"] == "file_role" # 
From File + + def test_summary_statistics_complete(self, tmp_path, monkeypatch): + """Test summary statistics for complete resolution.""" + config_file = tmp_path / "config.toml" + config_file.write_text( + "[default]\n" + 'account = "file_account"\n' + 'user = "file_user"\n' + 'database = "file_db"\n' + ) + + monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") + monkeypatch.setenv("SNOWFLAKE_USER", "env_user") + + cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) + env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) + file_source = FileSource( + file_paths=[config_file], + handlers=[TomlFileHandler(section_path=["default"])], + ) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + config = resolver.resolve() + + summary = resolver.get_history_summary() + + # At least 3 keys + assert summary["total_keys_resolved"] >= 3 + + # account and user have overrides + assert summary["keys_with_overrides"] >= 2 + + # Source usage: File provided at least 3, Env provided at least 2, CLI provided 1 + assert summary["source_usage"]["toml:default"] >= 3 + assert summary["source_usage"]["snowflake_cli_env"] >= 2 + assert summary["source_usage"]["cli_arguments"] == 1 + + # Source wins: CLI won 1 (account), Env won at least 1 (user), File won at least 1 (database) + assert summary["source_wins"]["cli_arguments"] == 1 # account + assert ( + summary["source_wins"]["snowflake_cli_env"] >= 1 + ) # user and possibly others + assert ( + summary["source_wins"]["toml:default"] >= 1 + ) # database and possibly others + + def test_no_sources_with_default(self): + """Test resolver with no sources returns default.""" + resolver = ConfigurationResolver() + + value = resolver.resolve_value("missing", default="default_value") + + assert value == "default_value" + + # Verify default tracked in history + history = resolver.get_resolution_history("missing") + assert history.default_used is True + assert history.final_value == 
"default_value" + + def test_real_world_multiple_connections(self, tmp_path): + """Test real-world scenario with multiple connection configs.""" + # User has both SnowCLI and SnowSQL configs with different connections + snowcli_config = tmp_path / "connections.toml" + snowcli_config.write_text( + "[prod]\n" + 'account = "prod_account"\n' + 'user = "prod_user"\n' + "[dev]\n" + 'account = "dev_account"\n' + 'user = "dev_user"\n' + ) + + # Test resolving prod connection + file_source = FileSource( + file_paths=[snowcli_config], + handlers=[TomlFileHandler(section_path=["prod"])], + ) + + resolver_prod = ConfigurationResolver(sources=[file_source]) + prod_config = resolver_prod.resolve() + + assert prod_config["account"] == "prod_account" + assert prod_config["user"] == "prod_user" + + # Test resolving dev connection + file_source_dev = FileSource( + file_paths=[snowcli_config], + handlers=[TomlFileHandler(section_path=["dev"])], + ) + + resolver_dev = ConfigurationResolver(sources=[file_source_dev]) + dev_config = resolver_dev.resolve() + + assert dev_config["account"] == "dev_account" + assert dev_config["user"] == "dev_user" + + def test_empty_sources_empty_result(self): + """Test resolver with empty sources returns empty config.""" + cli_source = CliArgumentSource(cli_context={}) + env_source = EnvironmentSource(handlers=[]) + file_source = FileSource(file_paths=[], handlers=[]) + + resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) + + config = resolver.resolve() + + assert config == {} From 365730528defc2e37b2ca57ef82937e5bf74eb1e Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 7 Oct 2025 09:26:54 +0200 Subject: [PATCH 09/78] SNOW-2306184: config refactory - integrate new config --- src/snowflake/cli/api/config_provider.py | 346 +++++++++++++++++-- tests/api/test_config_provider.py | 52 +-- tests/test_config_provider_integration.py | 389 ++++++++++++++++++++++ 3 files changed, 732 insertions(+), 55 deletions(-) create mode 
100644 tests/test_config_provider_integration.py diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 28f143cc38..a7cc6f9f97 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -16,7 +16,10 @@ import os from abc import ABC, abstractmethod -from typing import Any, Optional +from typing import TYPE_CHECKING, Any, Dict, Optional + +if TYPE_CHECKING: + from snowflake.cli.api.config_ng.resolver import ConfigurationResolver ALTERNATIVE_CONFIG_ENV_VAR = "SNOWFLAKE_CLI_CONFIG_V2_ENABLED" @@ -75,80 +78,363 @@ class LegacyConfigProvider(ConfigProvider): """ def get_section(self, *path) -> dict: - from snowflake.cli.api.config import get_config_section_internal + from snowflake.cli.api.config import get_config_section - return get_config_section_internal(*path) + return get_config_section(*path) def get_value(self, *path, key: str, default: Optional[Any] = None) -> Any: - from snowflake.cli.api.config import Empty, get_config_value_internal + from snowflake.cli.api.config import Empty, get_config_value - return get_config_value_internal( + return get_config_value( *path, key=key, default=default if default is not None else Empty ) def set_value(self, path: list[str], value: Any) -> None: - from snowflake.cli.api.config import set_config_value_internal + from snowflake.cli.api.config import set_config_value - set_config_value_internal(path, value) + set_config_value(path, value) def unset_value(self, path: list[str]) -> None: - from snowflake.cli.api.config import unset_config_value_internal + from snowflake.cli.api.config import unset_config_value - unset_config_value_internal(path) + unset_config_value(path) def section_exists(self, *path) -> bool: - from snowflake.cli.api.config import config_section_exists_internal + from snowflake.cli.api.config import config_section_exists - return config_section_exists_internal(*path) + return config_section_exists(*path) def 
read_config(self) -> None: - from snowflake.cli.api.config import _read_config_file + from snowflake.cli.api.config import get_config_manager - _read_config_file() + config_manager = get_config_manager() + config_manager.read_config() def get_connection_dict(self, connection_name: str) -> dict: - from snowflake.cli.api.config import get_connection_dict_internal + from snowflake.cli.api.config import get_connection_dict - return get_connection_dict_internal(connection_name) + return get_connection_dict(connection_name) def get_all_connections(self) -> dict: - from snowflake.cli.api.config import get_all_connections_internal + from snowflake.cli.api.config import get_all_connections - return get_all_connections_internal() + return get_all_connections() class AlternativeConfigProvider(ConfigProvider): """ - New configuration provider implementation. - To be implemented with new logic while maintaining same output format. + New configuration provider using config_ng resolution system. + + This provider uses ConfigurationResolver to discover values from: + - CLI arguments (highest priority) + - Environment variables (SNOWFLAKE_* and SNOWSQL_*) + - Configuration files (SnowCLI TOML and SnowSQL config) + + Maintains backward compatibility with LegacyConfigProvider output format. 
""" - def __init__(self): - pass + def __init__(self) -> None: + self._resolver: Optional[ConfigurationResolver] = None + self._config_cache: Dict[str, Any] = {} + self._initialized: bool = False + + def _ensure_initialized(self) -> None: + """Lazily initialize the resolver on first use.""" + if self._initialized: + return + + from snowflake.cli.api.cli_global_context import get_cli_context + from snowflake.cli.api.config import get_config_manager, get_connections_file + from snowflake.cli.api.config_ng import ( + CliArgumentSource, + ConfigurationResolver, + EnvironmentSource, + FileSource, + SnowCliEnvHandler, + SnowSqlConfigHandler, + SnowSqlEnvHandler, + TomlFileHandler, + get_snowsql_config_paths, + ) + + # Get CLI context safely + try: + cli_context = get_cli_context().connection_context + cli_context_dict = cli_context.present_values_as_dict() + except Exception: + cli_context_dict = {} + + # 1. CLI Arguments Source (Priority 1 - Highest) + cli_source = CliArgumentSource(cli_context=cli_context_dict) + + # 2. Environment Variables Source (Priority 2 - Medium) + env_source = EnvironmentSource( + handlers=[ + SnowCliEnvHandler(), # SNOWFLAKE_* checked first + SnowSqlEnvHandler(), # SNOWSQL_* checked second (fallback) + ] + ) + + # 3. 
Configuration Files Source (Priority 3 - Lowest) + config_manager = get_config_manager() + connections_file = get_connections_file() + + file_paths = [] + # Add connections file if it exists + if connections_file and connections_file.exists(): + file_paths.append(connections_file) + # Add main config file + if config_manager.file_path.exists(): + file_paths.append(config_manager.file_path) + # Add SnowSQL config paths + file_paths.extend(get_snowsql_config_paths()) + + file_source = FileSource( + file_paths=file_paths, + handlers=[ + # SnowCLI TOML handlers (tried first) + TomlFileHandler(section_path=["connections"]), + TomlFileHandler(section_path=["cli"]), + TomlFileHandler(), # Root level + # SnowSQL handler (tried last, fallback) + SnowSqlConfigHandler(), + ], + ) + + # Create resolver with all sources + self._resolver = ConfigurationResolver( + sources=[cli_source, env_source, file_source], track_history=True + ) + + self._initialized = True + + def read_config(self) -> None: + """ + Load configuration from all sources. + For config_ng, this means (re)initializing the resolver. + """ + self._initialized = False + self._config_cache.clear() + self._ensure_initialized() + + # Resolve all configuration to populate cache + assert self._resolver is not None + self._config_cache = self._resolver.resolve() def get_section(self, *path) -> dict: - raise NotImplementedError("Alternative config provider not yet implemented") + """ + Get configuration section at specified path. 
+ + Args: + *path: Section path (e.g., "connections", "my_conn") + + Returns: + Dictionary of section contents + """ + self._ensure_initialized() + + if not self._config_cache: + assert self._resolver is not None + self._config_cache = self._resolver.resolve() + + # Navigate through path to find section + if not path: + return self._config_cache + + # For connections section, return all connections as nested dicts + if len(path) == 1 and path[0] == "connections": + return self._get_all_connections_dict() + + # For specific connection, return connection dict + if len(path) == 2 and path[0] == "connections": + connection_name = path[1] + return self._get_connection_dict_internal(connection_name) + + # For other sections, try to resolve with path prefix + section_prefix = ".".join(path) + result = {} + for key, value in self._config_cache.items(): + if key.startswith(section_prefix + "."): + # Strip prefix to get relative key + relative_key = key[len(section_prefix) + 1 :] + result[relative_key] = value + elif key == section_prefix: + # Exact match for section itself + return value if isinstance(value, dict) else {section_prefix: value} + + return result def get_value(self, *path, key: str, default: Optional[Any] = None) -> Any: - raise NotImplementedError("Alternative config provider not yet implemented") + """ + Get single configuration value at path + key. + + Args: + *path: Path to section + key: Configuration key + default: Default value if not found + + Returns: + Configuration value or default + """ + self._ensure_initialized() + + if not self._config_cache: + assert self._resolver is not None + self._config_cache = self._resolver.resolve() + + # Build full key from path and key + if path: + full_key = ".".join(path) + "." 
+ key + else: + full_key = key + + # Try to resolve the value + value = self._config_cache.get(full_key, default) + return value def set_value(self, path: list[str], value: Any) -> None: - raise NotImplementedError("Alternative config provider not yet implemented") + """ + Set configuration value at path. + + Note: config_ng is read-only for resolution. This delegates to + legacy config system for writing. + """ + from snowflake.cli.api.config import set_config_value as legacy_set_value + + legacy_set_value(path, value) + # Clear cache to force re-read on next access + self._config_cache.clear() + self._initialized = False def unset_value(self, path: list[str]) -> None: - raise NotImplementedError("Alternative config provider not yet implemented") + """ + Remove configuration value at path. + + Note: config_ng is read-only for resolution. This delegates to + legacy config system for writing. + """ + from snowflake.cli.api.config import unset_config_value as legacy_unset_value + + legacy_unset_value(path) + # Clear cache to force re-read on next access + self._config_cache.clear() + self._initialized = False def section_exists(self, *path) -> bool: - raise NotImplementedError("Alternative config provider not yet implemented") + """ + Check if configuration section exists. 
- def read_config(self) -> None: - raise NotImplementedError("Alternative config provider not yet implemented") + Args: + *path: Section path + + Returns: + True if section exists and has values + """ + self._ensure_initialized() + + if not self._config_cache: + assert self._resolver is not None + self._config_cache = self._resolver.resolve() + + if not path: + return True + + section_prefix = ".".join(path) + # Check if any key starts with this prefix + return any( + key == section_prefix or key.startswith(section_prefix + ".") + for key in self._config_cache.keys() + ) + + def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: + """ + Get connection configuration by name. + + Args: + connection_name: Name of the connection + + Returns: + Dictionary of connection parameters + """ + self._ensure_initialized() + + if not self._config_cache: + assert self._resolver is not None + self._config_cache = self._resolver.resolve() + + # Look for keys like "connections.{connection_name}.{param}" + connection_prefix = f"connections.{connection_name}." + connection_dict: Dict[str, Any] = {} + + for key, value in self._config_cache.items(): + if key.startswith(connection_prefix): + # Extract parameter name + param_name = key[len(connection_prefix) :] + connection_dict[param_name] = value + + if not connection_dict: + from snowflake.cli.api.exceptions import MissingConfigurationError + + raise MissingConfigurationError( + f"Connection {connection_name} is not configured" + ) + + return connection_dict def get_connection_dict(self, connection_name: str) -> dict: - raise NotImplementedError("Alternative config provider not yet implemented") + """ + Get connection configuration by name. 
+ + Args: + connection_name: Name of the connection + + Returns: + Dictionary of connection parameters + """ + return self._get_connection_dict_internal(connection_name) + + def _get_all_connections_dict(self) -> Dict[str, Dict[str, Any]]: + """ + Get all connection configurations as nested dictionary. + + Returns: + Dictionary mapping connection names to their configurations + """ + self._ensure_initialized() + + if not self._config_cache: + assert self._resolver is not None + self._config_cache = self._resolver.resolve() + + connections: Dict[str, Dict[str, Any]] = {} + connections_prefix = "connections." + + for key, value in self._config_cache.items(): + if key.startswith(connections_prefix): + # Parse "connections.{name}.{param}" + parts = key[len(connections_prefix) :].split(".", 1) + if len(parts) == 2: + conn_name, param_name = parts + if conn_name not in connections: + connections[conn_name] = {} + connections[conn_name][param_name] = value + + return connections def get_all_connections(self) -> dict: - raise NotImplementedError("Alternative config provider not yet implemented") + """ + Get all connection configurations. 
+ + Returns: + Dictionary mapping connection names to ConnectionConfig objects + """ + from snowflake.cli.api.config import ConnectionConfig + + connections_dict = self._get_all_connections_dict() + return { + name: ConnectionConfig.from_dict(config) + for name, config in connections_dict.items() + } def _is_alternative_config_enabled() -> bool: diff --git a/tests/api/test_config_provider.py b/tests/api/test_config_provider.py index 69f5fb6de7..cc8ceda3b7 100644 --- a/tests/api/test_config_provider.py +++ b/tests/api/test_config_provider.py @@ -90,30 +90,32 @@ def test_reset_provider(): assert provider1 is not provider2 -def test_alternative_provider_methods_not_implemented(): - """AlternativeConfigProvider methods should raise NotImplementedError.""" +def test_alternative_provider_has_all_required_methods(): + """AlternativeConfigProvider should have all ConfigProvider methods implemented.""" provider = AlternativeConfigProvider() - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.get_section("test") - - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.get_value("test", key="key") - - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.set_value(["test"], "value") - - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.unset_value(["test"]) - - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.section_exists("test") - - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.read_config() - - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.get_connection_dict("test") - - with pytest.raises(NotImplementedError, match="not yet implemented"): - provider.get_all_connections() + # Verify all abstract methods are implemented and callable + # Note: These are smoke tests - comprehensive tests are in test_config_provider_integration.py + assert 
callable(provider.get_section) + assert callable(provider.get_value) + assert callable(provider.set_value) + assert callable(provider.unset_value) + assert callable(provider.section_exists) + assert callable(provider.read_config) + assert callable(provider.get_connection_dict) + assert callable(provider.get_all_connections) + + +def test_legacy_provider_has_all_required_methods(): + """LegacyConfigProvider should have all ConfigProvider methods implemented.""" + provider = LegacyConfigProvider() + + # Verify all abstract methods are implemented and callable + assert callable(provider.get_section) + assert callable(provider.get_value) + assert callable(provider.set_value) + assert callable(provider.unset_value) + assert callable(provider.section_exists) + assert callable(provider.read_config) + assert callable(provider.get_connection_dict) + assert callable(provider.get_all_connections) diff --git a/tests/test_config_provider_integration.py b/tests/test_config_provider_integration.py new file mode 100644 index 0000000000..6653ebb66a --- /dev/null +++ b/tests/test_config_provider_integration.py @@ -0,0 +1,389 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# ruff: noqa: SLF001 +""" +Integration tests for ConfigProvider Phase 7: Provider Integration. + +Tests the AlternativeConfigProvider implementation and its compatibility +with LegacyConfigProvider. 
+ +Note: This file accesses private members for testing purposes, which is expected +in test code to verify internal state and behavior. +""" + +import os +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest import mock + +import pytest +from snowflake.cli.api.config_provider import ( + ALTERNATIVE_CONFIG_ENV_VAR, + AlternativeConfigProvider, + LegacyConfigProvider, + get_config_provider, + get_config_provider_singleton, + reset_config_provider, +) + + +class TestProviderSelection: + """Tests for provider selection via environment variable.""" + + def test_default_provider_is_legacy(self): + """Test that LegacyConfigProvider is used by default.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + + provider = get_config_provider() + assert isinstance(provider, LegacyConfigProvider) + + def test_alternative_provider_enabled_with_true(self): + """Test enabling alternative provider with 'true'.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + provider = get_config_provider() + assert isinstance(provider, AlternativeConfigProvider) + + def test_alternative_provider_enabled_with_1(self): + """Test enabling alternative provider with '1'.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}): + provider = get_config_provider() + assert isinstance(provider, AlternativeConfigProvider) + + def test_alternative_provider_enabled_with_yes(self): + """Test enabling alternative provider with 'yes'.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "yes"}): + provider = get_config_provider() + assert isinstance(provider, AlternativeConfigProvider) + + def test_alternative_provider_enabled_with_on(self): + """Test enabling alternative provider with 'on'.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "on"}): + provider = get_config_provider() + assert isinstance(provider, 
AlternativeConfigProvider) + + def test_alternative_provider_case_insensitive(self): + """Test that environment variable is case-insensitive.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "TRUE"}): + provider = get_config_provider() + assert isinstance(provider, AlternativeConfigProvider) + + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "Yes"}): + provider = get_config_provider() + assert isinstance(provider, AlternativeConfigProvider) + + def test_singleton_pattern(self): + """Test that singleton returns same instance.""" + with mock.patch.dict(os.environ, {}): + reset_config_provider() + + provider1 = get_config_provider_singleton() + provider2 = get_config_provider_singleton() + + assert provider1 is provider2 + + def test_reset_config_provider(self): + """Test that reset_config_provider creates new instance.""" + with mock.patch.dict(os.environ, {}): + reset_config_provider() + + provider1 = get_config_provider_singleton() + reset_config_provider() + provider2 = get_config_provider_singleton() + + assert provider1 is not provider2 + + +class TestAlternativeConfigProviderInitialization: + """Tests for AlternativeConfigProvider initialization.""" + + def test_lazy_initialization(self): + """Test that provider initializes lazily on first use.""" + provider = AlternativeConfigProvider() + assert provider._resolver is None + assert not provider._initialized + + # Accessing any method should trigger initialization + provider._ensure_initialized() + assert provider._resolver is not None + assert provider._initialized + + def test_reinitialization_clears_cache(self): + """Test that re-initialization clears cache.""" + provider = AlternativeConfigProvider() + provider._config_cache = {"old": "data"} + provider._initialized = True + + provider.read_config() + + # Cache should be cleared during re-init + assert provider._config_cache != {"old": "data"} + + +class TestAlternativeConfigProviderBasicOperations: + """Tests for basic 
config provider operations.""" + + def test_section_exists_root(self): + """Test section_exists for root.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = {"key": "value"} + provider._initialized = True + + assert provider.section_exists() + + def test_section_exists_with_prefix(self): + """Test section_exists for specific section.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account", + "connections.default.user": "test_user", + } + provider._initialized = True + + assert provider.section_exists("connections") + assert provider.section_exists("connections", "default") + assert not provider.section_exists("nonexistent") + + def test_get_value_simple(self): + """Test get_value for simple key.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = {"account": "test_account"} + provider._initialized = True + + value = provider.get_value(key="account") + assert value == "test_account" + + def test_get_value_with_path(self): + """Test get_value with path.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account" + } + provider._initialized = True + + value = provider.get_value("connections", "default", key="account") + assert value == "test_account" + + def test_get_value_with_default(self): + """Test get_value returns default when key not found.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = {} + provider._initialized = True + + value = provider.get_value(key="nonexistent", 
default="default_value") + assert value == "default_value" + + def test_get_section_root(self): + """Test get_section for root.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + config_data = {"key1": "value1", "key2": "value2"} + mock_resolver.resolve.return_value = config_data + provider._initialized = True + + section = provider.get_section() + assert section == config_data + + def test_get_section_connections(self): + """Test get_section for connections.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account", + "connections.default.user": "test_user", + "connections.prod.account": "prod_account", + } + provider._initialized = True + + section = provider.get_section("connections") + assert "default" in section + assert "prod" in section + assert section["default"]["account"] == "test_account" + assert section["prod"]["account"] == "prod_account" + + def test_get_section_specific_connection(self): + """Test get_section for specific connection.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account", + "connections.default.user": "test_user", + } + provider._initialized = True + + section = provider.get_section("connections", "default") + assert section == {"account": "test_account", "user": "test_user"} + + +class TestAlternativeConfigProviderConnectionOperations: + """Tests for connection-specific operations.""" + + def test_get_connection_dict(self): + """Test get_connection_dict retrieves connection config.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account", + 
"connections.default.user": "test_user", + "connections.default.password": "secret", + } + provider._initialized = True + + conn_dict = provider.get_connection_dict("default") + assert conn_dict == { + "account": "test_account", + "user": "test_user", + "password": "secret", + } + + def test_get_connection_dict_not_found(self): + """Test get_connection_dict raises error for missing connection.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = {} + provider._initialized = True + + with pytest.raises(Exception): # MissingConfigurationError + provider.get_connection_dict("nonexistent") + + def test_get_all_connections_dict(self): + """Test _get_all_connections_dict returns nested dict.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account", + "connections.default.user": "test_user", + "connections.prod.account": "prod_account", + "connections.prod.user": "prod_user", + } + provider._initialized = True + + all_conns = provider._get_all_connections_dict() + assert "default" in all_conns + assert "prod" in all_conns + assert all_conns["default"] == { + "account": "test_account", + "user": "test_user", + } + assert all_conns["prod"] == { + "account": "prod_account", + "user": "prod_user", + } + + @mock.patch("snowflake.cli.api.config.ConnectionConfig") + def test_get_all_connections(self, mock_connection_config): + """Test get_all_connections returns ConnectionConfig objects.""" + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account", + "connections.default.user": "test_user", + } + provider._initialized = True + + # Mock ConnectionConfig.from_dict + mock_config_instance = mock.Mock() + 
mock_connection_config.from_dict.return_value = mock_config_instance + + all_conns = provider.get_all_connections() + + assert "default" in all_conns + assert all_conns["default"] == mock_config_instance + mock_connection_config.from_dict.assert_called_once() + + +class TestAlternativeConfigProviderWriteOperations: + """Tests for write operations that delegate to legacy system.""" + + @mock.patch("snowflake.cli.api.config.set_config_value") + def test_set_value_delegates_to_legacy(self, mock_set_value): + """Test that set_value delegates to legacy system.""" + provider = AlternativeConfigProvider() + provider._initialized = True + + provider.set_value(["test", "path"], "value") + + mock_set_value.assert_called_once_with(["test", "path"], "value") + assert not provider._initialized # Should reset + assert not provider._config_cache # Should clear cache + + @mock.patch("snowflake.cli.api.config.unset_config_value") + def test_unset_value_delegates_to_legacy(self, mock_unset_value): + """Test that unset_value delegates to legacy system.""" + provider = AlternativeConfigProvider() + provider._initialized = True + + provider.unset_value(["test", "path"]) + + mock_unset_value.assert_called_once_with(["test", "path"]) + assert not provider._initialized # Should reset + assert not provider._config_cache # Should clear cache + + +class TestProviderIntegrationEndToEnd: + """End-to-end integration tests with real config files.""" + + def test_alternative_provider_with_toml_file(self): + """Test alternative provider reads from TOML file.""" + with TemporaryDirectory() as tmpdir: + # Create a test config file + config_file = Path(tmpdir) / "connections.toml" + config_file.write_text( + """ +[default] +account = "test_account" +user = "test_user" +password = "test_password" +""" + ) + + # Create provider and test + # Note: This requires mocking the config manager to use our temp file + # Full integration testing would be done in separate test suite + + def 
test_provider_switching_via_environment(self): + """Test switching between providers via environment variable.""" + # Test legacy provider (default) + reset_config_provider() + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + + provider = get_config_provider_singleton() + assert isinstance(provider, LegacyConfigProvider) + + # Test alternative provider (enabled) + reset_config_provider() + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + provider = get_config_provider_singleton() + assert isinstance(provider, AlternativeConfigProvider) From a0628a2996c53dfefa40d4b33cb32b055b62fb42 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 7 Oct 2025 10:51:55 +0200 Subject: [PATCH 10/78] SNOW-2306184: config refactory - helpers command integration --- .../cli/_plugins/helpers/commands.py | 99 ++++++ src/snowflake/cli/api/config_ng/__init__.py | 20 ++ .../cli/api/config_ng/resolution_logger.py | 264 +++++++++++++++ tests/config_ng/test_resolution_logger.py | 317 ++++++++++++++++++ 4 files changed, 700 insertions(+) create mode 100644 src/snowflake/cli/api/config_ng/resolution_logger.py create mode 100644 tests/config_ng/test_resolution_logger.py diff --git a/src/snowflake/cli/_plugins/helpers/commands.py b/src/snowflake/cli/_plugins/helpers/commands.py index 45a02faf79..0e35ae7abd 100644 --- a/src/snowflake/cli/_plugins/helpers/commands.py +++ b/src/snowflake/cli/_plugins/helpers/commands.py @@ -30,6 +30,7 @@ get_all_connections, set_config_value, ) +from snowflake.cli.api.config_provider import ALTERNATIVE_CONFIG_ENV_VAR from snowflake.cli.api.console import cli_console from snowflake.cli.api.output.types import ( CollectionResult, @@ -317,3 +318,101 @@ def check_snowsql_env_vars(**options): results.append(MessageResult(summary)) return MultipleResults(results) + + +@app.command( + name="show-config-sources", + requires_connection=False, + 
hidden=os.environ.get(ALTERNATIVE_CONFIG_ENV_VAR, "").lower() + not in ("1", "true", "yes", "on"), +) +def show_config_sources( + key: Optional[str] = typer.Argument( + None, + help="Specific configuration key to show resolution for (e.g., 'account', 'user'). If not provided, shows summary for all keys.", + ), + show_details: bool = typer.Option( + False, + "--show-details", + "-d", + help="Show detailed resolution chains for all sources consulted.", + ), + export_file: Optional[Path] = typer.Option( + None, + "--export", + "-e", + help="Export complete resolution history to JSON file for support or debugging.", + file_okay=True, + dir_okay=False, + ), + **options, +) -> CommandResult: + """ + Show where configuration values come from. + + This command displays the configuration resolution process, showing which + source (CLI arguments, environment variables, or config files) provided + each configuration value. Useful for debugging configuration issues. + + Examples: + + # Show summary of all configuration resolution + snow helpers show-config-sources + + # Show detailed resolution for all keys + snow helpers show-config-sources --show-details + + # Show resolution for a specific key + snow helpers show-config-sources account + + # Show detailed resolution for a specific key + snow helpers show-config-sources account --show-details + + # Export complete resolution history to file + snow helpers show-config-sources --export config_debug.json + + Note: This command requires the enhanced configuration system to be enabled. + Set SNOWFLAKE_CLI_CONFIG_V2_ENABLED=true to enable it. 
+ """ + from snowflake.cli.api.config_ng import ( + explain_configuration, + export_resolution_history, + is_resolution_logging_available, + ) + + if not is_resolution_logging_available(): + return MessageResult( + f"⚠️ Configuration resolution logging is not available.\n\n" + f"To enable it, set the environment variable:\n" + f" export {ALTERNATIVE_CONFIG_ENV_VAR}=true\n\n" + f"Then run this command again to see where configuration values come from." + ) + + # Export if requested + if export_file: + success = export_resolution_history(export_file) + if not success: + return MessageResult( + f"❌ Failed to export resolution history to {export_file}" + ) + return MessageResult( + f"✅ Resolution history exported to: {export_file}\n\n" + f"This file contains complete details about configuration resolution " + f"and can be attached to support tickets." + ) + + # Show resolution information + explain_configuration(key=key, verbose=show_details) + + if key: + return MessageResult( + f"\n✅ Showing resolution for key: {key}\n" + f"Use --show-details to see the complete resolution chain." + ) + else: + return MessageResult( + "\n✅ Configuration resolution summary displayed above.\n" + "Use a specific key (e.g., 'snow helpers show-config-sources account') " + "to see detailed resolution for that key.\n" + "Use --show-details to see complete resolution chains for all keys." 
+ ) diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index b8b416d8e4..7bd68c0234 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -39,6 +39,17 @@ TomlFileHandler, get_snowsql_config_paths, ) +from snowflake.cli.api.config_ng.resolution_logger import ( + check_value_source, + explain_configuration, + export_resolution_history, + format_summary_for_display, + get_resolution_summary, + get_resolver, + is_resolution_logging_available, + show_all_resolution_chains, + show_resolution_chain, +) from snowflake.cli.api.config_ng.resolver import ( ConfigurationResolver, ResolutionHistoryTracker, @@ -51,16 +62,25 @@ ) __all__ = [ + "check_value_source", "CliArgumentSource", "ConfigurationResolver", "ConfigurationSource", "ConfigValue", "EnvironmentSource", + "explain_configuration", + "export_resolution_history", "FileSource", + "format_summary_for_display", + "get_resolution_summary", + "get_resolver", "get_snowsql_config_paths", + "is_resolution_logging_available", "ResolutionEntry", "ResolutionHistory", "ResolutionHistoryTracker", + "show_all_resolution_chains", + "show_resolution_chain", "SnowCliEnvHandler", "SnowSqlConfigHandler", "SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS", diff --git a/src/snowflake/cli/api/config_ng/resolution_logger.py b/src/snowflake/cli/api/config_ng/resolution_logger.py new file mode 100644 index 0000000000..d90ea4ff42 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/resolution_logger.py @@ -0,0 +1,264 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Configuration resolution logging utilities. + +This module provides internal utilities for logging and displaying configuration +resolution information. It's designed to be used independently of CLI commands, +allowing it to be used in any context where configuration debugging is needed. +""" + +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING, Dict, Optional + +from snowflake.cli.api.config_provider import ( + ALTERNATIVE_CONFIG_ENV_VAR, + AlternativeConfigProvider, + get_config_provider_singleton, +) +from snowflake.cli.api.console import cli_console + +if TYPE_CHECKING: + from snowflake.cli.api.config_ng.resolver import ConfigurationResolver + + +def is_resolution_logging_available() -> bool: + """ + Check if configuration resolution logging is available. + + Returns: + True if the alternative config provider is enabled and has resolution history + """ + provider = get_config_provider_singleton() + return isinstance(provider, AlternativeConfigProvider) + + +def get_resolver() -> Optional[ConfigurationResolver]: + """ + Get the ConfigurationResolver from the current provider. 
+ + Returns: + ConfigurationResolver instance if available, None otherwise + """ + provider = get_config_provider_singleton() + if not isinstance(provider, AlternativeConfigProvider): + return None + + # Ensure provider is initialized + provider._ensure_initialized() # noqa: SLF001 + return provider._resolver # noqa: SLF001 + + +def show_resolution_chain(key: str) -> None: + """ + Display the resolution chain for a specific configuration key. + + This shows: + - All sources that provided values for the key + - The order in which values were considered + - Which value overrode which + - The final selected value + + Args: + key: Configuration key to show resolution for + """ + resolver = get_resolver() + + if resolver is None: + cli_console.warning( + "Configuration resolution logging is not available. " + f"Set {ALTERNATIVE_CONFIG_ENV_VAR}=true to enable it." + ) + return + + resolver.print_resolution_chain(key) + + +def show_all_resolution_chains() -> None: + """ + Display resolution chains for all configured keys. + + This provides a complete overview of the configuration resolution process, + showing how every configuration value was determined. + """ + resolver = get_resolver() + + if resolver is None: + cli_console.warning( + "Configuration resolution logging is not available. " + f"Set {ALTERNATIVE_CONFIG_ENV_VAR}=true to enable it." + ) + return + + resolver.print_all_chains() + + +def get_resolution_summary() -> Optional[Dict]: + """ + Get summary statistics about configuration resolution. 
+ + Returns: + Dictionary with statistics including: + - total_keys_resolved: Number of keys resolved + - keys_with_overrides: Number of keys where values were overridden + - keys_using_defaults: Number of keys using default values + - source_usage: Dict of source_name -> count of values provided + - source_wins: Dict of source_name -> count of values selected + + None if resolution logging is not available + """ + resolver = get_resolver() + + if resolver is None: + return None + + return resolver.get_history_summary() + + +def export_resolution_history(output_path: Path) -> bool: + """ + Export complete resolution history to a JSON file. + + This creates a detailed JSON report that can be: + - Attached to support tickets + - Used for configuration debugging + - Analyzed programmatically + + Args: + output_path: Path where the JSON file should be saved + + Returns: + True if export succeeded, False otherwise + """ + resolver = get_resolver() + + if resolver is None: + cli_console.warning( + "Configuration resolution logging is not available. " + f"Set {ALTERNATIVE_CONFIG_ENV_VAR}=true to enable it." + ) + return False + + try: + resolver.export_history(output_path) + cli_console.message(f"✅ Resolution history exported to: {output_path}") + return True + except Exception as e: + cli_console.warning(f"❌ Failed to export resolution history: {e}") + return False + + +def format_summary_for_display() -> Optional[str]: + """ + Format resolution summary as a human-readable string. 
+ + Returns: + Formatted summary string, or None if resolution logging not available + """ + summary = get_resolution_summary() + + if summary is None: + return None + + lines = [ + "\n" + "=" * 80, + "Configuration Resolution Summary", + "=" * 80, + f"Total keys resolved: {summary['total_keys_resolved']}", + f"Keys with overrides: {summary['keys_with_overrides']}", + f"Keys using defaults: {summary['keys_using_defaults']}", + "", + "Source Usage:", + ] + + # Sort sources by number of values provided (descending) + source_usage = summary["source_usage"] + source_wins = summary["source_wins"] + + for source_name in sorted(source_usage, key=source_usage.get, reverse=True): + provided = source_usage[source_name] + wins = source_wins.get(source_name, 0) + lines.append( + f" {source_name:30s} provided: {provided:3d} selected: {wins:3d}" + ) + + lines.append("=" * 80 + "\n") + return "\n".join(lines) + + +def check_value_source(key: str) -> Optional[str]: + """ + Check which source provided the value for a specific configuration key. + + Args: + key: Configuration key to check + + Returns: + Name of the source that provided the final value, or None if not found + """ + resolver = get_resolver() + + if resolver is None: + return None + + history = resolver.get_resolution_history(key) + if history and history.selected_entry: + return history.selected_entry.config_value.source_name + + return None + + +def explain_configuration(key: Optional[str] = None, verbose: bool = False) -> None: + """ + Explain configuration resolution for a key or all keys. + + This is a high-level function that combines multiple resolution + logging capabilities to provide comprehensive configuration explanation. + + Args: + key: Specific key to explain, or None to explain all + verbose: If True, show detailed resolution chains + """ + resolver = get_resolver() + + if resolver is None: + cli_console.warning( + "Configuration resolution logging is not available. 
" + f"Set {ALTERNATIVE_CONFIG_ENV_VAR}=true to enable the new config system." + ) + return + + if key: + # Explain specific key + with cli_console.phase(f"Configuration Resolution: {key}"): + source = check_value_source(key) + if source: + cli_console.message(f"Current value from: {source}") + else: + cli_console.message("No value found for this key") + + if verbose: + resolver.print_resolution_chain(key) + else: + # Explain all configuration + with cli_console.phase("Complete Configuration Resolution"): + summary_text = format_summary_for_display() + if summary_text: + cli_console.message(summary_text) + + if verbose: + resolver.print_all_chains() diff --git a/tests/config_ng/test_resolution_logger.py b/tests/config_ng/test_resolution_logger.py new file mode 100644 index 0000000000..73b84f3174 --- /dev/null +++ b/tests/config_ng/test_resolution_logger.py @@ -0,0 +1,317 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# ruff: noqa: SLF001 +""" +Tests for configuration resolution logger module. + +This tests the internal resolution logging utilities that are independent +of CLI commands and can be used in any context. 
+""" + +import os +from pathlib import Path +from tempfile import TemporaryDirectory +from unittest import mock + +from snowflake.cli.api.config_ng.resolution_logger import ( + check_value_source, + explain_configuration, + export_resolution_history, + format_summary_for_display, + get_resolution_summary, + get_resolver, + is_resolution_logging_available, + show_all_resolution_chains, + show_resolution_chain, +) +from snowflake.cli.api.config_provider import ( + ALTERNATIVE_CONFIG_ENV_VAR, + AlternativeConfigProvider, + reset_config_provider, +) + + +class TestResolutionLoggingAvailability: + """Tests for checking if resolution logging is available.""" + + def test_logging_not_available_with_legacy_provider(self): + """Test that logging is not available with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + assert not is_resolution_logging_available() + + def test_logging_available_with_alternative_provider(self): + """Test that logging is available with alternative provider.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + reset_config_provider() + + assert is_resolution_logging_available() + + def test_get_resolver_returns_none_with_legacy(self): + """Test that get_resolver returns None with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + resolver = get_resolver() + assert resolver is None + + def test_get_resolver_returns_instance_with_alternative(self): + """Test that get_resolver returns resolver with alternative provider.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + reset_config_provider() + + resolver = get_resolver() + assert resolver is not None + + +class TestShowResolutionChain: + """Tests for showing 
resolution chains.""" + + def test_show_chain_with_legacy_provider_shows_warning(self, capsys): + """Test that show_resolution_chain shows warning with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + show_resolution_chain("test_key") + + captured = capsys.readouterr() + assert "not available" in captured.out.lower() + + def test_show_all_chains_with_legacy_provider_shows_warning(self, capsys): + """Test that show_all_resolution_chains shows warning with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + show_all_resolution_chains() + + captured = capsys.readouterr() + assert "not available" in captured.out.lower() + + +class TestResolutionSummary: + """Tests for resolution summary functionality.""" + + def test_summary_returns_none_with_legacy_provider(self): + """Test that get_resolution_summary returns None with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + summary = get_resolution_summary() + assert summary is None + + def test_format_summary_returns_none_with_legacy_provider(self): + """Test that format_summary_for_display returns None with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + formatted = format_summary_for_display() + assert formatted is None + + def test_format_summary_with_alternative_provider(self): + """Test that format_summary_for_display returns formatted string.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + reset_config_provider() + + 
# Mock the resolver to have some data + provider = AlternativeConfigProvider() + provider._ensure_initialized() + + with mock.patch.object( + provider._resolver, "get_history_summary" + ) as mock_summary: + mock_summary.return_value = { + "total_keys_resolved": 5, + "keys_with_overrides": 2, + "keys_using_defaults": 1, + "source_usage": { + "cli_arguments": 2, + "snowflake_cli_env": 3, + }, + "source_wins": { + "cli_arguments": 2, + "snowflake_cli_env": 3, + }, + } + + # Need to mock the provider singleton + with mock.patch( + "snowflake.cli.api.config_ng.resolution_logger.get_config_provider_singleton", + return_value=provider, + ): + formatted = format_summary_for_display() + + assert formatted is not None + assert "Total keys resolved: 5" in formatted + assert "Keys with overrides: 2" in formatted + assert "Keys using defaults: 1" in formatted + assert "cli_arguments" in formatted + assert "snowflake_cli_env" in formatted + + +class TestCheckValueSource: + """Tests for checking value source.""" + + def test_check_value_source_returns_none_with_legacy(self): + """Test that check_value_source returns None with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + source = check_value_source("test_key") + assert source is None + + +class TestExportResolutionHistory: + """Tests for exporting resolution history.""" + + def test_export_returns_false_with_legacy_provider(self, capsys): + """Test that export_resolution_history returns False with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + with TemporaryDirectory() as tmpdir: + export_path = Path(tmpdir) / "test_export.json" + success = export_resolution_history(export_path) + + assert not success + captured = capsys.readouterr() + 
assert "not available" in captured.out.lower() + + def test_export_succeeds_with_alternative_provider(self): + """Test that export_resolution_history succeeds with alternative provider.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + reset_config_provider() + + with TemporaryDirectory() as tmpdir: + export_path = Path(tmpdir) / "test_export.json" + success = export_resolution_history(export_path) + + assert success + assert export_path.exists() + + # Verify JSON is valid + import json + + with open(export_path) as f: + data = json.load(f) + + assert "summary" in data + assert "histories" in data + + +class TestExplainConfiguration: + """Tests for explain_configuration function.""" + + def test_explain_with_legacy_provider_shows_warning(self, capsys): + """Test that explain_configuration shows warning with legacy provider.""" + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() + + explain_configuration() + + captured = capsys.readouterr() + assert "not available" in captured.out.lower() + + def test_explain_specific_key(self, capsys): + """Test explaining a specific key.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + reset_config_provider() + + # Just test that it doesn't crash + # Actual display testing would require more setup + explain_configuration(key="account") + + def test_explain_all_keys_verbose(self, capsys): + """Test explaining all keys in verbose mode.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + reset_config_provider() + + # Just test that it doesn't crash + explain_configuration(verbose=True) + + +class TestIntegrationWithRealConfig: + """Integration tests with actual configuration.""" + + def test_resolution_with_env_vars(self): + """Test resolution logging with actual environment variables.""" + with mock.patch.dict( + os.environ, + { + 
ALTERNATIVE_CONFIG_ENV_VAR: "true", + "SNOWFLAKE_ACCOUNT": "test_account", + "SNOWFLAKE_USER": "test_user", + }, + ): + reset_config_provider() + + # Verify logging is available + assert is_resolution_logging_available() + + # Get resolver and check it has data + resolver = get_resolver() + assert resolver is not None + + # Force resolution + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + provider.read_config() + + # Check that we can get summary + summary = get_resolution_summary() + assert summary is not None + assert summary["total_keys_resolved"] > 0 + + def test_check_value_source_for_env_var(self): + """Test checking the source of an environment variable.""" + with mock.patch.dict( + os.environ, + { + ALTERNATIVE_CONFIG_ENV_VAR: "true", + "SNOWFLAKE_ACCOUNT": "test_account", + }, + ): + reset_config_provider() + + # Force resolution + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + provider.read_config() + + # Check source + source = check_value_source("account") + # Should be from environment (snowflake_cli_env or similar) + assert source is not None + assert "env" in source.lower() From 482750e65c0cdf3890f4e46b3d9fac4611f5bf56 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 8 Oct 2025 09:34:56 +0200 Subject: [PATCH 11/78] SNOW-2306184: config refactory - config testing setup --- tests/config_ng/README_TESTING.md | 221 +++++++++++ tests/config_ng/configs/cli.env | 1 + tests/config_ng/configs/config | 4 + tests/config_ng/configs/config.toml | 18 + tests/config_ng/configs/connections.toml | 18 + tests/config_ng/configs/snowsql.env | 1 + tests/config_ng/conftest.py | 452 +++++++++++++++++++++++ tests/config_ng/test_configuration.py | 239 ++++++++++++ 8 files changed, 954 insertions(+) create mode 100644 tests/config_ng/README_TESTING.md create mode 100644 tests/config_ng/configs/cli.env create mode 
100644 tests/config_ng/configs/config create mode 100644 tests/config_ng/configs/config.toml create mode 100644 tests/config_ng/configs/connections.toml create mode 100644 tests/config_ng/configs/snowsql.env create mode 100644 tests/config_ng/conftest.py create mode 100644 tests/config_ng/test_configuration.py diff --git a/tests/config_ng/README_TESTING.md b/tests/config_ng/README_TESTING.md new file mode 100644 index 0000000000..2b79f59600 --- /dev/null +++ b/tests/config_ng/README_TESTING.md @@ -0,0 +1,221 @@ + + +# Configuration Merging Test Framework + +## Overview + +This test framework provides an easy-to-use, readable way to test merged configuration from multiple sources in the Snowflake CLI. + +## Features + +### Configuration Sources + +The framework supports testing all configuration sources: + +1. **SnowSQLConfig**: SnowSQL INI-style config files (`.snowsql/config`) +2. **SnowSQLEnvs**: SnowSQL environment variables (`SNOWSQL_*`) +3. **CliConfig**: CLI TOML config files (`.snowflake/config.toml`) +4. **CliEnvs**: CLI environment variables (`SNOWFLAKE_*`) +5. **CliParams**: CLI command-line parameters (`--account`, `--user`, etc.) +6. **ConnectionsToml**: Connections TOML files (`.snowflake/connections.toml`) + +### Configuration Priority + +The framework correctly tests the precedence order: +1. CLI parameters (highest) +2. CLI environment variables (`SNOWFLAKE_*`) +3. SnowSQL environment variables (`SNOWSQL_*`) +4. CLI config files +5. Connections TOML +6. 
SnowSQL config files (lowest) + +## Usage + +### Basic Example + +```python +from tests.config_ng.conftest import ( + CliConfig, + CliEnvs, + CliParams, + SnowSQLConfig, + SnowSQLEnvs, + config_sources, +) + +def test_configuration_merging(): + sources = ( + SnowSQLConfig("config"), + SnowSQLEnvs("snowsql.env"), + CliConfig("config.toml"), + CliEnvs("cli.env"), + CliParams("--account", "test-account", "--user", "alice"), + ) + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + + # CLI params have highest priority + assert merged["account"] == "test-account" + assert merged["user"] == "alice" +``` + +### Testing Specific Connections + +```python +def test_specific_connection(): + sources = (ConnectionsToml("connections.toml"),) + + with config_sources(sources, connection="prod") as ctx: + merged = ctx.get_merged_config() + assert merged["account"] == "prod-account" +``` + +### Using FinalConfig for Readability + +```python +from textwrap import dedent + +from tests.config_ng.conftest import FinalConfig + +# From dictionary +expected = FinalConfig(config_dict={ + "account": "test-account", + "user": "alice", +}) + +# From TOML string (more readable for complex configs) +# Use dedent to avoid indentation issues +expected = FinalConfig(toml_string=dedent(""" + [connections.prod] + account = "prod-account" + user = "prod-user" + password = "secret" + """)) + +# Compare with merged config +assert merged == expected +``` + +### Accessing Resolution History + +```python +with config_sources(sources) as ctx: + resolver = ctx.get_resolver() + config = resolver.resolve() + + # Check which source won + history = resolver.get_resolution_history("account") + assert history.selected_entry.config_value.source_name == "cli_arguments" + + # Get resolution summary + summary = resolver.get_history_summary() + print(f"Total keys resolved: {summary['total_keys_resolved']}") + print(f"Keys with overrides: {summary['keys_with_overrides']}") +``` + +## Test File 
Structure + +### Required Directory Structure + +``` +tests/config_ng/ +├── conftest.py # Test framework implementation +├── test_configuration.py # Example tests +└── configs/ # Test configuration files + ├── config # SnowSQL config + ├── snowsql.env # SnowSQL environment variables + ├── config.toml # CLI config + ├── cli.env # CLI environment variables + └── connections.toml # Connections config +``` + +### Configuration Files + +Create test configuration files in `tests/config_ng/configs/`: + +**config** (SnowSQL format): +```ini +[connections.a] +accountname = account-a +user = user +password = password +``` + +**config.toml** (CLI format): +```toml +[connections.a] +account = "account-a" +username = "user" +password = "abc" +``` + +**cli.env**: +```bash +SNOWFLAKE_USER=Alice +``` + +**snowsql.env**: +```bash +SNOWSQL_USER=Bob +``` + +## Implementation Details + +### Context Manager + +The `config_sources` context manager: +- Creates temporary directories for config files +- Writes config files to proper locations +- Sets environment variables +- Cleans up after test completion + +### ConfigSourcesContext + +Provides methods: +- `get_merged_config()`: Returns the merged configuration dictionary +- `get_resolver()`: Returns the ConfigurationResolver for advanced testing + +## Running Tests + +```bash +# Run with timeout +timeout 30 hatch env run -- pytest tests/config_ng/test_configuration.py -v -p no:warnings + +# Run all config_ng tests +timeout 60 hatch env run -- pytest tests/config_ng/ -v -p no:warnings + +# Run with pre-commit checks +hatch env run -- pre-commit run --files tests/config_ng/conftest.py tests/config_ng/test_configuration.py +``` + +## Benefits + +1. **Readable**: Tests clearly express intent with descriptive source objects +2. **Isolated**: Each test runs in a clean temporary environment +3. **Comprehensive**: Tests all configuration sources and their interactions +4. **Type-safe**: Full mypy type checking support +5. 
**Maintainable**: Centralized logic in `conftest.py` +6. **Flexible**: Easy to add new test scenarios + +## Examples from Tests + +See `test_configuration.py` for complete examples: +- `test_all_sources_merged`: Tests complete precedence chain +- `test_cli_envs_override_snowsql_envs`: Tests environment variable precedence +- `test_config_files_precedence`: Tests file precedence +- `test_resolution_history_tracking`: Tests resolution debugging features diff --git a/tests/config_ng/configs/cli.env b/tests/config_ng/configs/cli.env new file mode 100644 index 0000000000..23724a9a0c --- /dev/null +++ b/tests/config_ng/configs/cli.env @@ -0,0 +1 @@ +SNOWFLAKE_USER=Alice diff --git a/tests/config_ng/configs/config b/tests/config_ng/configs/config new file mode 100644 index 0000000000..0880c2b8a4 --- /dev/null +++ b/tests/config_ng/configs/config @@ -0,0 +1,4 @@ +[connections.a] +accountname = account-a +user = user +password = password diff --git a/tests/config_ng/configs/config.toml b/tests/config_ng/configs/config.toml new file mode 100644 index 0000000000..ae3d5a5203 --- /dev/null +++ b/tests/config_ng/configs/config.toml @@ -0,0 +1,18 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +[connections.a] +account = "account-a" +username = "user" +password = "abc" diff --git a/tests/config_ng/configs/connections.toml b/tests/config_ng/configs/connections.toml new file mode 100644 index 0000000000..5183da2262 --- /dev/null +++ b/tests/config_ng/configs/connections.toml @@ -0,0 +1,18 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +[connections.b] +account = "account-a" +username = "user" +password = "abc" diff --git a/tests/config_ng/configs/snowsql.env b/tests/config_ng/configs/snowsql.env new file mode 100644 index 0000000000..a900b8f68d --- /dev/null +++ b/tests/config_ng/configs/snowsql.env @@ -0,0 +1 @@ +SNOWSQL_USER=Bob diff --git a/tests/config_ng/conftest.py b/tests/config_ng/conftest.py new file mode 100644 index 0000000000..09db791957 --- /dev/null +++ b/tests/config_ng/conftest.py @@ -0,0 +1,452 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +Configuration testing utilities for testing merged configuration from multiple sources. + +This module provides fixtures and utilities for testing configuration resolution +from various sources (SnowSQL config, CLI config, environment variables, CLI params). +""" + +import os +import tempfile +from contextlib import contextmanager +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple + +import pytest +import tomlkit +from snowflake.cli.api.config_ng import ( + CliArgumentSource, + ConfigurationResolver, + EnvironmentSource, + FileSource, + SnowCliEnvHandler, + SnowSqlConfigHandler, + SnowSqlEnvHandler, + TomlFileHandler, +) + + +@dataclass +class SnowSQLConfig: + """ + Represents SnowSQL INI-style config file content. + + Args: + filename: Name of the config file in the configs/ directory + """ + + filename: str + + +@dataclass +class SnowSQLEnvs: + """ + Represents SnowSQL environment variables from a file. + + Args: + filename: Name of the env file in the configs/ directory + """ + + filename: str + + +@dataclass +class CliConfig: + """ + Represents CLI TOML config file content. + + Args: + filename: Name of the config.toml file in the configs/ directory + """ + + filename: str + + +@dataclass +class CliEnvs: + """ + Represents CLI environment variables from a file. + + Args: + filename: Name of the env file in the configs/ directory + """ + + filename: str + + +@dataclass +class CliParams: + """ + Represents CLI command-line parameters. + + Args: + args: Variable length list of CLI arguments (e.g., "--account", "value", "--user", "alice") + """ + + args: Tuple[str, ...] + + def __init__(self, *args: str): + object.__setattr__(self, "args", args) + + def to_dict(self) -> Dict[str, Any]: + """ + Convert CLI arguments to a dictionary. 
+ + Returns: + Dictionary with parsed CLI arguments + """ + result: Dict[str, Any] = {} + i = 0 + while i < len(self.args): + if self.args[i].startswith("--"): + key = self.args[i][2:].replace("-", "_") + if i + 1 < len(self.args) and not self.args[i + 1].startswith("--"): + result[key] = self.args[i + 1] + i += 2 + else: + result[key] = True + i += 1 + else: + i += 1 + return result + + +@dataclass +class ConnectionsToml: + """ + Represents connections.toml file content. + + Args: + filename: Name of the connections.toml file in the configs/ directory + """ + + filename: str + + +@dataclass +class FinalConfig: + """ + Represents the expected final merged configuration. + + Args: + config_dict: Dictionary of expected configuration values + connection: Optional connection name to test (default: None for all connections) + toml_string: Optional TOML string representation for easy reading + """ + + config_dict: Dict[str, Any] + connection: Optional[str] = None + toml_string: Optional[str] = None + + def __init__( + self, + config_dict: Optional[Dict[str, Any]] = None, + connection: Optional[str] = None, + toml_string: Optional[str] = None, + ): + """ + Initialize FinalConfig from either a dict or TOML string. 
+ """ + if toml_string: + parsed = tomlkit.parse(toml_string) + object.__setattr__(self, "config_dict", dict(parsed)) + elif config_dict: + object.__setattr__(self, "config_dict", config_dict) + else: + object.__setattr__(self, "config_dict", {}) + + object.__setattr__(self, "connection", connection) + object.__setattr__(self, "toml_string", toml_string) + + def __eq__(self, other): + """Compare FinalConfig with another FinalConfig or dict.""" + if isinstance(other, FinalConfig): + return self.config_dict == other.config_dict + if isinstance(other, dict): + return self.config_dict == other + return False + + def __repr__(self): + """String representation for debugging.""" + if self.toml_string: + return f"FinalConfig(connection={self.connection}):\n{self.toml_string}" + return f"FinalConfig({self.config_dict})" + + +class ConfigSourcesContext: + """ + Context manager for setting up configuration sources in a temporary environment. + + This class: + - Creates temporary directories for config files + - Writes config files from source definitions + - Sets environment variables + - Manages cleanup + """ + + def __init__( + self, + sources: Tuple[Any, ...], + configs_dir: Path, + connection_name: Optional[str] = None, + ): + """ + Initialize the config sources context. + + Args: + sources: Tuple of source definitions (SnowSQLConfig, CliConfig, etc.) 
+ configs_dir: Path to directory containing config file templates + connection_name: Optional connection name to resolve + """ + self.sources = sources + self.configs_dir = configs_dir + self.connection_name = connection_name or "a" + + self.temp_dir: Optional[Path] = None + self.snowsql_dir: Optional[Path] = None + self.snowflake_dir: Optional[Path] = None + self.original_env: Dict[str, Optional[str]] = {} + self.env_vars_to_set: Dict[str, str] = {} + self.cli_args_dict: Dict[str, Any] = {} + + self.snowsql_config_path: Optional[Path] = None + self.cli_config_path: Optional[Path] = None + self.connections_toml_path: Optional[Path] = None + + def __enter__(self): + """Set up the configuration environment.""" + self.temp_dir = Path(tempfile.mkdtemp()) + self.snowsql_dir = self.temp_dir / ".snowsql" + self.snowflake_dir = self.temp_dir / ".snowflake" + + self.snowsql_dir.mkdir(exist_ok=True) + self.snowflake_dir.mkdir(exist_ok=True) + + # Process sources + for source in self.sources: + if isinstance(source, SnowSQLConfig): + self._setup_snowsql_config(source) + elif isinstance(source, SnowSQLEnvs): + self._setup_snowsql_envs(source) + elif isinstance(source, CliConfig): + self._setup_cli_config(source) + elif isinstance(source, CliEnvs): + self._setup_cli_envs(source) + elif isinstance(source, CliParams): + self._setup_cli_params(source) + elif isinstance(source, ConnectionsToml): + self._setup_connections_toml(source) + + # Set environment variables + for key, value in self.env_vars_to_set.items(): + self.original_env[key] = os.environ.get(key) + os.environ[key] = value + + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Clean up the configuration environment.""" + # Restore original environment variables + for key, original_value in self.original_env.items(): + if original_value is None: + os.environ.pop(key, None) + else: + os.environ[key] = original_value + + # Clean up temp directory + if self.temp_dir: + import shutil + + 
shutil.rmtree(self.temp_dir, ignore_errors=True) + + def _setup_snowsql_config(self, source: SnowSQLConfig): + """Set up SnowSQL config file.""" + assert self.snowsql_dir is not None + config_content = (self.configs_dir / source.filename).read_text() + self.snowsql_config_path = self.snowsql_dir / "config" + self.snowsql_config_path.write_text(config_content) + + def _setup_snowsql_envs(self, source: SnowSQLEnvs): + """Set up SnowSQL environment variables from file.""" + env_file = self.configs_dir / source.filename + for line in env_file.read_text().splitlines(): + line = line.strip() + if line and not line.startswith("#") and "=" in line: + key, value = line.split("=", 1) + self.env_vars_to_set[key.strip()] = value.strip() + + def _setup_cli_config(self, source: CliConfig): + """Set up CLI config.toml file.""" + assert self.snowflake_dir is not None + config_content = (self.configs_dir / source.filename).read_text() + self.cli_config_path = self.snowflake_dir / "config.toml" + self.cli_config_path.write_text(config_content) + + def _setup_cli_envs(self, source: CliEnvs): + """Set up CLI environment variables from file.""" + env_file = self.configs_dir / source.filename + for line in env_file.read_text().splitlines(): + line = line.strip() + if line and not line.startswith("#") and "=" in line: + key, value = line.split("=", 1) + self.env_vars_to_set[key.strip()] = value.strip() + + def _setup_cli_params(self, source: CliParams): + """Set up CLI parameters.""" + self.cli_args_dict = source.to_dict() + + def _setup_connections_toml(self, source: ConnectionsToml): + """Set up connections.toml file.""" + assert self.snowflake_dir is not None + config_content = (self.configs_dir / source.filename).read_text() + self.connections_toml_path = self.snowflake_dir / "connections.toml" + self.connections_toml_path.write_text(config_content) + + def get_resolver(self) -> ConfigurationResolver: + """ + Create a ConfigurationResolver with all configured sources. 
+ + Returns: + ConfigurationResolver instance with all sources configured + """ + sources_list: List[Any] = [] + + # CLI Arguments Source (highest priority) + if self.cli_args_dict: + cli_source = CliArgumentSource(cli_context=self.cli_args_dict) + sources_list.append(cli_source) + + # Environment Variables Source + env_handlers = [SnowCliEnvHandler(), SnowSqlEnvHandler()] + env_source = EnvironmentSource(handlers=env_handlers) + sources_list.append(env_source) + + # File Sources + file_paths: List[Path] = [] + file_handlers = [] + + # Add CLI config files (higher priority) + if self.cli_config_path and self.cli_config_path.exists(): + file_paths.append(self.cli_config_path) + file_handlers.append( + TomlFileHandler(section_path=["connections", self.connection_name]) + ) + + if self.connections_toml_path and self.connections_toml_path.exists(): + file_paths.append(self.connections_toml_path) + file_handlers.append( + TomlFileHandler(section_path=["connections", self.connection_name]) + ) + + # Add SnowSQL config files (lower priority) + if self.snowsql_config_path and self.snowsql_config_path.exists(): + file_paths.append(self.snowsql_config_path) + file_handlers.append( + SnowSqlConfigHandler(section_path=["connections", self.connection_name]) + ) + + if file_paths: + file_source = FileSource(file_paths=file_paths, handlers=file_handlers) + sources_list.append(file_source) + + return ConfigurationResolver(sources=sources_list, track_history=True) + + def get_merged_config(self) -> Dict[str, Any]: + """ + Get the merged configuration from all sources. + + Returns: + Dictionary with resolved configuration values + """ + resolver = self.get_resolver() + return resolver.resolve() + + +@contextmanager +def config_sources( + sources: Tuple[Any, ...], + configs_dir: Optional[Path] = None, + connection: Optional[str] = None, +): + """ + Context manager for testing merged configuration from multiple sources. 
+ + Args: + sources: Tuple of source definitions (SnowSQLConfig, CliConfig, etc.) + configs_dir: Path to directory containing config file templates (defaults to ./configs/) + connection: Optional connection name to resolve (defaults to "a") + + Yields: + ConfigSourcesContext instance for accessing merged configuration + + Example: + sources = ( + SnowSQLConfig('config'), + SnowSQLEnvs('snowsql.env'), + CliConfig('config.toml'), + CliEnvs('cli.env'), + CliParams("--account", "test_account", "--user", "alice"), + ConnectionsToml('connections.toml'), + ) + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + assert merged["account"] == "test_account" + """ + if configs_dir is None: + configs_dir = Path(__file__).parent / "configs" + + context = ConfigSourcesContext(sources, configs_dir, connection) + with context as ctx: + yield ctx + + +@pytest.fixture +def merged_cli_config(): + """ + Fixture that provides a function to get the merged CLI configuration. + + This should be used inside a config_sources context manager. + + Returns: + Function that returns the merged configuration dictionary + """ + + def _get_merged_config(ctx: ConfigSourcesContext) -> Dict[str, Any]: + """Get merged configuration from context.""" + return ctx.get_merged_config() + + return _get_merged_config + + +@pytest.fixture +def make_cli_instance(): + """ + Fixture that provides a function to create a CLI instance. + + Note: This is a placeholder for future implementation if needed. + For now, we work directly with the resolver. + + Returns: + Function that creates a CLI instance (placeholder) + """ + + def _make_cli(): + """Create CLI instance placeholder.""" + return None + + return _make_cli diff --git a/tests/config_ng/test_configuration.py b/tests/config_ng/test_configuration.py new file mode 100644 index 0000000000..2f78e672d5 --- /dev/null +++ b/tests/config_ng/test_configuration.py @@ -0,0 +1,239 @@ +# Copyright (c) 2024 Snowflake Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Tests for merged configuration from multiple sources. + +These tests verify that configuration values are properly merged from: +- SnowSQL config files (.snowsql/config) +- SnowSQL environment variables (SNOWSQL_*) +- CLI config files (.snowflake/config.toml) +- CLI environment variables (SNOWFLAKE_*) +- CLI command-line parameters +- Connections TOML files (.snowflake/connections.toml) +""" + +from textwrap import dedent + +from .conftest import ( + CliConfig, + CliEnvs, + CliParams, + ConnectionsToml, + FinalConfig, + SnowSQLConfig, + SnowSQLEnvs, + config_sources, +) + + +class TestConfigurationMerging: + """Test configuration merging from multiple sources.""" + + def test_all_sources_merged(self): + """ + Test that all configuration sources are properly merged. + + Priority order (highest to lowest): + 1. CLI parameters + 2. CLI environment variables + 3. SnowSQL environment variables + 4. CLI config.toml + 5. Connections.toml + 6. 
SnowSQL config + """ + sources = ( + SnowSQLConfig("config"), + SnowSQLEnvs("snowsql.env"), + CliConfig("config.toml"), + CliEnvs("cli.env"), + CliParams("--account", "cli-account", "--user", "cli-user"), + ConnectionsToml("connections.toml"), + ) + + expected = FinalConfig( + config_dict={ + "account": "cli-account", + "user": "cli-user", + "password": "abc", + } + ) + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + + # CLI params have highest priority + assert merged["account"] == "cli-account" + assert merged["user"] == "cli-user" + + # Password comes from config files + assert merged.get("password") == "abc" + + def test_cli_envs_override_snowsql_envs(self): + """Test that CLI environment variables override SnowSQL environment variables.""" + sources = ( + SnowSQLEnvs("snowsql.env"), + CliEnvs("cli.env"), + ) + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + + # CLI env (SNOWFLAKE_USER=Alice) overrides + # SnowSQL env (SNOWSQL_USER=Bob) + assert merged["user"] == "Alice" + + def test_cli_params_override_all(self): + """Test that CLI parameters override all other sources.""" + sources = ( + SnowSQLConfig("config"), + CliConfig("config.toml"), + CliParams("--account", "override-account"), + ) + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + + # CLI params override everything + assert merged["account"] == "override-account" + + def test_config_files_precedence(self): + """Test precedence among configuration files.""" + sources = ( + SnowSQLConfig("config"), + CliConfig("config.toml"), + ) + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + + # CLI config.toml has higher priority than SnowSQL config + # Both have account-a, but config.toml should win + assert merged["account"] == "account-a" + assert merged["username"] == "user" + + def test_connections_toml_separate_connection(self): + """Test that connections.toml can have separate connections.""" + 
sources = (ConnectionsToml("connections.toml"),) + + # Test connection 'b' which only exists in connections.toml + with config_sources(sources, connection="b") as ctx: + merged = ctx.get_merged_config() + + assert merged["account"] == "account-a" + assert merged["username"] == "user" + assert merged["password"] == "abc" + + def test_empty_sources(self): + """Test that empty sources return minimal configuration.""" + sources = () + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + + # May contain default keys like 'home', but no connection-specific keys + assert "account" not in merged + assert "user" not in merged + assert "password" not in merged + + def test_only_cli_params(self): + """Test configuration with only CLI parameters.""" + sources = (CliParams("--account", "test-account", "--user", "test-user"),) + + with config_sources(sources) as ctx: + merged = ctx.get_merged_config() + + assert merged["account"] == "test-account" + assert merged["user"] == "test-user" + + def test_final_config_from_dict(self): + """Test FinalConfig creation from dictionary.""" + expected = FinalConfig(config_dict={"account": "test", "user": "alice"}) + + assert expected.config_dict == {"account": "test", "user": "alice"} + assert expected == {"account": "test", "user": "alice"} + + def test_final_config_from_toml_string(self): + """Test FinalConfig creation from TOML string for readability.""" + toml_string = dedent( + """ + [connections.prod] + account = "prod-account" + user = "prod-user" + password = "secret" + """ + ) + + expected = FinalConfig(toml_string=toml_string) + + assert "connections" in expected.config_dict + assert expected.config_dict["connections"]["prod"]["account"] == "prod-account" + + def test_final_config_equality(self): + """Test FinalConfig equality comparison.""" + config1 = FinalConfig(config_dict={"account": "test", "user": "alice"}) + config2 = FinalConfig(config_dict={"account": "test", "user": "alice"}) + config3 = 
FinalConfig(config_dict={"account": "test", "user": "bob"}) + + assert config1 == config2 + assert config1 != config3 + assert config1 == {"account": "test", "user": "alice"} + + +class TestConfigurationResolution: + """Test configuration resolution details.""" + + def test_resolution_history_tracking(self): + """Test that resolution history is tracked correctly.""" + sources = ( + SnowSQLConfig("config"), + CliConfig("config.toml"), + CliParams("--account", "cli-account"), + ) + + with config_sources(sources) as ctx: + resolver = ctx.get_resolver() + config = resolver.resolve() + + # Check that account was overridden + assert config["account"] == "cli-account" + + # Check resolution history + history = resolver.get_resolution_history("account") + assert history is not None + assert len(history.entries) >= 2 # At least config file and CLI param + + # The selected entry should be from CLI + assert history.selected_entry.config_value.source_name == "cli_arguments" + + def test_resolution_summary(self): + """Test that resolution summary provides useful statistics.""" + sources = ( + SnowSQLConfig("config"), + CliConfig("config.toml"), + CliParams("--account", "cli-account"), + ) + + with config_sources(sources) as ctx: + resolver = ctx.get_resolver() + resolver.resolve() + + summary = resolver.get_history_summary() + + assert summary["total_keys_resolved"] > 0 + assert "source_usage" in summary + assert "source_wins" in summary + + # CLI should have won for account + assert summary["source_wins"].get("cli_arguments", 0) >= 1 From f633582c039aa580829de66ab6eadebe8d9d202d Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 8 Oct 2025 11:09:47 +0200 Subject: [PATCH 12/78] SNOW-2306184: config refactory - fix snowsql config format parsing --- .../cli/api/config_ng/file_handlers.py | 81 ++++++++++------- .../cli/api/config_ng/resolution_logger.py | 42 +++++++++ tests/config_ng/test_configuration.py | 1 + .../config_ng/test_file_handler_migration.py | 38 ++++---- 
tests/config_ng/test_resolver_integration.py | 8 +- .../config_ng/test_snowsql_config_handler.py | 86 +++++++++---------- tests/config_ng/test_snowsql_config_paths.py | 12 +-- 7 files changed, 162 insertions(+), 106 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/file_handlers.py b/src/snowflake/cli/api/config_ng/file_handlers.py index 8ab8496b2e..1240003fe3 100644 --- a/src/snowflake/cli/api/config_ng/file_handlers.py +++ b/src/snowflake/cli/api/config_ng/file_handlers.py @@ -17,11 +17,12 @@ This module implements handlers for: - TOML configuration files (SnowCLI format) -- SnowSQL configuration files (Legacy format with key mapping) +- SnowSQL configuration files (INI format with key mapping) """ from __future__ import annotations +import configparser from pathlib import Path from typing import Dict, List, Optional @@ -204,7 +205,7 @@ def supports_key(self, key: str) -> bool: class SnowSqlConfigHandler(SourceHandler): """ Handler for SnowSQL config files. - Format: INI-like TOML with SnowSQL-specific key naming. + Format: INI format with SnowSQL-specific key naming. SnowSQL Multi-File Support: SnowSQL reads from multiple config file locations (system-wide, user home, etc.) 
@@ -223,7 +224,7 @@ class SnowSqlConfigHandler(SourceHandler): - rolename → role - pwd → password - Example SnowSQL config: + Example SnowSQL config (INI format): [connections.default] accountname = my_account username = my_user @@ -261,7 +262,7 @@ def __init__(self, section_path: Optional[List[str]] = None): Default: ["connections"] for SnowSQL compatibility """ self._section_path = section_path or ["connections"] - self._cached_data: Optional[Dict] = None + self._cached_data: Optional[configparser.ConfigParser] = None self._cached_file: Optional[Path] = None @property @@ -281,12 +282,14 @@ def can_handle(self) -> bool: return True def can_handle_file(self, file_path: Path) -> bool: - """Check if file is SnowSQL config or TOML file.""" + """Check if file is SnowSQL config file.""" # SnowSQL config is typically ~/.snowsql/config (no extension) - # But for flexibility, also handle any TOML file + # or ~/.snowsql.cnf, /etc/snowsql.cnf, etc. if file_path.parent.name == ".snowsql" and file_path.name == "config": return True - # Also handle .toml files for testing and flexibility + if file_path.suffix.lower() == ".cnf": + return True + # For backward compatibility during migration, also handle .toml return file_path.suffix.lower() in (".toml", ".tml") def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: @@ -304,7 +307,7 @@ def discover_from_file( Discover values from SnowSQL config with key mapping. 
Args: - file_path: Path to SnowSQL config file + file_path: Path to SnowSQL config file (INI format) key: Specific key to discover (SnowCLI format), or None Returns: @@ -313,22 +316,35 @@ def discover_from_file( # Load and cache file data if self._cached_file != file_path: try: - with open(file_path) as f: - self._cached_data = tomlkit.load(f) - self._cached_file = file_path - except (OSError, tomlkit.exceptions.TOMLKitError): + parser = configparser.ConfigParser() + parser.read(file_path) + self._cached_data = parser + self._cached_file = file_path + except (OSError, configparser.Error): return {} - # Navigate to section - data = self._cached_data - for section in self._section_path: - if isinstance(data, dict) and section in data: - data = data[section] - else: - return {} # Section doesn't exist + # Ensure we have cached data + if self._cached_data is None: + return {} - # Ensure data is a dictionary - if not isinstance(data, dict): + # Build the section name from section_path + # INI uses dot notation: connections.default becomes "connections.default" + section_name = ".".join(self._section_path) if self._section_path else None + + # Get the data from the appropriate section + data = {} + if section_name: + if self._cached_data.has_section(section_name): + data = dict(self._cached_data.items(section_name)) + else: + # Try to find subsections (e.g., if section_path is ["connections"]) + # Look for all sections starting with "connections." 
+ if len(self._section_path) == 1: + base_section = self._section_path[0] + if self._cached_data.has_section(base_section): + data = dict(self._cached_data.items(base_section)) + + if not data: return {} # Extract and map keys @@ -337,17 +353,18 @@ def discover_from_file( if key is not None: # Reverse lookup: find SnowSQL key for CLI key snowsql_key = self._get_snowsql_key(key) - if snowsql_key in data: - raw = data[snowsql_key] - values[key] = ConfigValue( - key=key, # Normalized SnowCLI key - value=raw, - source_name=self.source_name, - priority=self.priority, - raw_value=f"{snowsql_key}={raw}" - if snowsql_key != key - else str(raw), - ) + # Check both original case and lowercase + for k in [snowsql_key, snowsql_key.lower()]: + if k in data: + raw = data[k] + values[key] = ConfigValue( + key=key, # Normalized SnowCLI key + value=raw, + source_name=self.source_name, + priority=self.priority, + raw_value=f"{k}={raw}" if k != key else str(raw), + ) + break else: for snowsql_key, value in data.items(): if not isinstance(snowsql_key, str): diff --git a/src/snowflake/cli/api/config_ng/resolution_logger.py b/src/snowflake/cli/api/config_ng/resolution_logger.py index d90ea4ff42..df88003a56 100644 --- a/src/snowflake/cli/api/config_ng/resolution_logger.py +++ b/src/snowflake/cli/api/config_ng/resolution_logger.py @@ -76,6 +76,13 @@ def show_resolution_chain(key: str) -> None: Args: key: Configuration key to show resolution for """ + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + + # Force configuration resolution to populate history + provider.read_config() + resolver = get_resolver() if resolver is None: @@ -95,6 +102,13 @@ def show_all_resolution_chains() -> None: This provides a complete overview of the configuration resolution process, showing how every configuration value was determined. 
""" + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + + # Force configuration resolution to populate history + provider.read_config() + resolver = get_resolver() if resolver is None: @@ -121,6 +135,13 @@ def get_resolution_summary() -> Optional[Dict]: None if resolution logging is not available """ + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + + # Force configuration resolution to populate history + provider.read_config() + resolver = get_resolver() if resolver is None: @@ -144,6 +165,13 @@ def export_resolution_history(output_path: Path) -> bool: Returns: True if export succeeded, False otherwise """ + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + + # Force configuration resolution to populate history + provider.read_config() + resolver = get_resolver() if resolver is None: @@ -210,6 +238,13 @@ def check_value_source(key: str) -> Optional[str]: Returns: Name of the source that provided the final value, or None if not found """ + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + + # Force configuration resolution to populate history + provider.read_config() + resolver = get_resolver() if resolver is None: @@ -233,6 +268,13 @@ def explain_configuration(key: Optional[str] = None, verbose: bool = False) -> N key: Specific key to explain, or None to explain all verbose: If True, show detailed resolution chains """ + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + + # Force configuration resolution to populate history + provider.read_config() + resolver = get_resolver() if resolver is None: diff --git a/tests/config_ng/test_configuration.py b/tests/config_ng/test_configuration.py index 
2f78e672d5..87fcbbffbd 100644 --- a/tests/config_ng/test_configuration.py +++ b/tests/config_ng/test_configuration.py @@ -215,6 +215,7 @@ def test_resolution_history_tracking(self): assert len(history.entries) >= 2 # At least config file and CLI param # The selected entry should be from CLI + assert history.selected_entry assert history.selected_entry.config_value.source_name == "cli_arguments" def test_resolution_summary(self): diff --git a/tests/config_ng/test_file_handler_migration.py b/tests/config_ng/test_file_handler_migration.py index 49286b4827..a8837c131e 100644 --- a/tests/config_ng/test_file_handler_migration.py +++ b/tests/config_ng/test_file_handler_migration.py @@ -62,11 +62,11 @@ def test_pure_toml_configuration(self): def test_pure_snowsql_configuration(self): """Scenario: User has only SnowSQL configuration.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: f.write( "[connections]\n" - 'accountname = "snowsql_account"\n' - 'username = "snowsql_user"\n' + "accountname = snowsql_account\n" + "username = snowsql_user\n" ) f.flush() snowsql_path = Path(f.name) @@ -100,9 +100,9 @@ def test_partial_migration_toml_overrides_snowsql(self): with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: f2.write( "[connections]\n" - 'accountname = "old_account"\n' - 'username = "old_user"\n' - 'databasename = "old_db"\n' + "accountname = old_account\n" + "username = old_user\n" + "databasename = old_db\n" ) f2.flush() snowsql_path = Path(f2.name) @@ -131,28 +131,30 @@ def test_partial_migration_toml_overrides_snowsql(self): def test_handler_ordering_within_same_file(self): """Handler order matters when both can handle same file.""" + # Create a pure TOML file that both handlers could potentially read + # TomlFileHandler will read [default], SnowSqlConfigHandler will read [connections] with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - 
# File has both TOML format AND connections section + # Pure TOML format file with both sections f.write( '[default]\naccount = "toml_format"\n' - '[connections]\naccountname = "snowsql_format"\n' + '[connections]\naccount = "other_format"\n' ) f.flush() temp_path = Path(f.name) try: - # TOML handler first + # TOML handler first - should find account in [default] source = FileSource( file_paths=[temp_path], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + TomlFileHandler(section_path=["connections"]), ], ) values = source.discover() - # TOML handler should win (first handler) + # First TOML handler should win (reads [default]) assert values["account"].value == "toml_format" assert values["account"].source_name == "toml:default" finally: @@ -210,7 +212,7 @@ def test_complete_migration_timeline(self): """Simulates complete migration from SnowSQL to TOML.""" # Step 1: Pure SnowSQL user with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccountname = "account"\nusername = "user"\n') + f.write("[connections]\naccountname = account\nusername = user\n") f.flush() snowsql_path = Path(f.name) @@ -236,9 +238,7 @@ def test_complete_migration_timeline(self): toml_path = Path(f1.name) with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write( - '[connections]\naccountname = "old_account"\nusername = "old_user"\n' - ) + f2.write("[connections]\naccountname = old_account\nusername = old_user\n") f2.flush() snowsql_path = Path(f2.name) @@ -300,9 +300,7 @@ def test_discover_specific_key_with_migration(self): toml_path = Path(f1.name) with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write( - '[connections]\naccountname = "snowsql_account"\nusername = "user"\n' - ) + f2.write("[connections]\naccountname = snowsql_account\nusername = user\n") f2.flush() snowsql_path = Path(f2.name) @@ -346,9 +344,7 @@ def test_complex_configuration_with_all_features(self): 
with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f3: f3.write( - "[connections]\n" - 'accountname = "legacy_account"\n' - 'username = "legacy_user"\n' + "[connections]\naccountname = legacy_account\nusername = legacy_user\n" ) f3.flush() snowsql_config = Path(f3.name) diff --git a/tests/config_ng/test_resolver_integration.py b/tests/config_ng/test_resolver_integration.py index 3096fdb81e..d7801a4e44 100644 --- a/tests/config_ng/test_resolver_integration.py +++ b/tests/config_ng/test_resolver_integration.py @@ -93,10 +93,10 @@ def test_snowsql_to_snowcli_migration(self, tmp_path, monkeypatch): snowsql_config = tmp_path / "snowsql.toml" snowsql_config.write_text( "[connections]\n" - 'accountname = "old_account"\n' - 'username = "old_user"\n' - 'databasename = "old_db"\n' - 'warehousename = "old_warehouse"\n' + "accountname = old_account\n" + "username = old_user\n" + "databasename = old_db\n" + "warehousename = old_warehouse\n" ) # SnowCLI config (new, partial migration) diff --git a/tests/config_ng/test_snowsql_config_handler.py b/tests/config_ng/test_snowsql_config_handler.py index 0a2d705d59..617c3a0a87 100644 --- a/tests/config_ng/test_snowsql_config_handler.py +++ b/tests/config_ng/test_snowsql_config_handler.py @@ -48,8 +48,8 @@ def test_default_section_path(self): from pathlib import Path from tempfile import NamedTemporaryFile - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccount = "test"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\naccount = test\n") f.flush() temp_path = Path(f.name) @@ -66,8 +66,8 @@ def test_custom_section_path(self): from pathlib import Path from tempfile import NamedTemporaryFile - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\n[connections.prod]\naccount = "prod_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + 
f.write("[connections]\n\n[connections.prod]\naccount = prod_account\n") f.flush() temp_path = Path(f.name) @@ -114,8 +114,8 @@ def test_discover_from_nonexistent_file(self): def test_key_mapping_accountname(self): """Should map accountname → account.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccountname = "my_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\naccountname = my_account\n") f.flush() temp_path = Path(f.name) @@ -133,8 +133,8 @@ def test_key_mapping_accountname(self): def test_key_mapping_username(self): """Should map username → user.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\nusername = "my_user"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\nusername = my_user\n") f.flush() temp_path = Path(f.name) @@ -149,8 +149,8 @@ def test_key_mapping_username(self): def test_key_mapping_multiple_database_keys(self): """Should map both dbname and databasename → database.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\ndatabasename = "my_db"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\ndatabasename = my_db\n") f.flush() temp_path = Path(f.name) @@ -164,12 +164,12 @@ def test_key_mapping_multiple_database_keys(self): def test_key_mapping_warehouse_schema_role(self): """Should map warehouse, schema, and role names.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: f.write( "[connections]\n" - 'warehousename = "my_wh"\n' - 'schemaname = "my_schema"\n' - 'rolename = "my_role"\n' + "warehousename = my_wh\n" + "schemaname = my_schema\n" + "rolename = my_role\n" ) f.flush() temp_path = Path(f.name) @@ -186,8 +186,8 @@ def 
test_key_mapping_warehouse_schema_role(self): def test_key_mapping_pwd_to_password(self): """Should map pwd → password (from env mappings).""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\npwd = "secret123"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\npwd = secret123\n") f.flush() temp_path = Path(f.name) @@ -203,8 +203,8 @@ def test_key_mapping_pwd_to_password(self): def test_unmapped_keys_passthrough(self): """Keys without mappings should pass through.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\ncustom_key = "custom_value"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\ncustom_key = custom_value\n") f.flush() temp_path = Path(f.name) @@ -218,16 +218,16 @@ def test_unmapped_keys_passthrough(self): def test_discover_all_common_keys(self): """Should discover all common SnowSQL keys with mapping.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: f.write( "[connections]\n" - 'accountname = "my_account"\n' - 'username = "my_user"\n' - 'pwd = "my_password"\n' - 'databasename = "my_db"\n' - 'schemaname = "my_schema"\n' - 'warehousename = "my_wh"\n' - 'rolename = "my_role"\n' + "accountname = my_account\n" + "username = my_user\n" + "pwd = my_password\n" + "databasename = my_db\n" + "schemaname = my_schema\n" + "warehousename = my_wh\n" + "rolename = my_role\n" ) f.flush() temp_path = Path(f.name) @@ -254,8 +254,8 @@ def test_discover_all_common_keys(self): def test_discover_specific_key(self): """Should discover specific key with mapping.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccountname = "my_account"\nusername = "my_user"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + 
f.write("[connections]\naccountname = my_account\nusername = my_user\n") f.flush() temp_path = Path(f.name) @@ -271,8 +271,8 @@ def test_discover_specific_key(self): def test_discover_nonexistent_key(self): """Should return empty dict for nonexistent key.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccountname = "my_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\naccountname = my_account\n") f.flush() temp_path = Path(f.name) @@ -286,8 +286,8 @@ def test_discover_nonexistent_key(self): def test_discover_nonexistent_section(self): """Should return empty dict for nonexistent section.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('accountname = "my_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("accountname = my_account\n") f.flush() temp_path = Path(f.name) @@ -301,8 +301,8 @@ def test_discover_nonexistent_section(self): def test_values_have_correct_metadata(self): """Discovered values should have correct metadata.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccountname = "my_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\naccountname = my_account\n") f.flush() temp_path = Path(f.name) @@ -329,8 +329,8 @@ def test_supports_any_string_key(self): def test_reverse_mapping_for_specific_key_query(self): """Should use reverse mapping when querying specific key.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccountname = "my_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\naccountname = my_account\n") f.flush() temp_path = Path(f.name) @@ -355,8 +355,8 @@ def test_get_cli_key_method(self): def test_case_insensitive_key_mapping(self): """Key 
mappings should be case-insensitive.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\nAccountName = "my_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\nAccountName = my_account\n") f.flush() temp_path = Path(f.name) @@ -370,10 +370,10 @@ def test_case_insensitive_key_mapping(self): finally: temp_path.unlink() - def test_invalid_toml_returns_empty(self): - """Should handle invalid TOML gracefully.""" + def test_invalid_ini_returns_empty(self): + """Should handle invalid INI gracefully.""" with NamedTemporaryFile(mode="w", delete=False) as f: - f.write("invalid toml content [[[") + f.write("invalid ini content [[[") f.flush() temp_path = Path(f.name) @@ -387,8 +387,8 @@ def test_invalid_toml_returns_empty(self): def test_caching_behavior(self): """Should cache file data for performance.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\naccountname = "my_account"\n') + with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: + f.write("[connections]\naccountname = my_account\n") f.flush() temp_path = Path(f.name) diff --git a/tests/config_ng/test_snowsql_config_paths.py b/tests/config_ng/test_snowsql_config_paths.py index dc26c771a5..1885399223 100644 --- a/tests/config_ng/test_snowsql_config_paths.py +++ b/tests/config_ng/test_snowsql_config_paths.py @@ -171,7 +171,7 @@ def test_paths_work_with_file_source(self, tmp_path): snowsql_dir.mkdir() user_config = snowsql_dir / "config" user_config.write_text( - '[connections]\naccountname = "user_account"\nusername = "user"\n' + "[connections]\naccountname = user_account\nusername = user\n" ) # Get paths using helper @@ -204,8 +204,8 @@ def test_file_precedence_with_multiple_configs(self, tmp_path): user_config = snowsql_dir / "config" user_config.write_text( "[connections]\n" - 'accountname = "priority1_account"\n' - 'username = "priority1_user"\n' 
+ "accountname = priority1_account\n" + "username = priority1_user\n" ) # Create another config in snowsql dir (should have lower priority) @@ -213,9 +213,9 @@ def test_file_precedence_with_multiple_configs(self, tmp_path): legacy_config = snowsql_dir / "legacy.toml" legacy_config.write_text( "[connections]\n" - 'accountname = "priority2_account"\n' - 'username = "priority2_user"\n' - 'databasename = "priority2_db"\n' + "accountname = priority2_account\n" + "username = priority2_user\n" + "databasename = priority2_db\n" ) # Manually specify paths to test precedence From c52e1415373b56389d98ec9b98d4d9eafebe0ac5 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 8 Oct 2025 15:18:07 +0200 Subject: [PATCH 13/78] SNOW-2306184: config refactory - drop tests for language functionality --- tests/config_ng/test_configuration_source.py | 53 +--------- tests/config_ng/test_value_source.py | 104 ------------------- 2 files changed, 1 insertion(+), 156 deletions(-) diff --git a/tests/config_ng/test_configuration_source.py b/tests/config_ng/test_configuration_source.py index 6f5800db16..2c2ef8c7bd 100644 --- a/tests/config_ng/test_configuration_source.py +++ b/tests/config_ng/test_configuration_source.py @@ -13,10 +13,9 @@ # limitations under the License. """ -Unit tests for ConfigurationSource abstract base class. +Unit tests for ConfigurationSource. 
Tests verify: -- Abstract class cannot be instantiated without implementing abstract methods - Handler ordering and precedence - Handler management (add, set, get) - Direct value precedence over handler values @@ -25,7 +24,6 @@ from typing import Any, Dict, Optional -import pytest from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority from snowflake.cli.api.config_ng.handlers import SourceHandler from snowflake.cli.api.config_ng.sources import ConfigurationSource @@ -79,55 +77,6 @@ def supports_key(self, key: str) -> bool: return key in self._data -class TestConfigurationSourceInterface: - """Test suite for ConfigurationSource abstract base class.""" - - def test_cannot_instantiate_abstract_class(self): - """Should not be able to instantiate ConfigurationSource directly.""" - with pytest.raises(TypeError): - ConfigurationSource() - - def test_must_implement_discover_direct(self): - """Concrete implementations must implement discover_direct method.""" - - class IncompleteSource(ConfigurationSource): - @property - def source_name(self) -> str: - return "test" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def supports_key(self, key: str) -> bool: - return True - - with pytest.raises(TypeError): - IncompleteSource() - - def test_complete_implementation(self): - """Should be able to instantiate with all methods implemented.""" - - class CompleteSource(ConfigurationSource): - @property - def source_name(self) -> str: - return "test_source" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def discover_direct(self, key=None) -> Dict[str, ConfigValue]: - return {} - - def supports_key(self, key: str) -> bool: - return True - - source = CompleteSource() - assert source.source_name == "test_source" - assert source.priority == SourcePriority.FILE - - class TestConfigurationSourceHandlers: """Test handler management in ConfigurationSource.""" diff --git 
a/tests/config_ng/test_value_source.py b/tests/config_ng/test_value_source.py index 0d65666b0e..16e2f5c6ac 100644 --- a/tests/config_ng/test_value_source.py +++ b/tests/config_ng/test_value_source.py @@ -16,117 +16,13 @@ Unit tests for ValueSource interface. Tests verify: -- Abstract interface cannot be instantiated -- All abstract methods must be implemented - Concrete implementations work correctly - Common protocol is enforced """ -import pytest from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority, ValueSource -class TestValueSourceInterface: - """Test suite for ValueSource abstract interface.""" - - def test_cannot_instantiate_abstract_class(self): - """Should not be able to instantiate ValueSource directly.""" - with pytest.raises(TypeError): - ValueSource() - - def test_must_implement_source_name(self): - """Concrete implementations must implement source_name property.""" - - class IncompleteSource(ValueSource): - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def discover(self, key=None): - return {} - - def supports_key(self, key: str) -> bool: - return True - - with pytest.raises(TypeError): - IncompleteSource() - - def test_must_implement_priority(self): - """Concrete implementations must implement priority property.""" - - class IncompleteSource(ValueSource): - @property - def source_name(self) -> str: - return "test" - - def discover(self, key=None): - return {} - - def supports_key(self, key: str) -> bool: - return True - - with pytest.raises(TypeError): - IncompleteSource() - - def test_must_implement_discover(self): - """Concrete implementations must implement discover method.""" - - class IncompleteSource(ValueSource): - @property - def source_name(self) -> str: - return "test" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def supports_key(self, key: str) -> bool: - return True - - with pytest.raises(TypeError): - IncompleteSource() - - def 
test_must_implement_supports_key(self): - """Concrete implementations must implement supports_key method.""" - - class IncompleteSource(ValueSource): - @property - def source_name(self) -> str: - return "test" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def discover(self, key=None): - return {} - - with pytest.raises(TypeError): - IncompleteSource() - - def test_complete_implementation(self): - """Should be able to instantiate with all methods implemented.""" - - class CompleteSource(ValueSource): - @property - def source_name(self) -> str: - return "test_source" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def discover(self, key=None): - return {} - - def supports_key(self, key: str) -> bool: - return True - - source = CompleteSource() - assert source.source_name == "test_source" - assert source.priority == SourcePriority.FILE - - class TestValueSourceConcreteImplementation: """Test a concrete implementation of ValueSource.""" From 6128802ad59f34926dadd55964b4d0434e044eff Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 8 Oct 2025 15:24:29 +0200 Subject: [PATCH 14/78] SNOW-2306184: config refactory - drop tests for language functionality p2 --- tests/config_ng/test_config_value.py | 86 ---------------------- tests/config_ng/test_resolution_history.py | 49 +----------- tests/config_ng/test_source_priority.py | 74 ------------------- 3 files changed, 1 insertion(+), 208 deletions(-) diff --git a/tests/config_ng/test_config_value.py b/tests/config_ng/test_config_value.py index ef650be4c3..e633acdd88 100644 --- a/tests/config_ng/test_config_value.py +++ b/tests/config_ng/test_config_value.py @@ -16,14 +16,12 @@ Unit tests for ConfigValue dataclass. 
Tests verify: -- Immutability (frozen dataclass) - Field values and types - Raw value preservation - Type conversions - Representation formatting """ -import pytest from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority @@ -61,58 +59,6 @@ def test_create_config_value_with_raw_value(self): assert isinstance(cv.value, int) assert isinstance(cv.raw_value, str) - def test_config_value_is_immutable(self): - """ConfigValue should be immutable (frozen dataclass).""" - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - with pytest.raises(Exception): - cv.key = "new_key" - - with pytest.raises(Exception): - cv.value = "new_value" - - with pytest.raises(Exception): - cv.source_name = "new_source" - - def test_config_value_equality(self): - """ConfigValue instances with same data should be equal.""" - cv1 = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - cv2 = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - assert cv1 == cv2 - - def test_config_value_inequality(self): - """ConfigValue instances with different data should not be equal.""" - cv1 = ConfigValue( - key="account", - value="account1", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - cv2 = ConfigValue( - key="account", - value="account2", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - assert cv1 != cv2 - def test_repr_without_conversion(self): """__repr__ should show value only when no conversion occurred.""" cv = ConfigValue( @@ -275,35 +221,3 @@ def test_priority_comparison(self): ) assert cv_high.priority.value < cv_low.priority.value - - def test_config_value_hash(self): - """ConfigValue should be hashable (frozen dataclass).""" - cv1 = ConfigValue( - key="account", - value="my_account", - 
source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - cv2 = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - assert hash(cv1) == hash(cv2) - - config_set = {cv1, cv2} - assert len(config_set) == 1 - - def test_config_value_can_be_dict_key(self): - """ConfigValue should be usable as dictionary key.""" - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - test_dict = {cv: "some_data"} - assert test_dict[cv] == "some_data" diff --git a/tests/config_ng/test_resolution_history.py b/tests/config_ng/test_resolution_history.py index 74a4208d24..5d5c83237d 100644 --- a/tests/config_ng/test_resolution_history.py +++ b/tests/config_ng/test_resolution_history.py @@ -16,7 +16,7 @@ Unit tests for Resolution History tracking. Tests verify: -- ResolutionEntry immutability and fields +- ResolutionEntry fields - ResolutionHistory creation and properties - Resolution chain formatting - History export to dictionary @@ -25,7 +25,6 @@ from datetime import datetime -import pytest from snowflake.cli.api.config_ng.core import ( ConfigValue, ResolutionEntry, @@ -77,52 +76,6 @@ def test_create_entry_with_override(self): assert entry.was_used is False assert entry.overridden_by == "cli_arguments" - def test_resolution_entry_is_immutable(self): - """ResolutionEntry should be immutable (frozen dataclass).""" - config_value = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - entry = ResolutionEntry( - config_value=config_value, - timestamp=datetime.now(), - was_used=True, - ) - - with pytest.raises(Exception): - entry.was_used = False - - with pytest.raises(Exception): - entry.overridden_by = "someone" - - def test_resolution_entry_equality(self): - """ResolutionEntry instances with same data should be equal.""" - config_value = 
ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - timestamp = datetime.now() - - entry1 = ResolutionEntry( - config_value=config_value, - timestamp=timestamp, - was_used=True, - ) - - entry2 = ResolutionEntry( - config_value=config_value, - timestamp=timestamp, - was_used=True, - ) - - assert entry1 == entry2 - class TestResolutionHistory: """Test suite for ResolutionHistory dataclass.""" diff --git a/tests/config_ng/test_source_priority.py b/tests/config_ng/test_source_priority.py index 37dcbb336f..fd86020c91 100644 --- a/tests/config_ng/test_source_priority.py +++ b/tests/config_ng/test_source_priority.py @@ -18,28 +18,14 @@ Tests verify: - Enum values are correctly defined - Priority ordering is correct (lower value = higher priority) -- Enum members have expected attributes """ -import pytest from snowflake.cli.api.config_ng.core import SourcePriority class TestSourcePriority: """Test suite for SourcePriority enum.""" - def test_enum_members_exist(self): - """All required enum members should exist.""" - assert hasattr(SourcePriority, "CLI_ARGUMENT") - assert hasattr(SourcePriority, "ENVIRONMENT") - assert hasattr(SourcePriority, "FILE") - - def test_enum_values_are_integers(self): - """All enum values should be integers.""" - assert isinstance(SourcePriority.CLI_ARGUMENT.value, int) - assert isinstance(SourcePriority.ENVIRONMENT.value, int) - assert isinstance(SourcePriority.FILE.value, int) - def test_cli_argument_has_highest_priority(self): """CLI_ARGUMENT should have the lowest numeric value (highest priority).""" assert SourcePriority.CLI_ARGUMENT.value == 1 @@ -69,63 +55,3 @@ def test_enum_comparison(self): assert sorted_priorities[0] == SourcePriority.CLI_ARGUMENT assert sorted_priorities[1] == SourcePriority.ENVIRONMENT assert sorted_priorities[2] == SourcePriority.FILE - - def test_enum_equality(self): - """Enum members should be equal to themselves.""" - assert 
SourcePriority.CLI_ARGUMENT == SourcePriority.CLI_ARGUMENT - assert SourcePriority.ENVIRONMENT == SourcePriority.ENVIRONMENT - assert SourcePriority.FILE == SourcePriority.FILE - - def test_enum_inequality(self): - """Different enum members should not be equal.""" - assert SourcePriority.CLI_ARGUMENT != SourcePriority.ENVIRONMENT - assert SourcePriority.ENVIRONMENT != SourcePriority.FILE - assert SourcePriority.CLI_ARGUMENT != SourcePriority.FILE - - def test_enum_has_name_attribute(self): - """Enum members should have a name attribute.""" - assert SourcePriority.CLI_ARGUMENT.name == "CLI_ARGUMENT" - assert SourcePriority.ENVIRONMENT.name == "ENVIRONMENT" - assert SourcePriority.FILE.name == "FILE" - - def test_enum_is_iterable(self): - """Should be able to iterate over enum members.""" - members = list(SourcePriority) - assert len(members) == 3 - assert SourcePriority.CLI_ARGUMENT in members - assert SourcePriority.ENVIRONMENT in members - assert SourcePriority.FILE in members - - def test_enum_can_be_accessed_by_name(self): - """Should be able to access enum members by name.""" - assert SourcePriority["CLI_ARGUMENT"] == SourcePriority.CLI_ARGUMENT - assert SourcePriority["ENVIRONMENT"] == SourcePriority.ENVIRONMENT - assert SourcePriority["FILE"] == SourcePriority.FILE - - def test_enum_can_be_accessed_by_value(self): - """Should be able to access enum members by value.""" - assert SourcePriority(1) == SourcePriority.CLI_ARGUMENT - assert SourcePriority(2) == SourcePriority.ENVIRONMENT - assert SourcePriority(3) == SourcePriority.FILE - - def test_invalid_value_raises_error(self): - """Accessing enum with invalid value should raise ValueError.""" - with pytest.raises(ValueError): - SourcePriority(99) - - def test_invalid_name_raises_error(self): - """Accessing enum with invalid name should raise KeyError.""" - with pytest.raises(KeyError): - SourcePriority["INVALID"] - - def test_enum_repr(self): - """Enum members should have a readable representation.""" - 
assert "SourcePriority.CLI_ARGUMENT" in repr(SourcePriority.CLI_ARGUMENT) - assert "SourcePriority.ENVIRONMENT" in repr(SourcePriority.ENVIRONMENT) - assert "SourcePriority.FILE" in repr(SourcePriority.FILE) - - def test_enum_str(self): - """Enum members should have a readable string representation.""" - assert "SourcePriority.CLI_ARGUMENT" in str(SourcePriority.CLI_ARGUMENT) - assert "SourcePriority.ENVIRONMENT" in str(SourcePriority.ENVIRONMENT) - assert "SourcePriority.FILE" in str(SourcePriority.FILE) From 5664e4625ad84c0855b822a3b6e50a9608408560 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 8 Oct 2025 15:42:11 +0200 Subject: [PATCH 15/78] SNOW-2306184: config refactory - cleanup source config handlers naming --- src/snowflake/cli/api/config_ng/__init__.py | 6 +- .../cli/api/config_ng/file_handlers.py | 65 +++++---- src/snowflake/cli/api/config_provider.py | 4 +- tests/config_ng/conftest.py | 4 +- .../config_ng/test_file_handler_migration.py | 18 +-- tests/config_ng/test_resolver_integration.py | 4 +- .../config_ng/test_snowsql_config_handler.py | 131 +++++++++--------- tests/config_ng/test_snowsql_config_paths.py | 10 +- 8 files changed, 130 insertions(+), 112 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index 7bd68c0234..7b6c1124f7 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -35,7 +35,8 @@ SnowSqlEnvHandler, ) from snowflake.cli.api.config_ng.file_handlers import ( - SnowSqlConfigHandler, + SNOWSQL_CONFIG_KEY_MAPPINGS, + IniFileHandler, TomlFileHandler, get_snowsql_config_paths, ) @@ -82,9 +83,10 @@ "show_all_resolution_chains", "show_resolution_chain", "SnowCliEnvHandler", - "SnowSqlConfigHandler", + "SNOWSQL_CONFIG_KEY_MAPPINGS", "SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS", "SnowSqlEnvHandler", + "IniFileHandler", "SourcePriority", "TomlFileHandler", "ValueSource", diff --git 
a/src/snowflake/cli/api/config_ng/file_handlers.py b/src/snowflake/cli/api/config_ng/file_handlers.py index 1240003fe3..93a4c865ec 100644 --- a/src/snowflake/cli/api/config_ng/file_handlers.py +++ b/src/snowflake/cli/api/config_ng/file_handlers.py @@ -17,13 +17,14 @@ This module implements handlers for: - TOML configuration files (SnowCLI format) -- SnowSQL configuration files (INI format with key mapping) +- INI configuration files (SnowSQL format with key mapping) """ from __future__ import annotations import configparser from pathlib import Path +from types import MappingProxyType from typing import Dict, List, Optional import tomlkit @@ -31,6 +32,20 @@ from snowflake.cli.api.config_ng.env_handlers import SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS from snowflake.cli.api.config_ng.handlers import SourceHandler +# Key mappings from SnowSQL to SnowCLI config keys (immutable) +SNOWSQL_CONFIG_KEY_MAPPINGS: MappingProxyType[str, str] = MappingProxyType( + { + **SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS, # Include env mappings (pwd → password) + "accountname": "account", + "username": "user", + "dbname": "database", + "databasename": "database", + "schemaname": "schema", + "warehousename": "warehouse", + "rolename": "role", + } +) + def get_snowsql_config_paths() -> List[Path]: """ @@ -202,10 +217,10 @@ def supports_key(self, key: str) -> bool: return isinstance(key, str) -class SnowSqlConfigHandler(SourceHandler): +class IniFileHandler(SourceHandler): """ - Handler for SnowSQL config files. - Format: INI format with SnowSQL-specific key naming. + Handler for INI format configuration files. + Supports SnowSQL-specific key naming and mappings. SnowSQL Multi-File Support: SnowSQL reads from multiple config file locations (system-wide, user home, etc.) 
@@ -232,42 +247,38 @@ class SnowSqlConfigHandler(SourceHandler): Example usage with multiple files: from snowflake.cli.api.config_ng import ( - FileSource, SnowSqlConfigHandler, get_snowsql_config_paths + FileSource, IniFileHandler, get_snowsql_config_paths ) + snowsql_config_handler = IniFileHandler(source_name="snowsql_config") source = FileSource( file_paths=get_snowsql_config_paths(), - handlers=[SnowSqlConfigHandler()] + handlers=[snowsql_config_handler] ) """ - # Key mappings from SnowSQL to SnowCLI (in addition to env mappings) - SNOWSQL_CONFIG_KEY_MAPPINGS: Dict[str, str] = { - **SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS, # Include env mappings (pwd → password) - "accountname": "account", - "username": "user", - "dbname": "database", - "databasename": "database", - "schemaname": "schema", - "warehousename": "warehouse", - "rolename": "role", - } - - def __init__(self, section_path: Optional[List[str]] = None): + def __init__( + self, + section_path: Optional[List[str]] = None, + source_name: str = "snowsql_config", + ): """ - Initialize with optional section path. + Initialize with optional section path and source name. 
Args: section_path: Path to section in config file Default: ["connections"] for SnowSQL compatibility + source_name: Name to identify this handler instance (e.g., "snowsql_config", "ini_config") + Default: "snowsql_config" for backward compatibility """ self._section_path = section_path or ["connections"] + self._source_name = source_name self._cached_data: Optional[configparser.ConfigParser] = None self._cached_file: Optional[Path] = None @property def source_name(self) -> str: - return "snowsql_config" + return self._source_name @property def priority(self) -> SourcePriority: @@ -275,7 +286,7 @@ def priority(self) -> SourcePriority: @property def handler_type(self) -> str: - return "snowsql_config" + return "ini" def can_handle(self) -> bool: """SnowSQL handler is always available.""" @@ -295,7 +306,7 @@ def can_handle_file(self, file_path: Path) -> bool: def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """Not directly called - file handlers use discover_from_file.""" raise NotImplementedError( - "SnowSqlConfigHandler requires file_path. Use discover_from_file() instead." + "IniFileHandler requires file_path. Use discover_from_file() instead." 
) def discover_from_file( @@ -372,7 +383,7 @@ def discover_from_file( # Map to SnowCLI key (lowercase) snowsql_key_lower = snowsql_key.lower() - cli_key = self.SNOWSQL_CONFIG_KEY_MAPPINGS.get( + cli_key = SNOWSQL_CONFIG_KEY_MAPPINGS.get( snowsql_key_lower, snowsql_key_lower ) @@ -396,7 +407,7 @@ def supports_key(self, key: str) -> bool: def _get_snowsql_key(self, cli_key: str) -> str: """Reverse mapping: CLI key → SnowSQL key.""" - for snowsql_key, cli_mapped_key in self.SNOWSQL_CONFIG_KEY_MAPPINGS.items(): + for snowsql_key, cli_mapped_key in SNOWSQL_CONFIG_KEY_MAPPINGS.items(): if cli_mapped_key == cli_key: return snowsql_key return cli_key @@ -404,6 +415,4 @@ def _get_snowsql_key(self, cli_key: str) -> str: def get_cli_key(self, snowsql_key: str) -> str: """Forward mapping: SnowSQL key → CLI key.""" snowsql_key_lower = snowsql_key.lower() - return self.SNOWSQL_CONFIG_KEY_MAPPINGS.get( - snowsql_key_lower, snowsql_key_lower - ) + return SNOWSQL_CONFIG_KEY_MAPPINGS.get(snowsql_key_lower, snowsql_key_lower) diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index a7cc6f9f97..ad225431a7 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -150,8 +150,8 @@ def _ensure_initialized(self) -> None: ConfigurationResolver, EnvironmentSource, FileSource, + IniFileHandler, SnowCliEnvHandler, - SnowSqlConfigHandler, SnowSqlEnvHandler, TomlFileHandler, get_snowsql_config_paths, @@ -197,7 +197,7 @@ def _ensure_initialized(self) -> None: TomlFileHandler(section_path=["cli"]), TomlFileHandler(), # Root level # SnowSQL handler (tried last, fallback) - SnowSqlConfigHandler(), + IniFileHandler(source_name="snowsql_config"), ], ) diff --git a/tests/config_ng/conftest.py b/tests/config_ng/conftest.py index 09db791957..a90371a1c2 100644 --- a/tests/config_ng/conftest.py +++ b/tests/config_ng/conftest.py @@ -33,8 +33,8 @@ ConfigurationResolver, EnvironmentSource, FileSource, + 
IniFileHandler, SnowCliEnvHandler, - SnowSqlConfigHandler, SnowSqlEnvHandler, TomlFileHandler, ) @@ -356,7 +356,7 @@ def get_resolver(self) -> ConfigurationResolver: if self.snowsql_config_path and self.snowsql_config_path.exists(): file_paths.append(self.snowsql_config_path) file_handlers.append( - SnowSqlConfigHandler(section_path=["connections", self.connection_name]) + IniFileHandler(section_path=["connections", self.connection_name]) ) if file_paths: diff --git a/tests/config_ng/test_file_handler_migration.py b/tests/config_ng/test_file_handler_migration.py index a8837c131e..2a858a476d 100644 --- a/tests/config_ng/test_file_handler_migration.py +++ b/tests/config_ng/test_file_handler_migration.py @@ -26,7 +26,7 @@ from tempfile import NamedTemporaryFile from snowflake.cli.api.config_ng.file_handlers import ( - SnowSqlConfigHandler, + IniFileHandler, TomlFileHandler, ) from snowflake.cli.api.config_ng.sources import FileSource @@ -47,7 +47,7 @@ def test_pure_toml_configuration(self): file_paths=[toml_path], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) @@ -76,7 +76,7 @@ def test_pure_snowsql_configuration(self): file_paths=[snowsql_path], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) @@ -113,7 +113,7 @@ def test_partial_migration_toml_overrides_snowsql(self): file_paths=[toml_path, snowsql_path], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) @@ -132,7 +132,7 @@ def test_partial_migration_toml_overrides_snowsql(self): def test_handler_ordering_within_same_file(self): """Handler order matters when both can handle same file.""" # Create a pure TOML file that both handlers could potentially read - # TomlFileHandler will read [default], SnowSqlConfigHandler will read [connections] + # TomlFileHandler will read [default], IniFileHandler will read [connections] with NamedTemporaryFile(mode="w", 
suffix=".toml", delete=False) as f: # Pure TOML format file with both sections f.write( @@ -221,7 +221,7 @@ def test_complete_migration_timeline(self): file_paths=[snowsql_path], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) @@ -247,7 +247,7 @@ def test_complete_migration_timeline(self): file_paths=[toml_path, snowsql_path], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) @@ -309,7 +309,7 @@ def test_discover_specific_key_with_migration(self): file_paths=[toml_path, snowsql_path], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) @@ -355,7 +355,7 @@ def test_complex_configuration_with_all_features(self): handlers=[ TomlFileHandler(section_path=["connections"]), TomlFileHandler(section_path=["cli"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) diff --git a/tests/config_ng/test_resolver_integration.py b/tests/config_ng/test_resolver_integration.py index d7801a4e44..7641e213c5 100644 --- a/tests/config_ng/test_resolver_integration.py +++ b/tests/config_ng/test_resolver_integration.py @@ -27,7 +27,7 @@ SnowSqlEnvHandler, ) from snowflake.cli.api.config_ng.file_handlers import ( - SnowSqlConfigHandler, + IniFileHandler, TomlFileHandler, ) from snowflake.cli.api.config_ng.resolver import ConfigurationResolver @@ -117,7 +117,7 @@ def test_snowsql_to_snowcli_migration(self, tmp_path, monkeypatch): file_paths=[snowcli_config, snowsql_config], handlers=[ TomlFileHandler(section_path=["default"]), - SnowSqlConfigHandler(), + IniFileHandler(), ], ) diff --git a/tests/config_ng/test_snowsql_config_handler.py b/tests/config_ng/test_snowsql_config_handler.py index 617c3a0a87..cc5464d041 100644 --- a/tests/config_ng/test_snowsql_config_handler.py +++ b/tests/config_ng/test_snowsql_config_handler.py @@ -13,7 +13,7 @@ # limitations under the License. """ -Unit tests for SnowSqlConfigHandler. 
+Unit tests for IniFileHandler. Tests verify: - SnowSQL config file discovery @@ -28,19 +28,19 @@ import pytest from snowflake.cli.api.config_ng.core import SourcePriority -from snowflake.cli.api.config_ng.file_handlers import SnowSqlConfigHandler +from snowflake.cli.api.config_ng.file_handlers import IniFileHandler -class TestSnowSqlConfigHandler: - """Test suite for SnowSqlConfigHandler.""" +class TestIniFileHandler: + """Test suite for IniFileHandler.""" def test_create_handler(self): """Should create handler with correct properties.""" - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() - assert handler.source_name == "snowsql_config" - assert handler.priority == SourcePriority.FILE - assert handler.handler_type == "snowsql_config" + assert snowsql_config_handler.source_name == "snowsql_config" + assert snowsql_config_handler.priority == SourcePriority.FILE + assert snowsql_config_handler.handler_type == "ini" def test_default_section_path(self): """Should default to connections section.""" @@ -54,8 +54,8 @@ def test_default_section_path(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) # Should find value in [connections] section assert "account" in values finally: @@ -72,8 +72,10 @@ def test_custom_section_path(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler(section_path=["connections", "prod"]) - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler( + section_path=["connections", "prod"] + ) + values = snowsql_config_handler.discover_from_file(temp_path) # Should find value in custom section path assert values["account"].value == "prod_account" finally: @@ -81,34 +83,37 @@ def test_custom_section_path(self): def test_can_handle_always_true(self): """Should always return True.""" - handler = 
SnowSqlConfigHandler() - assert handler.can_handle() is True + snowsql_config_handler = IniFileHandler() + assert snowsql_config_handler.can_handle() is True def test_can_handle_snowsql_config_files(self): """Should detect SnowSQL config files.""" - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() # Typical SnowSQL config path - assert handler.can_handle_file(Path("~/.snowsql/config")) is True - assert handler.can_handle_file(Path("/home/user/.snowsql/config")) is True + assert snowsql_config_handler.can_handle_file(Path("~/.snowsql/config")) is True + assert ( + snowsql_config_handler.can_handle_file(Path("/home/user/.snowsql/config")) + is True + ) def test_can_handle_toml_files(self): """Should also handle .toml files.""" - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() - assert handler.can_handle_file(Path("config.toml")) is True + assert snowsql_config_handler.can_handle_file(Path("config.toml")) is True def test_discover_raises_not_implemented(self): """Should raise NotImplementedError for discover() without file_path.""" - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() with pytest.raises(NotImplementedError, match="requires file_path"): - handler.discover() + snowsql_config_handler.discover() def test_discover_from_nonexistent_file(self): """Should return empty dict for nonexistent file.""" - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(Path("/nonexistent/config")) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(Path("/nonexistent/config")) assert len(values) == 0 @@ -120,8 +125,8 @@ def test_key_mapping_accountname(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) assert len(values) == 1 assert "account" in values @@ 
-139,8 +144,8 @@ def test_key_mapping_username(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) assert values["user"].value == "my_user" assert values["user"].raw_value == "username=my_user" @@ -155,8 +160,8 @@ def test_key_mapping_multiple_database_keys(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) assert values["database"].value == "my_db" finally: @@ -175,8 +180,8 @@ def test_key_mapping_warehouse_schema_role(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) assert values["warehouse"].value == "my_wh" assert values["schema"].value == "my_schema" @@ -192,8 +197,8 @@ def test_key_mapping_pwd_to_password(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) assert "password" in values assert "pwd" not in values @@ -209,8 +214,8 @@ def test_unmapped_keys_passthrough(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) assert values["custom_key"].value == "custom_value" finally: @@ -233,8 +238,8 @@ def test_discover_all_common_keys(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = 
IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) assert len(values) == 7 assert all( @@ -260,8 +265,8 @@ def test_discover_specific_key(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path, key="account") + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path, key="account") assert len(values) == 1 assert "account" in values @@ -277,8 +282,10 @@ def test_discover_nonexistent_key(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path, key="nonexistent") + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file( + temp_path, key="nonexistent" + ) assert len(values) == 0 finally: @@ -292,8 +299,8 @@ def test_discover_nonexistent_section(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() # Default section: connections - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() # Default section: connections + values = snowsql_config_handler.discover_from_file(temp_path) assert len(values) == 0 finally: @@ -307,8 +314,8 @@ def test_values_have_correct_metadata(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) config_value = values["account"] assert config_value.source_name == "snowsql_config" @@ -322,10 +329,10 @@ def test_values_have_correct_metadata(self): def test_supports_any_string_key(self): """Should support any string key.""" - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() - assert handler.supports_key("account") is True - assert handler.supports_key("any_key") is True + assert snowsql_config_handler.supports_key("account") is True + assert 
snowsql_config_handler.supports_key("any_key") is True def test_reverse_mapping_for_specific_key_query(self): """Should use reverse mapping when querying specific key.""" @@ -335,9 +342,9 @@ def test_reverse_mapping_for_specific_key_query(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() # Query for "account" should find "accountname" - values = handler.discover_from_file(temp_path, key="account") + values = snowsql_config_handler.discover_from_file(temp_path, key="account") assert len(values) == 1 assert values["account"].value == "my_account" @@ -346,12 +353,12 @@ def test_reverse_mapping_for_specific_key_query(self): def test_get_cli_key_method(self): """Should convert SnowSQL keys to CLI keys.""" - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() - assert handler.get_cli_key("accountname") == "account" - assert handler.get_cli_key("username") == "user" - assert handler.get_cli_key("pwd") == "password" - assert handler.get_cli_key("unmapped") == "unmapped" + assert snowsql_config_handler.get_cli_key("accountname") == "account" + assert snowsql_config_handler.get_cli_key("username") == "user" + assert snowsql_config_handler.get_cli_key("pwd") == "password" + assert snowsql_config_handler.get_cli_key("unmapped") == "unmapped" def test_case_insensitive_key_mapping(self): """Key mappings should be case-insensitive.""" @@ -361,8 +368,8 @@ def test_case_insensitive_key_mapping(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() + values = snowsql_config_handler.discover_from_file(temp_path) # Should still map to "account" assert "account" in values @@ -378,8 +385,8 @@ def test_invalid_ini_returns_empty(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() - values = handler.discover_from_file(temp_path) + snowsql_config_handler = IniFileHandler() 
+ values = snowsql_config_handler.discover_from_file(temp_path) assert len(values) == 0 finally: @@ -393,12 +400,12 @@ def test_caching_behavior(self): temp_path = Path(f.name) try: - handler = SnowSqlConfigHandler() + snowsql_config_handler = IniFileHandler() # First call loads file - values1 = handler.discover_from_file(temp_path) + values1 = snowsql_config_handler.discover_from_file(temp_path) # Second call uses cache - values2 = handler.discover_from_file(temp_path) + values2 = snowsql_config_handler.discover_from_file(temp_path) assert values1 == values2 # Verify caching by checking results are consistent diff --git a/tests/config_ng/test_snowsql_config_paths.py b/tests/config_ng/test_snowsql_config_paths.py index 1885399223..0bfd39cd5f 100644 --- a/tests/config_ng/test_snowsql_config_paths.py +++ b/tests/config_ng/test_snowsql_config_paths.py @@ -158,11 +158,11 @@ def test_system_configs_have_lower_priority_than_user(self, tmp_path): class TestSnowSqlConfigPathsIntegration: - """Integration tests with FileSource and SnowSqlConfigHandler.""" + """Integration tests with FileSource and IniFileHandler.""" def test_paths_work_with_file_source(self, tmp_path): """Paths should work correctly with FileSource.""" - from snowflake.cli.api.config_ng.file_handlers import SnowSqlConfigHandler + from snowflake.cli.api.config_ng.file_handlers import IniFileHandler from snowflake.cli.api.config_ng.sources import FileSource with patch("pathlib.Path.home", return_value=tmp_path): @@ -178,7 +178,7 @@ def test_paths_work_with_file_source(self, tmp_path): paths = get_snowsql_config_paths() # Create FileSource with these paths - source = FileSource(file_paths=paths, handlers=[SnowSqlConfigHandler()]) + source = FileSource(file_paths=paths, handlers=[IniFileHandler()]) values = source.discover() @@ -194,7 +194,7 @@ def test_file_precedence_with_multiple_configs(self, tmp_path): In FileSource: earlier files override later ones With reversed order: same effective behavior """ - from 
snowflake.cli.api.config_ng.file_handlers import SnowSqlConfigHandler + from snowflake.cli.api.config_ng.file_handlers import IniFileHandler from snowflake.cli.api.config_ng.sources import FileSource with patch("pathlib.Path.home", return_value=tmp_path): @@ -221,7 +221,7 @@ def test_file_precedence_with_multiple_configs(self, tmp_path): # Manually specify paths to test precedence paths = [user_config, legacy_config] - source = FileSource(file_paths=paths, handlers=[SnowSqlConfigHandler()]) + source = FileSource(file_paths=paths, handlers=[IniFileHandler()]) values = source.discover() From 2c89535848b969f5b52c2c1af926017020c6e267 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 8 Oct 2025 16:13:11 +0200 Subject: [PATCH 16/78] SNOW-2306184: config refactory - cleanup source ConfigValue creation --- src/snowflake/cli/api/config_ng/core.py | 33 +- .../cli/api/config_ng/env_handlers.py | 26 +- tests/config_ng/test_configuration_source.py | 312 ------------------ 3 files changed, 44 insertions(+), 327 deletions(-) delete mode 100644 tests/config_ng/test_configuration_source.py diff --git a/src/snowflake/cli/api/config_ng/core.py b/src/snowflake/cli/api/config_ng/core.py index e22f48e270..73dc678444 100644 --- a/src/snowflake/cli/api/config_ng/core.py +++ b/src/snowflake/cli/api/config_ng/core.py @@ -28,7 +28,7 @@ from dataclasses import dataclass, field from datetime import datetime from enum import Enum -from typing import Any, Dict, List, Optional +from typing import Any, Callable, Dict, List, Optional class SourcePriority(Enum): @@ -62,6 +62,37 @@ def __repr__(self) -> str: value_display = f"{self.raw_value} → {self.value}" return f"ConfigValue({self.key}={value_display}, from {self.source_name})" + @classmethod + def from_source( + cls, + key: str, + raw_value: str, + source_name: str, + priority: SourcePriority, + value_parser: Optional[Callable[[str], Any]] = None, + ) -> ConfigValue: + """ + Factory method to create ConfigValue from a source handler. 
+ + Args: + key: Configuration key + raw_value: Raw string value from the source + source_name: Name of the configuration source + priority: Source priority level + value_parser: Optional parser function; if None, raw_value is used as-is + + Returns: + ConfigValue instance with parsed value + """ + parsed_value = value_parser(raw_value) if value_parser else raw_value + return cls( + key=key, + value=parsed_value, + source_name=source_name, + priority=priority, + raw_value=raw_value, + ) + class ValueSource(ABC): """ diff --git a/src/snowflake/cli/api/config_ng/env_handlers.py b/src/snowflake/cli/api/config_ng/env_handlers.py index 9daad47117..9dca0cf295 100644 --- a/src/snowflake/cli/api/config_ng/env_handlers.py +++ b/src/snowflake/cli/api/config_ng/env_handlers.py @@ -68,25 +68,24 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: # Discover specific key env_key = f"{self.PREFIX}{key.upper()}" if env_key in os.environ: - raw = os.environ[env_key] - values[key] = ConfigValue( + values[key] = ConfigValue.from_source( key=key, - value=self._parse_value(raw), + raw_value=os.environ[env_key], source_name=self.source_name, priority=self.priority, - raw_value=raw, + value_parser=self._parse_value, ) else: # Discover all SNOWFLAKE_* variables for env_key, env_value in os.environ.items(): if env_key.startswith(self.PREFIX): config_key = env_key[len(self.PREFIX) :].lower() - values[config_key] = ConfigValue( + values[config_key] = ConfigValue.from_source( key=config_key, - value=self._parse_value(env_value), + raw_value=env_value, source_name=self.source_name, priority=self.priority, - raw_value=env_value, + value_parser=self._parse_value, ) return values @@ -162,13 +161,12 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: env_key = f"{self.PREFIX}{snowsql_key.upper()}" if env_key in os.environ: - raw = os.environ[env_key] - values[key] = ConfigValue( + values[key] = ConfigValue.from_source( key=key, # Normalized SnowCLI 
key - value=self._parse_value(raw), + raw_value=os.environ[env_key], source_name=self.source_name, priority=self.priority, - raw_value=raw, + value_parser=self._parse_value, ) else: # Discover all SNOWSQL_* variables @@ -178,12 +176,12 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: # Map to SnowCLI key config_key = self.KEY_MAPPINGS.get(snowsql_key, snowsql_key) - values[config_key] = ConfigValue( + values[config_key] = ConfigValue.from_source( key=config_key, - value=self._parse_value(env_value), + raw_value=env_value, source_name=self.source_name, priority=self.priority, - raw_value=env_value, + value_parser=self._parse_value, ) return values diff --git a/tests/config_ng/test_configuration_source.py b/tests/config_ng/test_configuration_source.py deleted file mode 100644 index 2c2ef8c7bd..0000000000 --- a/tests/config_ng/test_configuration_source.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for ConfigurationSource. 
- -Tests verify: -- Handler ordering and precedence -- Handler management (add, set, get) -- Direct value precedence over handler values -- Handler failure handling -""" - -from typing import Any, Dict, Optional - -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority -from snowflake.cli.api.config_ng.handlers import SourceHandler -from snowflake.cli.api.config_ng.sources import ConfigurationSource - - -class MockHandler(SourceHandler): - """Mock handler for testing.""" - - def __init__(self, data: Dict[str, Any], name: str = "mock_handler"): - self._data = data - self._name = name - - @property - def source_name(self) -> str: - return self._name - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - @property - def handler_type(self) -> str: - return "mock" - - def can_handle(self) -> bool: - return True - - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - if key is None: - return { - k: ConfigValue( - key=k, - value=v, - source_name=self.source_name, - priority=self.priority, - ) - for k, v in self._data.items() - } - elif key in self._data: - return { - key: ConfigValue( - key=key, - value=self._data[key], - source_name=self.source_name, - priority=self.priority, - ) - } - return {} - - def supports_key(self, key: str) -> bool: - return key in self._data - - -class TestConfigurationSourceHandlers: - """Test handler management in ConfigurationSource.""" - - class TestSource(ConfigurationSource): - """Test implementation of ConfigurationSource.""" - - def __init__(self, direct_values=None, handlers=None): - super().__init__(handlers=handlers) - self._direct_values = direct_values or {} - - @property - def source_name(self) -> str: - return "test_source" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def discover_direct(self, key=None) -> Dict[str, ConfigValue]: - if key is None: - return { - k: ConfigValue( - key=k, - value=v, - 
source_name=self.source_name, - priority=self.priority, - ) - for k, v in self._direct_values.items() - } - elif key in self._direct_values: - return { - key: ConfigValue( - key=key, - value=self._direct_values[key], - source_name=self.source_name, - priority=self.priority, - ) - } - return {} - - def supports_key(self, key: str) -> bool: - return key in self._direct_values or any( - h.supports_key(key) for h in self._handlers - ) - - def test_initialize_with_no_handlers(self): - """Should initialize with empty handler list.""" - source = self.TestSource() - assert len(source.get_handlers()) == 0 - - def test_initialize_with_handlers(self): - """Should initialize with provided handlers.""" - handler1 = MockHandler({"key1": "value1"}, "handler1") - handler2 = MockHandler({"key2": "value2"}, "handler2") - - source = self.TestSource(handlers=[handler1, handler2]) - handlers = source.get_handlers() - - assert len(handlers) == 2 - assert handlers[0] == handler1 - assert handlers[1] == handler2 - - def test_handler_ordering_first_wins(self): - """First handler with value should win for same key.""" - handler1 = MockHandler({"account": "handler1_account"}, "handler1") - handler2 = MockHandler({"account": "handler2_account"}, "handler2") - - source = self.TestSource(handlers=[handler1, handler2]) - values = source.discover(key="account") - - assert values["account"].value == "handler1_account" - assert values["account"].source_name == "handler1" - - def test_handlers_complement_each_other(self): - """Handlers should provide different keys.""" - handler1 = MockHandler({"key1": "value1"}, "handler1") - handler2 = MockHandler({"key2": "value2"}, "handler2") - - source = self.TestSource(handlers=[handler1, handler2]) - values = source.discover() - - assert len(values) == 2 - assert values["key1"].value == "value1" - assert values["key2"].value == "value2" - - def test_direct_values_override_handlers(self): - """Direct values should take precedence over handler values.""" - 
handler = MockHandler({"account": "handler_account"}, "handler") - direct_values = {"account": "direct_account"} - - source = self.TestSource(direct_values=direct_values, handlers=[handler]) - values = source.discover(key="account") - - assert values["account"].value == "direct_account" - assert values["account"].source_name == "test_source" - - def test_discover_all_values_from_handlers(self): - """Should discover all values when key is None.""" - handler1 = MockHandler({"key1": "value1", "key2": "value2"}, "handler1") - handler2 = MockHandler({"key3": "value3"}, "handler2") - - source = self.TestSource(handlers=[handler1, handler2]) - values = source.discover() - - assert len(values) == 3 - assert "key1" in values - assert "key2" in values - assert "key3" in values - - def test_discover_specific_key_from_handlers(self): - """Should discover specific key when provided.""" - handler = MockHandler({"key1": "value1", "key2": "value2"}, "handler") - - source = self.TestSource(handlers=[handler]) - values = source.discover(key="key1") - - assert len(values) == 1 - assert "key1" in values - assert values["key1"].value == "value1" - - def test_handler_failure_does_not_break_discovery(self): - """Failed handler should not prevent other handlers from working.""" - - class FailingHandler(SourceHandler): - @property - def source_name(self) -> str: - return "failing" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - @property - def handler_type(self) -> str: - return "failing" - - def can_handle(self) -> bool: - return True - - def discover(self, key=None): - raise RuntimeError("Handler failed") - - def supports_key(self, key: str) -> bool: - return True - - failing = FailingHandler() - working = MockHandler({"key1": "value1"}, "working") - - source = self.TestSource(handlers=[failing, working]) - values = source.discover() - - # Should still get value from working handler - assert len(values) == 1 - assert values["key1"].value == "value1" 
- - def test_add_handler_append(self): - """Should append handler to end of list.""" - handler1 = MockHandler({"key1": "value1"}, "handler1") - handler2 = MockHandler({"key2": "value2"}, "handler2") - - source = self.TestSource(handlers=[handler1]) - source.add_handler(handler2) - - handlers = source.get_handlers() - assert len(handlers) == 2 - assert handlers[1] == handler2 - - def test_add_handler_prepend(self): - """Should prepend handler to beginning of list.""" - handler1 = MockHandler({"key1": "value1"}, "handler1") - handler2 = MockHandler({"key2": "value2"}, "handler2") - - source = self.TestSource(handlers=[handler1]) - source.add_handler(handler2, position=0) - - handlers = source.get_handlers() - assert len(handlers) == 2 - assert handlers[0] == handler2 - - def test_add_handler_at_position(self): - """Should insert handler at specific position.""" - handler1 = MockHandler({"key1": "value1"}, "handler1") - handler2 = MockHandler({"key2": "value2"}, "handler2") - handler3 = MockHandler({"key3": "value3"}, "handler3") - - source = self.TestSource(handlers=[handler1, handler3]) - source.add_handler(handler2, position=1) - - handlers = source.get_handlers() - assert len(handlers) == 3 - assert handlers[1] == handler2 - - def test_set_handlers(self): - """Should replace all handlers with new list.""" - handler1 = MockHandler({"key1": "value1"}, "handler1") - handler2 = MockHandler({"key2": "value2"}, "handler2") - handler3 = MockHandler({"key3": "value3"}, "handler3") - - source = self.TestSource(handlers=[handler1, handler2]) - source.set_handlers([handler3]) - - handlers = source.get_handlers() - assert len(handlers) == 1 - assert handlers[0] == handler3 - - def test_get_handlers_returns_copy(self): - """get_handlers should return a copy, not the original list.""" - handler = MockHandler({"key1": "value1"}, "handler1") - source = self.TestSource(handlers=[handler]) - - handlers = source.get_handlers() - handlers.clear() - - # Original list should be 
unchanged - assert len(source.get_handlers()) == 1 - - def test_empty_handlers_returns_direct_values_only(self): - """With no handlers, should return only direct values.""" - direct_values = {"account": "direct_account"} - source = self.TestSource(direct_values=direct_values, handlers=[]) - - values = source.discover() - - assert len(values) == 1 - assert values["account"].value == "direct_account" - - def test_supports_key_checks_handlers(self): - """supports_key should check handlers.""" - handler = MockHandler({"key1": "value1"}, "handler") - source = self.TestSource(handlers=[handler]) - - assert source.supports_key("key1") is True - assert source.supports_key("nonexistent") is False From cc63fd74132696646d81baef5aaa29ab25bec101 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 8 Oct 2025 17:17:10 +0200 Subject: [PATCH 17/78] SNOW-2306184: config refactor - simplify abstractions --- src/snowflake/cli/api/config_ng/__init__.py | 44 +- src/snowflake/cli/api/config_ng/core.py | 31 +- .../cli/api/config_ng/env_handlers.py | 219 ------- .../cli/api/config_ng/file_handlers.py | 418 ------------- src/snowflake/cli/api/config_ng/handlers.py | 84 --- src/snowflake/cli/api/config_ng/resolver.py | 49 +- src/snowflake/cli/api/config_ng/sources.py | 575 +++++++++++------- src/snowflake/cli/api/config_provider.py | 83 +-- tests/config_ng/conftest.py | 93 +-- tests/config_ng/test_cli_argument_source.py | 207 ------- tests/config_ng/test_config_value.py | 63 +- .../config_ng/test_configuration_resolver.py | 441 -------------- tests/config_ng/test_env_handler_migration.py | 321 ---------- tests/config_ng/test_environment_source.py | 300 --------- .../config_ng/test_file_handler_migration.py | 378 ------------ tests/config_ng/test_file_source.py | 463 -------------- tests/config_ng/test_resolution_history.py | 482 --------------- .../test_resolution_history_tracker.py | 431 ------------- tests/config_ng/test_resolver_integration.py | 377 ------------ 
tests/config_ng/test_snowcli_env_handler.py | 287 --------- .../config_ng/test_snowsql_config_handler.py | 413 ------------- tests/config_ng/test_snowsql_config_paths.py | 233 ------- tests/config_ng/test_snowsql_env_handler.py | 309 ---------- tests/config_ng/test_source_priority.py | 57 -- tests/config_ng/test_toml_file_handler.py | 308 ---------- tests/config_ng/test_value_source.py | 191 ------ 26 files changed, 506 insertions(+), 6351 deletions(-) delete mode 100644 src/snowflake/cli/api/config_ng/env_handlers.py delete mode 100644 src/snowflake/cli/api/config_ng/file_handlers.py delete mode 100644 src/snowflake/cli/api/config_ng/handlers.py delete mode 100644 tests/config_ng/test_cli_argument_source.py delete mode 100644 tests/config_ng/test_configuration_resolver.py delete mode 100644 tests/config_ng/test_env_handler_migration.py delete mode 100644 tests/config_ng/test_environment_source.py delete mode 100644 tests/config_ng/test_file_handler_migration.py delete mode 100644 tests/config_ng/test_file_source.py delete mode 100644 tests/config_ng/test_resolution_history.py delete mode 100644 tests/config_ng/test_resolution_history_tracker.py delete mode 100644 tests/config_ng/test_resolver_integration.py delete mode 100644 tests/config_ng/test_snowcli_env_handler.py delete mode 100644 tests/config_ng/test_snowsql_config_handler.py delete mode 100644 tests/config_ng/test_snowsql_config_paths.py delete mode 100644 tests/config_ng/test_snowsql_env_handler.py delete mode 100644 tests/config_ng/test_source_priority.py delete mode 100644 tests/config_ng/test_toml_file_handler.py delete mode 100644 tests/config_ng/test_value_source.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index 7b6c1124f7..5d8c8442d3 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -15,8 +15,8 @@ """ Enhanced Configuration System - Next Generation (NG) -This package implements 
a layered, extensible configuration system with: -- Clear precedence rules (CLI > Environment > Files) +This package implements a simple, extensible configuration system with: +- List-order precedence (explicit ordering in source list) - Migration support (SnowCLI and SnowSQL compatibility) - Complete resolution history tracking - Read-only, immutable configuration sources @@ -26,20 +26,8 @@ ConfigValue, ResolutionEntry, ResolutionHistory, - SourcePriority, ValueSource, ) -from snowflake.cli.api.config_ng.env_handlers import ( - SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS, - SnowCliEnvHandler, - SnowSqlEnvHandler, -) -from snowflake.cli.api.config_ng.file_handlers import ( - SNOWSQL_CONFIG_KEY_MAPPINGS, - IniFileHandler, - TomlFileHandler, - get_snowsql_config_paths, -) from snowflake.cli.api.config_ng.resolution_logger import ( check_value_source, explain_configuration, @@ -56,38 +44,34 @@ ResolutionHistoryTracker, ) from snowflake.cli.api.config_ng.sources import ( - CliArgumentSource, - ConfigurationSource, - EnvironmentSource, - FileSource, + CliConfigFile, + CliEnvironment, + CliParameters, + ConnectionsConfigFile, + SnowSQLConfigFile, + SnowSQLEnvironment, ) __all__ = [ "check_value_source", - "CliArgumentSource", + "CliConfigFile", + "CliEnvironment", + "CliParameters", "ConfigurationResolver", - "ConfigurationSource", "ConfigValue", - "EnvironmentSource", + "ConnectionsConfigFile", "explain_configuration", "export_resolution_history", - "FileSource", "format_summary_for_display", "get_resolution_summary", "get_resolver", - "get_snowsql_config_paths", "is_resolution_logging_available", "ResolutionEntry", "ResolutionHistory", "ResolutionHistoryTracker", "show_all_resolution_chains", "show_resolution_chain", - "SnowCliEnvHandler", - "SNOWSQL_CONFIG_KEY_MAPPINGS", - "SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS", - "SnowSqlEnvHandler", - "IniFileHandler", - "SourcePriority", - "TomlFileHandler", + "SnowSQLConfigFile", + "SnowSQLEnvironment", "ValueSource", ] diff --git 
a/src/snowflake/cli/api/config_ng/core.py b/src/snowflake/cli/api/config_ng/core.py index 73dc678444..e7969fd14a 100644 --- a/src/snowflake/cli/api/config_ng/core.py +++ b/src/snowflake/cli/api/config_ng/core.py @@ -16,7 +16,6 @@ Core abstractions for the enhanced configuration system. This module implements the foundational data structures and interfaces: -- SourcePriority: Defines precedence levels - ConfigValue: Immutable value container with provenance - ValueSource: Common protocol for all configuration sources - ResolutionHistory: Tracks the complete resolution process @@ -27,21 +26,9 @@ from abc import ABC, abstractmethod from dataclasses import dataclass, field from datetime import datetime -from enum import Enum from typing import Any, Callable, Dict, List, Optional -class SourcePriority(Enum): - """ - Defines top-level precedence for configuration sources. - Lower numeric value = higher priority. - """ - - CLI_ARGUMENT = 1 # Highest: command-line arguments - ENVIRONMENT = 2 # Medium: environment variables - FILE = 3 # Lowest: configuration files - - @dataclass(frozen=True) class ConfigValue: """ @@ -52,7 +39,6 @@ class ConfigValue: key: str value: Any source_name: str - priority: SourcePriority raw_value: Optional[Any] = None def __repr__(self) -> str: @@ -68,17 +54,15 @@ def from_source( key: str, raw_value: str, source_name: str, - priority: SourcePriority, value_parser: Optional[Callable[[str], Any]] = None, ) -> ConfigValue: """ - Factory method to create ConfigValue from a source handler. + Factory method to create ConfigValue from a source. 
Args: key: Configuration key raw_value: Raw string value from the source source_name: Name of the configuration source - priority: Source priority level value_parser: Optional parser function; if None, raw_value is used as-is Returns: @@ -89,15 +73,15 @@ def from_source( key=key, value=parsed_value, source_name=source_name, - priority=priority, raw_value=raw_value, ) class ValueSource(ABC): """ - Common interface for all configuration sources and handlers. + Common interface for all configuration sources. All implementations are READ-ONLY discovery mechanisms. + Precedence is determined by the order sources are provided to the resolver. """ @property @@ -105,16 +89,10 @@ class ValueSource(ABC): def source_name(self) -> str: """ Unique identifier for this source. - Examples: "cli_arguments", "snowflake_cli_env", "toml:connections" + Examples: "cli_arguments", "snowsql_config", "cli_env" """ ... - @property - @abstractmethod - def priority(self) -> SourcePriority: - """Top-level priority for this source.""" - ... - @abstractmethod def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ @@ -241,7 +219,6 @@ def to_dict(self) -> dict: "source": entry.config_value.source_name, "value": entry.config_value.value, "raw_value": entry.config_value.raw_value, - "priority": entry.config_value.priority.name, "was_used": entry.was_used, "overridden_by": entry.overridden_by, "timestamp": entry.timestamp.isoformat(), diff --git a/src/snowflake/cli/api/config_ng/env_handlers.py b/src/snowflake/cli/api/config_ng/env_handlers.py deleted file mode 100644 index 9dca0cf295..0000000000 --- a/src/snowflake/cli/api/config_ng/env_handlers.py +++ /dev/null @@ -1,219 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Environment variable handlers for configuration system. - -This module implements handlers for: -- SNOWFLAKE_* environment variables (SnowCLI format) -- SNOWSQL_* environment variables (Legacy SnowSQL format with key mapping) -""" - -from __future__ import annotations - -import os -from typing import Any, Dict, Optional - -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority -from snowflake.cli.api.config_ng.handlers import SourceHandler - -# Key mappings from SnowSQL to SnowCLI naming conventions -# These mappings are used by handlers that need to support SnowSQL compatibility -SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS: Dict[str, str] = { - "pwd": "password", -} - - -class SnowCliEnvHandler(SourceHandler): - """ - Handler for Snowflake CLI environment variables. 
- Format: SNOWFLAKE_ → key - Example: SNOWFLAKE_ACCOUNT → account - """ - - PREFIX = "SNOWFLAKE_" - - @property - def source_name(self) -> str: - return "snowflake_cli_env" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.ENVIRONMENT - - @property - def handler_type(self) -> str: - return "snowflake_cli_env" - - def can_handle(self) -> bool: - """Check if any SNOWFLAKE_* env vars are set.""" - return any(k.startswith(self.PREFIX) for k in os.environ) - - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - """Discover values from SNOWFLAKE_* environment variables.""" - values = {} - - if key is not None: - # Discover specific key - env_key = f"{self.PREFIX}{key.upper()}" - if env_key in os.environ: - values[key] = ConfigValue.from_source( - key=key, - raw_value=os.environ[env_key], - source_name=self.source_name, - priority=self.priority, - value_parser=self._parse_value, - ) - else: - # Discover all SNOWFLAKE_* variables - for env_key, env_value in os.environ.items(): - if env_key.startswith(self.PREFIX): - config_key = env_key[len(self.PREFIX) :].lower() - values[config_key] = ConfigValue.from_source( - key=config_key, - raw_value=env_value, - source_name=self.source_name, - priority=self.priority, - value_parser=self._parse_value, - ) - - return values - - def supports_key(self, key: str) -> bool: - """Any string key can be represented as SNOWFLAKE_* env var.""" - return isinstance(key, str) - - def _parse_value(self, value: str) -> Any: - """ - Parse string value to appropriate type. 
- Supports: boolean, integer, string - """ - # Boolean - case-insensitive - lower_val = value.lower() - if lower_val in ("true", "1", "yes", "on"): - return True - if lower_val in ("false", "0", "no", "off"): - return False - - # Integer - try: - return int(value) - except ValueError: - pass - - # String (default) - return value - - -class SnowSqlEnvHandler(SourceHandler): - """ - Handler for SnowSQL-compatible environment variables. - Format: SNOWSQL_ → key - Supports key mappings for SnowSQL-specific naming. - - Key Mappings (SnowSQL → SnowCLI): - - PWD → password - - All other keys map directly (ACCOUNT → account, USER → user, etc.) - """ - - PREFIX = "SNOWSQL_" - - # Reference to module-level key mappings - KEY_MAPPINGS = SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS - - @property - def source_name(self) -> str: - return "snowsql_env" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.ENVIRONMENT - - @property - def handler_type(self) -> str: - return "snowsql_env" - - def can_handle(self) -> bool: - """Check if any SNOWSQL_* env vars are set.""" - return any(k.startswith(self.PREFIX) for k in os.environ) - - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - """ - Discover values from SNOWSQL_* environment variables. - Applies key mappings for compatibility. 
- """ - values = {} - - if key is not None: - # Reverse lookup: find SnowSQL key for CLI key - snowsql_key = self.get_snowsql_key(key) - env_key = f"{self.PREFIX}{snowsql_key.upper()}" - - if env_key in os.environ: - values[key] = ConfigValue.from_source( - key=key, # Normalized SnowCLI key - raw_value=os.environ[env_key], - source_name=self.source_name, - priority=self.priority, - value_parser=self._parse_value, - ) - else: - # Discover all SNOWSQL_* variables - for env_key, env_value in os.environ.items(): - if env_key.startswith(self.PREFIX): - snowsql_key = env_key[len(self.PREFIX) :].lower() - # Map to SnowCLI key - config_key = self.KEY_MAPPINGS.get(snowsql_key, snowsql_key) - - values[config_key] = ConfigValue.from_source( - key=config_key, - raw_value=env_value, - source_name=self.source_name, - priority=self.priority, - value_parser=self._parse_value, - ) - - return values - - def supports_key(self, key: str) -> bool: - """Any string key can be represented as SNOWSQL_* env var.""" - return isinstance(key, str) - - def get_snowsql_key(self, cli_key: str) -> str: - """Reverse mapping: CLI key → SnowSQL key.""" - for snowsql_key, cli_mapped_key in self.KEY_MAPPINGS.items(): - if cli_mapped_key == cli_key: - return snowsql_key - return cli_key - - def _parse_value(self, value: str) -> Any: - """ - Parse string value to appropriate type. - Supports: boolean, integer, string - """ - # Boolean - case-insensitive - lower_val = value.lower() - if lower_val in ("true", "1", "yes", "on"): - return True - if lower_val in ("false", "0", "no", "off"): - return False - - # Integer - try: - return int(value) - except ValueError: - pass - - # String (default) - return value diff --git a/src/snowflake/cli/api/config_ng/file_handlers.py b/src/snowflake/cli/api/config_ng/file_handlers.py deleted file mode 100644 index 93a4c865ec..0000000000 --- a/src/snowflake/cli/api/config_ng/file_handlers.py +++ /dev/null @@ -1,418 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -File format handlers for configuration system. - -This module implements handlers for: -- TOML configuration files (SnowCLI format) -- INI configuration files (SnowSQL format with key mapping) -""" - -from __future__ import annotations - -import configparser -from pathlib import Path -from types import MappingProxyType -from typing import Dict, List, Optional - -import tomlkit -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority -from snowflake.cli.api.config_ng.env_handlers import SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS -from snowflake.cli.api.config_ng.handlers import SourceHandler - -# Key mappings from SnowSQL to SnowCLI config keys (immutable) -SNOWSQL_CONFIG_KEY_MAPPINGS: MappingProxyType[str, str] = MappingProxyType( - { - **SNOWSQL_TO_SNOWCLI_KEY_MAPPINGS, # Include env mappings (pwd → password) - "accountname": "account", - "username": "user", - "dbname": "database", - "databasename": "database", - "schemaname": "schema", - "warehousename": "warehouse", - "rolename": "role", - } -) - - -def get_snowsql_config_paths() -> List[Path]: - """ - Get standard SnowSQL configuration file paths in FileSource precedence order. - - SnowSQL reads config files where "last one wins" (later files override earlier ones). - Our FileSource uses "first one wins" (earlier files override later ones). 
- - This function returns paths in REVERSE order of SnowSQL's CNF_FILES to maintain - compatibility with SnowSQL's precedence behavior. - - SnowSQL precedence (lowest to highest): - 1. Bundled default config - 2. System-wide configs (/etc/snowsql.cnf, /etc/snowflake/snowsql.cnf, /usr/local/etc/snowsql.cnf) - 3. User home config (~/.snowsql.cnf) - 4. User .snowsql directory config (~/.snowsql/config) - 5. RPM config (/usr/lib64/snowflake/snowsql/config) - if exists - - Returns: - List of Path objects in FileSource precedence order (highest to lowest priority). - Only includes paths that exist on the filesystem. - """ - home_dir = Path.home() - - # Define paths in FileSource order (first = highest priority) - # This is REVERSE of SnowSQL's order to maintain same effective precedence - paths_to_check = [ - # Highest priority in both systems - home_dir / ".snowsql" / "config", # User .snowsql directory config - home_dir / ".snowsql.cnf", # User home config (legacy) - Path("/usr/local/etc/snowsql.cnf"), # Local system config - Path("/etc/snowflake/snowsql.cnf"), # Alternative system config - Path("/etc/snowsql.cnf"), # System-wide config - # Bundled default config would go here but we typically don't ship one - # Lowest priority in both systems - ] - - # Check for RPM config (highest priority in SnowSQL if it exists) - rpm_config = Path("/usr/lib64/snowflake/snowsql/config") - if rpm_config.exists(): - paths_to_check.insert(0, rpm_config) # Add as highest priority - - # Return only paths that exist - return [p for p in paths_to_check if p.exists()] - - -class TomlFileHandler(SourceHandler): - """ - Handler for TOML configuration files. - Supports section navigation for nested configurations. 
- - Example: - # Config file: ~/.snowflake/connections.toml - [default] - account = "my_account" - user = "my_user" - - # With section_path=["default"] - TomlFileHandler(section_path=["default"]).discover_from_file(path) - # Returns: {"account": "my_account", "user": "my_user"} - """ - - def __init__(self, section_path: Optional[List[str]] = None): - """ - Initialize with optional section path. - - Args: - section_path: Path to section in TOML - Example: ["connections", "default"] for [connections.default] - None or [] means root level - """ - self._section_path = section_path or [] - self._cached_data: Optional[Dict] = None - self._cached_file: Optional[Path] = None - - @property - def source_name(self) -> str: - if self._section_path: - section = ".".join(self._section_path) - return f"toml:{section}" - return "toml:root" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - @property - def handler_type(self) -> str: - return "toml" - - def can_handle(self) -> bool: - """TOML handler is always available.""" - return True - - def can_handle_file(self, file_path: Path) -> bool: - """Check if file is TOML format.""" - return file_path.suffix.lower() in (".toml", ".tml") - - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - """Not directly called - file handlers use discover_from_file.""" - raise NotImplementedError( - "TomlFileHandler requires file_path. Use discover_from_file() instead." - ) - - def discover_from_file( - self, - file_path: Path, - key: Optional[str] = None, - ) -> Dict[str, ConfigValue]: - """ - Discover values from TOML file. 
- - Args: - file_path: Path to TOML file - key: Specific key to discover, or None for all - - Returns: - Dictionary of discovered values - """ - # Load and cache file data - if self._cached_file != file_path: - try: - with open(file_path) as f: - self._cached_data = tomlkit.load(f) - self._cached_file = file_path - except (OSError, tomlkit.exceptions.TOMLKitError): - # File doesn't exist or invalid TOML - return {} - - # Navigate to section - data = self._cached_data - for section in self._section_path: - if isinstance(data, dict) and section in data: - data = data[section] - else: - return {} # Section doesn't exist - - # Ensure data is a dictionary - if not isinstance(data, dict): - return {} - - # Extract values - values = {} - if key is not None: - if key in data: - raw = data[key] - values[key] = ConfigValue( - key=key, - value=raw, # TOML already parsed - source_name=self.source_name, - priority=self.priority, - raw_value=str(raw) if raw is not None else None, - ) - else: - for k, v in data.items(): - if isinstance(k, str): # Only process string keys - values[k] = ConfigValue( - key=k, - value=v, - source_name=self.source_name, - priority=self.priority, - raw_value=str(v) if v is not None else None, - ) - - return values - - def supports_key(self, key: str) -> bool: - """TOML can handle any string key.""" - return isinstance(key, str) - - -class IniFileHandler(SourceHandler): - """ - Handler for INI format configuration files. - Supports SnowSQL-specific key naming and mappings. - - SnowSQL Multi-File Support: - SnowSQL reads from multiple config file locations (system-wide, user home, etc.) - where later files override earlier ones. To maintain this behavior with FileSource: - - 1. Use get_snowsql_config_paths() to get paths in correct precedence order - 2. 
FileSource will process them with "first wins" logic, which matches - SnowSQL's effective behavior due to reversed ordering - - Key Mappings (SnowSQL → SnowCLI): - - accountname → account - - username → user - - dbname/databasename → database - - schemaname → schema - - warehousename → warehouse - - rolename → role - - pwd → password - - Example SnowSQL config (INI format): - [connections.default] - accountname = my_account - username = my_user - password = secret123 - - Example usage with multiple files: - from snowflake.cli.api.config_ng import ( - FileSource, IniFileHandler, get_snowsql_config_paths - ) - - snowsql_config_handler = IniFileHandler(source_name="snowsql_config") - source = FileSource( - file_paths=get_snowsql_config_paths(), - handlers=[snowsql_config_handler] - ) - """ - - def __init__( - self, - section_path: Optional[List[str]] = None, - source_name: str = "snowsql_config", - ): - """ - Initialize with optional section path and source name. - - Args: - section_path: Path to section in config file - Default: ["connections"] for SnowSQL compatibility - source_name: Name to identify this handler instance (e.g., "snowsql_config", "ini_config") - Default: "snowsql_config" for backward compatibility - """ - self._section_path = section_path or ["connections"] - self._source_name = source_name - self._cached_data: Optional[configparser.ConfigParser] = None - self._cached_file: Optional[Path] = None - - @property - def source_name(self) -> str: - return self._source_name - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - @property - def handler_type(self) -> str: - return "ini" - - def can_handle(self) -> bool: - """SnowSQL handler is always available.""" - return True - - def can_handle_file(self, file_path: Path) -> bool: - """Check if file is SnowSQL config file.""" - # SnowSQL config is typically ~/.snowsql/config (no extension) - # or ~/.snowsql.cnf, /etc/snowsql.cnf, etc. 
- if file_path.parent.name == ".snowsql" and file_path.name == "config": - return True - if file_path.suffix.lower() == ".cnf": - return True - # For backward compatibility during migration, also handle .toml - return file_path.suffix.lower() in (".toml", ".tml") - - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - """Not directly called - file handlers use discover_from_file.""" - raise NotImplementedError( - "IniFileHandler requires file_path. Use discover_from_file() instead." - ) - - def discover_from_file( - self, - file_path: Path, - key: Optional[str] = None, - ) -> Dict[str, ConfigValue]: - """ - Discover values from SnowSQL config with key mapping. - - Args: - file_path: Path to SnowSQL config file (INI format) - key: Specific key to discover (SnowCLI format), or None - - Returns: - Dictionary with normalized SnowCLI keys - """ - # Load and cache file data - if self._cached_file != file_path: - try: - parser = configparser.ConfigParser() - parser.read(file_path) - self._cached_data = parser - self._cached_file = file_path - except (OSError, configparser.Error): - return {} - - # Ensure we have cached data - if self._cached_data is None: - return {} - - # Build the section name from section_path - # INI uses dot notation: connections.default becomes "connections.default" - section_name = ".".join(self._section_path) if self._section_path else None - - # Get the data from the appropriate section - data = {} - if section_name: - if self._cached_data.has_section(section_name): - data = dict(self._cached_data.items(section_name)) - else: - # Try to find subsections (e.g., if section_path is ["connections"]) - # Look for all sections starting with "connections." 
- if len(self._section_path) == 1: - base_section = self._section_path[0] - if self._cached_data.has_section(base_section): - data = dict(self._cached_data.items(base_section)) - - if not data: - return {} - - # Extract and map keys - values = {} - - if key is not None: - # Reverse lookup: find SnowSQL key for CLI key - snowsql_key = self._get_snowsql_key(key) - # Check both original case and lowercase - for k in [snowsql_key, snowsql_key.lower()]: - if k in data: - raw = data[k] - values[key] = ConfigValue( - key=key, # Normalized SnowCLI key - value=raw, - source_name=self.source_name, - priority=self.priority, - raw_value=f"{k}={raw}" if k != key else str(raw), - ) - break - else: - for snowsql_key, value in data.items(): - if not isinstance(snowsql_key, str): - continue - - # Map to SnowCLI key (lowercase) - snowsql_key_lower = snowsql_key.lower() - cli_key = SNOWSQL_CONFIG_KEY_MAPPINGS.get( - snowsql_key_lower, snowsql_key_lower - ) - - values[cli_key] = ConfigValue( - key=cli_key, - value=value, - source_name=self.source_name, - priority=self.priority, - raw_value=( - f"{snowsql_key}={value}" - if snowsql_key_lower != cli_key - else str(value) - ), - ) - - return values - - def supports_key(self, key: str) -> bool: - """Any string key can be represented in SnowSQL config.""" - return isinstance(key, str) - - def _get_snowsql_key(self, cli_key: str) -> str: - """Reverse mapping: CLI key → SnowSQL key.""" - for snowsql_key, cli_mapped_key in SNOWSQL_CONFIG_KEY_MAPPINGS.items(): - if cli_mapped_key == cli_key: - return snowsql_key - return cli_key - - def get_cli_key(self, snowsql_key: str) -> str: - """Forward mapping: SnowSQL key → CLI key.""" - snowsql_key_lower = snowsql_key.lower() - return SNOWSQL_CONFIG_KEY_MAPPINGS.get(snowsql_key_lower, snowsql_key_lower) diff --git a/src/snowflake/cli/api/config_ng/handlers.py b/src/snowflake/cli/api/config_ng/handlers.py deleted file mode 100644 index bb00398727..0000000000 --- 
a/src/snowflake/cli/api/config_ng/handlers.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Configuration handlers for specific formats and schemas. - -This module will implement specific handlers for: -- Environment variables (SNOWFLAKE_*, SNOWSQL_*) -- File formats (TOML, SnowSQL config, JSON, YAML) - -To be implemented in Phase 3-4. -""" - -from __future__ import annotations - -from abc import abstractmethod -from pathlib import Path -from typing import Dict, Optional - -from snowflake.cli.api.config_ng.core import ConfigValue, ValueSource - - -class SourceHandler(ValueSource): - """ - Specific handler for a configuration format or schema. - Examples: TOML files, SnowSQL config, SNOWFLAKE_* env vars, etc. - """ - - @property - @abstractmethod - def handler_type(self) -> str: - """ - Type identifier for this handler. - Examples: 'toml', 'json', 'snowsql_env', 'snowsql_config' - """ - ... - - @abstractmethod - def can_handle(self) -> bool: - """ - Check if this handler is applicable/available. - - Returns: - True if handler can be used, False otherwise - """ - ... - - def can_handle_file(self, file_path: Path) -> bool: - """ - Check if this handler can process the given file. 
- - Args: - file_path: Path to file to check - - Returns: - True if handler can process this file, False otherwise - """ - return False - - def discover_from_file( - self, file_path: Path, key: Optional[str] = None - ) -> Dict[str, ConfigValue]: - """ - Discover values from a file. - - Args: - file_path: Path to file to read - key: Specific key to discover, or None for all - - Returns: - Dictionary of discovered values - """ - return {} diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index c5f247e4d2..e71b59a795 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -37,7 +37,7 @@ from snowflake.cli.api.console import cli_console if TYPE_CHECKING: - from snowflake.cli.api.config_ng.sources import ConfigurationSource + from snowflake.cli.api.config_ng.core import ValueSource log = logging.getLogger(__name__) @@ -268,14 +268,22 @@ class ConfigurationResolver: Orchestrates configuration sources with full resolution history tracking. This is the main entry point for configuration resolution. It: - - Manages multiple configuration sources (CLI, Environment, Files) - - Applies precedence rules (CLI > Env > Files) + - Manages multiple configuration sources in precedence order + - Applies precedence rules based on source list order - Tracks complete resolution history - Provides debugging and export utilities + Sources should be provided in precedence order (lowest to highest priority). + Later sources in the list override earlier sources. 
+ Example: resolver = ConfigurationResolver( - sources=[cli_source, env_source, file_source], + sources=[ + snowsql_config, # Lowest priority + cli_config, + env_source, + cli_arguments, # Highest priority + ], track_history=True ) @@ -291,39 +299,34 @@ class ConfigurationResolver: def __init__( self, - sources: Optional[List[ConfigurationSource]] = None, + sources: Optional[List["ValueSource"]] = None, track_history: bool = True, ): """ Initialize resolver with sources and history tracking. Args: - sources: List of configuration sources (will be sorted by priority) + sources: List of configuration sources in precedence order + (first = lowest priority, last = highest priority) track_history: Enable resolution history tracking (default: True) """ self._sources = sources or [] - self._sort_sources() self._history_tracker = ResolutionHistoryTracker() if not track_history: self._history_tracker.disable() - def _sort_sources(self) -> None: - """Sort sources by priority (highest first).""" - self._sources.sort(key=lambda s: s.priority.value) - - def add_source(self, source: ConfigurationSource) -> None: + def add_source(self, source: "ValueSource") -> None: """ - Add a configuration source and re-sort. + Add a configuration source to the end of the list (highest priority). Args: - source: ConfigurationSource to add + source: ValueSource to add """ self._sources.append(source) - self._sort_sources() - def get_sources(self) -> List[ConfigurationSource]: - """Get list of all sources (for inspection).""" + def get_sources(self) -> List["ValueSource"]: + """Get list of all sources in precedence order (for inspection).""" return self._sources.copy() def enable_history(self) -> None: @@ -343,9 +346,9 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A Resolve configuration values from all sources with history tracking. Resolution Process: - 1. Query all sources for the key (lowest to highest priority) + 1. 
Iterate sources in order (lowest to highest priority) 2. Record all discovered values in history - 3. Apply precedence rules (higher priority overwrites lower) + 3. Later sources overwrite earlier sources (simple dict update) 4. Mark which value was selected 5. Return final resolved values @@ -358,9 +361,9 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A """ all_values: Dict[str, ConfigValue] = {} - # Process sources in REVERSE priority order (lowest first) - # This way higher priority values will overwrite lower ones - for source in reversed(self._sources): + # Process sources in order (first = lowest priority, last = highest) + # Later sources overwrite earlier ones via dict.update() + for source in self._sources: try: source_values = source.discover(key) @@ -368,7 +371,7 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A for k, config_value in source_values.items(): self._history_tracker.record_discovery(k, config_value) - # Update current values (higher priority overwrites) + # Update current values (later source overwrites earlier) all_values.update(source_values) except Exception as e: diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index cd7f6a9a14..38ce00abd5 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -13,320 +13,471 @@ # limitations under the License. """ -Top-level configuration sources. +Configuration sources for the Snowflake CLI. -This module implements the top-level configuration sources that orchestrate -handlers and provide configuration values according to precedence rules. 
+This module implements concrete configuration sources that discover values from: +- SnowSQL configuration files (INI format, merged from multiple locations) +- CLI configuration files (TOML format, first-found) +- Connections configuration files (dedicated connections.toml) +- SnowSQL environment variables (SNOWSQL_* prefix) +- CLI environment variables (SNOWFLAKE_* and SNOWFLAKE_CONNECTION_* patterns) +- CLI command-line parameters + +Precedence is determined by the order sources are provided to the resolver. """ from __future__ import annotations +import configparser import logging -from abc import abstractmethod +import os from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional - -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority, ValueSource +from typing import Any, Dict, Optional -if TYPE_CHECKING: - from snowflake.cli.api.config_ng.handlers import SourceHandler +from snowflake.cli.api.config_ng.core import ConfigValue, ValueSource log = logging.getLogger(__name__) +# Try to import tomllib (Python 3.11+) or fall back to tomli +try: + import tomllib +except ImportError: + import tomli as tomllib # type: ignore -class ConfigurationSource(ValueSource): + +class SnowSQLConfigFile(ValueSource): """ - Base class for top-level sources that may delegate to handlers. - Handlers are tried IN ORDER - first handler with value wins. + SnowSQL configuration file source. + + Reads multiple config files in order and MERGES them (SnowSQL behavior). + Later files override earlier files for the same keys. + + Config files searched (in order): + 1. Bundled default config (if in package) + 2. /etc/snowsql.cnf (system-wide) + 3. /etc/snowflake/snowsql.cnf (alternative system) + 4. /usr/local/etc/snowsql.cnf (local system) + 5. ~/.snowsql.cnf (legacy user config) + 6. 
~/.snowsql/config (current user config) """ - def __init__(self, handlers: Optional[List["SourceHandler"]] = None): + def __init__(self, connection_name: str = "default"): """ - Initialize with ordered list of sub-handlers. + Initialize SnowSQL config file source. Args: - handlers: List of handlers in priority order (first = highest) + connection_name: Name of the connection to read from """ - self._handlers = handlers or [] + self._connection_name = connection_name + self._config_files = [ + Path("/etc/snowsql.cnf"), + Path("/etc/snowflake/snowsql.cnf"), + Path("/usr/local/etc/snowsql.cnf"), + Path.home() / ".snowsql.cnf", + Path.home() / ".snowsql" / "config", + ] - @abstractmethod - def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - """ - Discover values directly from this source (without handlers). - Direct values always take precedence over handler values. - - Returns: - Dictionary of directly discovered values - """ - ... + @property + def source_name(self) -> str: + return "snowsql_config" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ - Discover values from handlers and direct sources. - - Precedence within this source: - 1. Direct values (highest) - 2. First handler with value - 3. Second handler with value - 4. ... and so on - - Args: - key: Specific key to discover, or None for all - - Returns: - Dictionary of all discovered values with precedence applied + Read and MERGE all SnowSQL config files. + Later files override earlier files (SnowSQL merging behavior). 
""" - discovered: Dict[str, ConfigValue] = {} + merged_values: Dict[str, ConfigValue] = {} + + for config_file in self._config_files: + if not config_file.exists(): + continue - # Process handlers in ORDER (first wins for same key) - for handler in self._handlers: try: - handler_values = handler.discover(key) - for k, v in handler_values.items(): - if k not in discovered: # First handler wins - discovered[k] = v - except Exception as e: - log.debug("Handler %s failed: %s", handler.source_name, e) + config = configparser.ConfigParser() + config.read(config_file) + + # Try connection-specific section first: [connections.prod] + section_name = f"connections.{self._connection_name}" + if config.has_section(section_name): + section_data = dict(config[section_name]) + # Fall back to default [connections] section + elif config.has_section("connections"): + section_data = dict(config["connections"]) + else: + continue - # Direct values override all handlers - direct_values = self.discover_direct(key) - discovered.update(direct_values) + # Merge values (later file wins for conflicts) + for k, v in section_data.items(): + if key is None or k == key: + merged_values[k] = ConfigValue( + key=k, + value=v, + source_name=self.source_name, + raw_value=v, + ) - return discovered + except Exception as e: + log.debug("Failed to read SnowSQL config %s: %s", config_file, e) - def add_handler(self, handler: "SourceHandler", position: int = -1) -> None: - """ - Add handler at specific position. 
+ return merged_values - Args: - handler: Handler to add - position: Insert position (-1 = append, 0 = prepend) - """ - if position == -1: - self._handlers.append(handler) - else: - self._handlers.insert(position, handler) + def supports_key(self, key: str) -> bool: + return key in self.discover() - def set_handlers(self, handlers: List["SourceHandler"]) -> None: - """Replace all handlers with new ordered list.""" - self._handlers = handlers - def get_handlers(self) -> List["SourceHandler"]: - """Get current handler list (for inspection/reordering).""" - return self._handlers.copy() +class CliConfigFile(ValueSource): + """ + CLI config.toml file source. + Scans for config.toml files in order and uses FIRST file found (CLI behavior). + Does NOT merge multiple files - first found wins. -class CliArgumentSource(ConfigurationSource): - """ - Source for command-line arguments. - Highest priority source with no sub-handlers. - Values come directly from parsed CLI arguments. + Search order: + 1. ./config.toml (current directory) + 2. ~/.snowflake/config.toml (user config) """ - def __init__(self, cli_context: Optional[Dict[str, Any]] = None): + def __init__(self, connection_name: str = "default"): """ - Initialize with CLI context containing parsed arguments. + Initialize CLI config file source. 
Args: - cli_context: Dictionary of CLI arguments (key -> value) + connection_name: Name of the connection to read from """ - super().__init__(handlers=[]) # No handlers needed - self._cli_context = cli_context or {} + self._connection_name = connection_name + self._search_paths = [ + Path.cwd() / "config.toml", + Path.home() / ".snowflake" / "config.toml", + ] @property def source_name(self) -> str: - return "cli_arguments" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.CLI_ARGUMENT + return "cli_config_toml" - def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ - Extract non-None values from CLI context. - CLI arguments are already parsed by Typer/Click. + Find FIRST existing config file and use it (CLI behavior). + Does NOT merge multiple files. """ - values = {} - - if key is not None: - # Discover specific key - if key in self._cli_context and self._cli_context[key] is not None: - values[key] = ConfigValue( - key=key, - value=self._cli_context[key], - source_name=self.source_name, - priority=self.priority, - raw_value=self._cli_context[key], + for config_file in self._search_paths: + if config_file.exists(): + return self._parse_toml_file(config_file, key) + + return {} + + def _parse_toml_file( + self, file_path: Path, key: Optional[str] = None + ) -> Dict[str, ConfigValue]: + """Parse TOML file and extract connection configuration.""" + try: + with open(file_path, "rb") as f: + data = tomllib.load(f) + + # Navigate to connections. 
+ conn_data = data.get("connections", {}).get(self._connection_name, {}) + + return { + k: ConfigValue( + key=k, value=v, source_name=self.source_name, raw_value=v ) - else: - # Discover all present values - for k, v in self._cli_context.items(): - if v is not None: - values[k] = ConfigValue( - key=k, - value=v, - source_name=self.source_name, - priority=self.priority, - raw_value=v, - ) + for k, v in conn_data.items() + if key is None or k == key + } - return values + except Exception as e: + log.debug("Failed to parse CLI config %s: %s", file_path, e) + return {} def supports_key(self, key: str) -> bool: - """Check if key is present in CLI context.""" - return key in self._cli_context + return key in self.discover() -class EnvironmentSource(ConfigurationSource): +class ConnectionsConfigFile(ValueSource): """ - Source for environment variables with handler precedence. - - Default Handler Order (supports migration): - 1. SnowCliEnvHandler (SNOWFLAKE_*) ← Check first - 2. SnowSqlEnvHandler (SNOWSQL_*) ← Fallback for legacy + Dedicated connections.toml file source. - This allows users to: - - Start with only SNOWSQL_* vars (works) - - Add SNOWFLAKE_* vars (automatically override SNOWSQL_*) - - Gradually migrate without breaking anything + Reads ~/.snowflake/connections.toml specifically. """ - def __init__(self, handlers: Optional[List["SourceHandler"]] = None): + def __init__(self, connection_name: str = "default"): """ - Initialize with ordered handlers. + Initialize connections.toml source. 
Args: - handlers: Custom handler list, or None for default + connection_name: Name of the connection to read from """ - super().__init__(handlers=handlers or []) + self._connection_name = connection_name + self._file_path = Path.home() / ".snowflake" / "connections.toml" @property def source_name(self) -> str: - return "environment" + return "connections_toml" + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """Read connections.toml if it exists.""" + if not self._file_path.exists(): + return {} + + try: + with open(self._file_path, "rb") as f: + data = tomllib.load(f) + + conn_data = data.get("connections", {}).get(self._connection_name, {}) + + return { + k: ConfigValue( + key=k, value=v, source_name=self.source_name, raw_value=v + ) + for k, v in conn_data.items() + if key is None or k == key + } + + except Exception as e: + log.debug("Failed to read connections.toml: %s", e) + return {} + + def supports_key(self, key: str) -> bool: + return key in self.discover() + + +class SnowSQLEnvironment(ValueSource): + """ + SnowSQL environment variables source. + + Discovers SNOWSQL_* environment variables only. + Simple prefix mapping without connection-specific variants. 
+ + Examples: + SNOWSQL_ACCOUNT -> account + SNOWSQL_USER -> user + SNOWSQL_PWD -> password + """ + + # Mapping of SNOWSQL_* env vars to configuration keys + ENV_VAR_MAPPING = { + "SNOWSQL_ACCOUNT": "account", + "SNOWSQL_ACCOUNTNAME": "account", # Alternative + "SNOWSQL_USER": "user", + "SNOWSQL_USERNAME": "user", # Alternative + "SNOWSQL_PWD": "password", + "SNOWSQL_PASSWORD": "password", # Alternative + "SNOWSQL_DATABASE": "database", + "SNOWSQL_DBNAME": "database", # Alternative + "SNOWSQL_SCHEMA": "schema", + "SNOWSQL_SCHEMANAME": "schema", # Alternative + "SNOWSQL_ROLE": "role", + "SNOWSQL_ROLENAME": "role", # Alternative + "SNOWSQL_WAREHOUSE": "warehouse", + "SNOWSQL_WAREHOUSENAME": "warehouse", # Alternative + "SNOWSQL_PROTOCOL": "protocol", + "SNOWSQL_HOST": "host", + "SNOWSQL_PORT": "port", + "SNOWSQL_REGION": "region", + "SNOWSQL_AUTHENTICATOR": "authenticator", + "SNOWSQL_PRIVATE_KEY_PASSPHRASE": "private_key_passphrase", + } @property - def priority(self) -> SourcePriority: - return SourcePriority.ENVIRONMENT + def source_name(self) -> str: + return "snowsql_env" - def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ - Environment source has no direct values. - All values come from handlers. + Discover SNOWSQL_* environment variables. + No connection-specific variables supported. 
""" - return {} + values: Dict[str, ConfigValue] = {} + + for env_var, config_key in self.ENV_VAR_MAPPING.items(): + if key is not None and config_key != key: + continue + + env_value = os.getenv(env_var) + if env_value is not None: + # Only set if not already set by a previous env var + # (e.g., SNOWSQL_ACCOUNT takes precedence over SNOWSQL_ACCOUNTNAME) + if config_key not in values: + values[config_key] = ConfigValue( + key=config_key, + value=env_value, + source_name=self.source_name, + raw_value=env_value, + ) + + return values def supports_key(self, key: str) -> bool: - """Check if any handler supports this key.""" - return any(h.supports_key(key) for h in self._handlers) + # Check if any env var for this key is set + for env_var, config_key in self.ENV_VAR_MAPPING.items(): + if config_key == key and os.getenv(env_var) is not None: + return True + return False -class FileSource(ConfigurationSource): +class CliEnvironment(ValueSource): """ - Source for configuration files with handler precedence. + CLI environment variables source. - Default Handler Order (supports migration): - 1. SnowCLI TOML handlers (config.toml, connections.toml) ← Check first - 2. SnowSQL config handler (~/.snowsql/config) ← Fallback + Discovers SNOWFLAKE_* environment variables with two patterns: + 1. General: SNOWFLAKE_ACCOUNT (applies to all connections) + 2. Connection-specific: SNOWFLAKE_CONNECTION__ACCOUNT (overrides general) - File Path Order: - - Earlier paths take precedence over later ones - - Allows user-specific configs to override system configs + Connection-specific variables take precedence within this source. 
+ + Examples: + SNOWFLAKE_ACCOUNT -> account (general) + SNOWFLAKE_CONNECTION_PROD_ACCOUNT -> account (for "prod" connection) + SNOWFLAKE_USER -> user + SNOWFLAKE_CONNECTION_DEV_USER -> user (for "dev" connection) """ - def __init__( - self, - file_paths: Optional[List[Path]] = None, - handlers: Optional[List["SourceHandler"]] = None, - ): + # Base configuration keys that can be set via environment + CONFIG_KEYS = [ + "account", + "user", + "password", + "database", + "schema", + "role", + "warehouse", + "protocol", + "host", + "port", + "region", + "authenticator", + ] + + def __init__(self, connection_name: Optional[str] = None): """ - Initialize with file paths and handlers. + Initialize CLI environment source. Args: - file_paths: Ordered list of file paths (first = highest precedence) - handlers: Ordered list of format handlers (first = highest precedence) + connection_name: Optional connection name for connection-specific vars """ - super().__init__(handlers=handlers or []) - self._file_paths = file_paths or [] + self._connection_name = connection_name @property def source_name(self) -> str: - return "configuration_files" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE + if self._connection_name: + return f"cli_env:{self._connection_name}" + return "cli_env" - def discover_direct(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - """ - File source has no direct values. - All values come from file handlers. + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ - return {} + Discover SNOWFLAKE_* environment variables. - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + Supports two patterns: + 1. SNOWFLAKE_ACCOUNT (general) + 2. SNOWFLAKE_CONNECTION__ACCOUNT (connection-specific, higher priority) """ - Try each file path with each handler. + values: Dict[str, ConfigValue] = {} - Precedence: - 1. First file path with value - a. 
First handler that can read it with value - 2. Second file path with value - a. First handler that can read it with value - ... + # Pattern 1: General SNOWFLAKE_* variables + for config_key in self.CONFIG_KEYS: + if key is not None and config_key != key: + continue - Args: - key: Specific key to discover, or None for all + env_var = f"SNOWFLAKE_{config_key.upper()}" + env_value = os.getenv(env_var) - Returns: - Dictionary of discovered values with precedence applied - """ - discovered: Dict[str, ConfigValue] = {} + if env_value is not None: + values[config_key] = ConfigValue( + key=config_key, + value=env_value, + source_name=self.source_name, + raw_value=env_value, + ) - for file_path in self._file_paths: - if not file_path.exists(): - continue + # Pattern 2: Connection-specific SNOWFLAKE_CONNECTION__* variables + # These override general variables + if self._connection_name: + conn_prefix = f"SNOWFLAKE_CONNECTION_{self._connection_name.upper()}_" - for handler in self._handlers: - if not handler.can_handle_file(file_path): + for config_key in self.CONFIG_KEYS: + if key is not None and config_key != key: continue - try: - handler_values = handler.discover_from_file(file_path, key) - # First file+handler combination wins - for k, v in handler_values.items(): - if k not in discovered: - discovered[k] = v - except Exception as e: - log.debug( - "Handler %s failed for %s: %s", - handler.source_name, - file_path, - e, + env_var = f"{conn_prefix}{config_key.upper()}" + env_value = os.getenv(env_var) + + if env_value is not None: + # Override general variable + values[config_key] = ConfigValue( + key=config_key, + value=env_value, + source_name=self.source_name, + raw_value=env_value, ) - return discovered + return values def supports_key(self, key: str) -> bool: - """Check if any handler supports this key.""" - return any(h.supports_key(key) for h in self._handlers) + if key not in self.CONFIG_KEYS: + return False + + # Check general var + if 
os.getenv(f"SNOWFLAKE_{key.upper()}") is not None: + return True - def get_file_paths(self) -> List[Path]: - """Get current file paths list (for inspection).""" - return self._file_paths.copy() + # Check connection-specific var + if self._connection_name: + conn_var = ( + f"SNOWFLAKE_CONNECTION_{self._connection_name.upper()}_{key.upper()}" + ) + if os.getenv(conn_var) is not None: + return True - def add_file_path(self, file_path: Path, position: int = -1) -> None: + return False + + +class CliParameters(ValueSource): + """ + CLI command-line parameters source. + + Highest priority source that extracts values from parsed CLI arguments. + Values are already parsed by Typer/Click framework. + + Examples: + --account my_account -> account: "my_account" + --user alice -> user: "alice" + -a my_account -> account: "my_account" + """ + + def __init__(self, cli_context: Optional[Dict[str, Any]] = None): """ - Add file path at specific position. + Initialize CLI parameters source. Args: - file_path: Path to add - position: Insert position (-1 = append, 0 = prepend) + cli_context: Dictionary of CLI arguments (key -> value) + """ + self._cli_context = cli_context or {} + + @property + def source_name(self) -> str: + return "cli_arguments" + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ - if position == -1: - self._file_paths.append(file_path) - else: - self._file_paths.insert(position, file_path) - - def set_file_paths(self, file_paths: List[Path]) -> None: - """Replace all file paths with new ordered list.""" - self._file_paths = file_paths + Extract non-None values from CLI context. + CLI arguments are already parsed by the framework. 
+ """ + values: Dict[str, ConfigValue] = {} + + for k, v in self._cli_context.items(): + # Skip None values (not provided on CLI) + if v is None: + continue + + if key is None or k == key: + values[k] = ConfigValue( + key=k, + value=v, + source_name=self.source_name, + raw_value=v, + ) + + return values + + def supports_key(self, key: str) -> bool: + """Check if key is present in CLI context with non-None value.""" + return key in self._cli_context and self._cli_context[key] is not None diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index ad225431a7..fc71e414c8 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -144,67 +144,46 @@ def _ensure_initialized(self) -> None: return from snowflake.cli.api.cli_global_context import get_cli_context - from snowflake.cli.api.config import get_config_manager, get_connections_file from snowflake.cli.api.config_ng import ( - CliArgumentSource, + CliConfigFile, + CliEnvironment, + CliParameters, ConfigurationResolver, - EnvironmentSource, - FileSource, - IniFileHandler, - SnowCliEnvHandler, - SnowSqlEnvHandler, - TomlFileHandler, - get_snowsql_config_paths, + ConnectionsConfigFile, + SnowSQLConfigFile, + SnowSQLEnvironment, ) - # Get CLI context safely + # Get CLI context and connection name safely try: cli_context = get_cli_context().connection_context cli_context_dict = cli_context.present_values_as_dict() + connection_name = cli_context_dict.get("connection", "default") except Exception: cli_context_dict = {} - - # 1. CLI Arguments Source (Priority 1 - Highest) - cli_source = CliArgumentSource(cli_context=cli_context_dict) - - # 2. Environment Variables Source (Priority 2 - Medium) - env_source = EnvironmentSource( - handlers=[ - SnowCliEnvHandler(), # SNOWFLAKE_* checked first - SnowSqlEnvHandler(), # SNOWSQL_* checked second (fallback) - ] - ) - - # 3. 
Configuration Files Source (Priority 3 - Lowest) - config_manager = get_config_manager() - connections_file = get_connections_file() - - file_paths = [] - # Add connections file if it exists - if connections_file and connections_file.exists(): - file_paths.append(connections_file) - # Add main config file - if config_manager.file_path.exists(): - file_paths.append(config_manager.file_path) - # Add SnowSQL config paths - file_paths.extend(get_snowsql_config_paths()) - - file_source = FileSource( - file_paths=file_paths, - handlers=[ - # SnowCLI TOML handlers (tried first) - TomlFileHandler(section_path=["connections"]), - TomlFileHandler(section_path=["cli"]), - TomlFileHandler(), # Root level - # SnowSQL handler (tried last, fallback) - IniFileHandler(source_name="snowsql_config"), - ], - ) - - # Create resolver with all sources - self._resolver = ConfigurationResolver( - sources=[cli_source, env_source, file_source], track_history=True - ) + connection_name = "default" + + # Create sources in precedence order (lowest to highest priority) + # Order: SnowSQL config -> CLI config -> connections.toml -> + # SnowSQL env -> CLI env -> CLI arguments + + sources = [ + # 1. SnowSQL config files (lowest priority, merged) + SnowSQLConfigFile(connection_name=connection_name), + # 2. CLI config.toml (first-found behavior) + CliConfigFile(connection_name=connection_name), + # 3. Dedicated connections.toml + ConnectionsConfigFile(connection_name=connection_name), + # 4. SnowSQL environment variables (SNOWSQL_*) + SnowSQLEnvironment(), + # 5. CLI environment variables (SNOWFLAKE_* and SNOWFLAKE_CONNECTION_*) + CliEnvironment(connection_name=connection_name), + # 6. 
CLI command-line arguments (highest priority) + CliParameters(cli_context=cli_context_dict), + ] + + # Create resolver with all sources in order + self._resolver = ConfigurationResolver(sources=sources, track_history=True) self._initialized = True diff --git a/tests/config_ng/conftest.py b/tests/config_ng/conftest.py index a90371a1c2..c45a8685a6 100644 --- a/tests/config_ng/conftest.py +++ b/tests/config_ng/conftest.py @@ -24,20 +24,13 @@ from contextlib import contextmanager from dataclasses import dataclass from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple import pytest import tomlkit -from snowflake.cli.api.config_ng import ( - CliArgumentSource, - ConfigurationResolver, - EnvironmentSource, - FileSource, - IniFileHandler, - SnowCliEnvHandler, - SnowSqlEnvHandler, - TomlFileHandler, -) + +if TYPE_CHECKING: + from snowflake.cli.api.config_ng import ConfigurationResolver @dataclass @@ -316,52 +309,72 @@ def _setup_connections_toml(self, source: ConnectionsToml): self.connections_toml_path = self.snowflake_dir / "connections.toml" self.connections_toml_path.write_text(config_content) - def get_resolver(self) -> ConfigurationResolver: + def get_resolver(self) -> "ConfigurationResolver": """ Create a ConfigurationResolver with all configured sources. 
Returns: ConfigurationResolver instance with all sources configured """ - sources_list: List[Any] = [] + from snowflake.cli.api.config_ng import ( + CliConfigFile, + CliEnvironment, + CliParameters, + ConfigurationResolver, + ConnectionsConfigFile, + SnowSQLConfigFile, + SnowSQLEnvironment, + ) - # CLI Arguments Source (highest priority) - if self.cli_args_dict: - cli_source = CliArgumentSource(cli_context=self.cli_args_dict) - sources_list.append(cli_source) + sources_list: List[Any] = [] - # Environment Variables Source - env_handlers = [SnowCliEnvHandler(), SnowSqlEnvHandler()] - env_source = EnvironmentSource(handlers=env_handlers) - sources_list.append(env_source) + # Create sources in precedence order (lowest to highest) - # File Sources - file_paths: List[Path] = [] - file_handlers = [] + # 1. SnowSQL config files (lowest priority) - if configured + if self.snowsql_config_path and self.snowsql_config_path.exists(): + # Create a custom SnowSQL source that reads from our test path + class TestSnowSQLConfig(SnowSQLConfigFile): + def __init__(self, config_path: Path, conn_name: str): + super().__init__(connection_name=conn_name) + self._config_files = [config_path] + + sources_list.append( + TestSnowSQLConfig(self.snowsql_config_path, self.connection_name) + ) - # Add CLI config files (higher priority) + # 2. CLI config.toml - if configured if self.cli_config_path and self.cli_config_path.exists(): - file_paths.append(self.cli_config_path) - file_handlers.append( - TomlFileHandler(section_path=["connections", self.connection_name]) + # Create a custom CLI config source that reads from our test path + class TestCliConfig(CliConfigFile): + def __init__(self, config_path: Path, conn_name: str): + super().__init__(connection_name=conn_name) + self._search_paths = [config_path] + + sources_list.append( + TestCliConfig(self.cli_config_path, self.connection_name) ) + # 3. 
Connections.toml - if configured if self.connections_toml_path and self.connections_toml_path.exists(): - file_paths.append(self.connections_toml_path) - file_handlers.append( - TomlFileHandler(section_path=["connections", self.connection_name]) + # Create a custom connections source that reads from our test path + class TestConnectionsConfig(ConnectionsConfigFile): + def __init__(self, config_path: Path, conn_name: str): + super().__init__(connection_name=conn_name) + self._file_path = config_path + + sources_list.append( + TestConnectionsConfig(self.connections_toml_path, self.connection_name) ) - # Add SnowSQL config files (lower priority) - if self.snowsql_config_path and self.snowsql_config_path.exists(): - file_paths.append(self.snowsql_config_path) - file_handlers.append( - IniFileHandler(section_path=["connections", self.connection_name]) - ) + # 4. SnowSQL environment variables + sources_list.append(SnowSQLEnvironment()) - if file_paths: - file_source = FileSource(file_paths=file_paths, handlers=file_handlers) - sources_list.append(file_source) + # 5. CLI environment variables + sources_list.append(CliEnvironment(connection_name=self.connection_name)) + + # 6. CLI arguments (highest priority) - if configured + if self.cli_args_dict: + sources_list.append(CliParameters(cli_context=self.cli_args_dict)) return ConfigurationResolver(sources=sources_list, track_history=True) diff --git a/tests/config_ng/test_cli_argument_source.py b/tests/config_ng/test_cli_argument_source.py deleted file mode 100644 index 92e6de46b1..0000000000 --- a/tests/config_ng/test_cli_argument_source.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for CliArgumentSource. - -Tests verify: -- Highest priority source (CLI_ARGUMENT) -- Direct value discovery from CLI context -- None value filtering -- No handler support -- Source identification -""" - -from snowflake.cli.api.config_ng.core import SourcePriority -from snowflake.cli.api.config_ng.sources import CliArgumentSource - - -class TestCliArgumentSource: - """Test suite for CliArgumentSource.""" - - def test_create_with_empty_context(self): - """Should create source with empty context.""" - source = CliArgumentSource() - - assert source.source_name == "cli_arguments" - assert source.priority == SourcePriority.CLI_ARGUMENT - - def test_create_with_context(self): - """Should create source with provided context.""" - context = {"account": "my_account", "user": "my_user"} - source = CliArgumentSource(cli_context=context) - - values = source.discover() - assert len(values) == 2 - assert values["account"].value == "my_account" - assert values["user"].value == "my_user" - - def test_has_highest_priority(self): - """Should have CLI_ARGUMENT priority (highest).""" - source = CliArgumentSource() - assert source.priority == SourcePriority.CLI_ARGUMENT - assert source.priority.value == 1 - - def test_discover_all_values(self): - """Should discover all non-None values when key is None.""" - context = {"account": "my_account", "user": "my_user", "port": 443} - source = CliArgumentSource(cli_context=context) - - values = source.discover() - - assert len(values) == 3 - assert values["account"].value == "my_account" - assert values["user"].value 
== "my_user" - assert values["port"].value == 443 - - def test_discover_specific_key(self): - """Should discover specific key when provided.""" - context = {"account": "my_account", "user": "my_user"} - source = CliArgumentSource(cli_context=context) - - values = source.discover(key="account") - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "my_account" - - def test_discover_nonexistent_key(self): - """Should return empty dict for nonexistent key.""" - context = {"account": "my_account"} - source = CliArgumentSource(cli_context=context) - - values = source.discover(key="nonexistent") - - assert len(values) == 0 - - def test_filters_none_values(self): - """Should not include None values in discovery.""" - context = {"account": "my_account", "user": None, "password": None} - source = CliArgumentSource(cli_context=context) - - values = source.discover() - - assert len(values) == 1 - assert "account" in values - assert "user" not in values - assert "password" not in values - - def test_filters_none_for_specific_key(self): - """Should return empty dict if specific key has None value.""" - context = {"account": None} - source = CliArgumentSource(cli_context=context) - - values = source.discover(key="account") - - assert len(values) == 0 - - def test_values_have_correct_metadata(self): - """Discovered values should have correct metadata.""" - context = {"account": "my_account"} - source = CliArgumentSource(cli_context=context) - - values = source.discover(key="account") - config_value = values["account"] - - assert config_value.key == "account" - assert config_value.value == "my_account" - assert config_value.source_name == "cli_arguments" - assert config_value.priority == SourcePriority.CLI_ARGUMENT - assert config_value.raw_value == "my_account" - - def test_supports_existing_key(self): - """Should return True for keys present in context.""" - context = {"account": "my_account"} - source = 
CliArgumentSource(cli_context=context) - - assert source.supports_key("account") is True - - def test_supports_nonexistent_key(self): - """Should return False for keys not in context.""" - context = {"account": "my_account"} - source = CliArgumentSource(cli_context=context) - - assert source.supports_key("nonexistent") is False - - def test_supports_key_with_none_value(self): - """Should still support key even if value is None.""" - context = {"account": None} - source = CliArgumentSource(cli_context=context) - - assert source.supports_key("account") is True - - def test_no_handlers(self): - """CLI source should not have any handlers.""" - source = CliArgumentSource() - - handlers = source.get_handlers() - assert len(handlers) == 0 - - def test_discover_direct_returns_same_as_discover(self): - """discover_direct should return same values as discover.""" - context = {"account": "my_account", "user": "my_user"} - source = CliArgumentSource(cli_context=context) - - direct_values = source.discover_direct() - discovered_values = source.discover() - - assert direct_values == discovered_values - - def test_handles_various_value_types(self): - """Should handle different value types correctly.""" - context = { - "string_val": "text", - "int_val": 42, - "bool_val": True, - "list_val": [1, 2, 3], - "dict_val": {"key": "value"}, - } - source = CliArgumentSource(cli_context=context) - - values = source.discover() - - assert len(values) == 5 - assert values["string_val"].value == "text" - assert values["int_val"].value == 42 - assert values["bool_val"].value is True - assert values["list_val"].value == [1, 2, 3] - assert values["dict_val"].value == {"key": "value"} - - def test_empty_context_returns_empty_dict(self): - """Empty context should return empty discovery result.""" - source = CliArgumentSource(cli_context={}) - - values = source.discover() - - assert len(values) == 0 - - def test_raw_value_equals_parsed_value(self): - """For CLI arguments, raw_value should equal 
parsed value.""" - context = {"account": "my_account"} - source = CliArgumentSource(cli_context=context) - - values = source.discover(key="account") - config_value = values["account"] - - assert config_value.raw_value == config_value.value - - def test_multiple_discover_calls_consistent(self): - """Multiple discover calls should return consistent results.""" - context = {"account": "my_account"} - source = CliArgumentSource(cli_context=context) - - values1 = source.discover() - values2 = source.discover() - - assert values1 == values2 diff --git a/tests/config_ng/test_config_value.py b/tests/config_ng/test_config_value.py index e633acdd88..cfd9748065 100644 --- a/tests/config_ng/test_config_value.py +++ b/tests/config_ng/test_config_value.py @@ -22,7 +22,7 @@ - Representation formatting """ -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority +from snowflake.cli.api.config_ng.core import ConfigValue class TestConfigValue: @@ -34,13 +34,11 @@ def test_create_basic_config_value(self): key="account", value="my_account", source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, ) assert cv.key == "account" assert cv.value == "my_account" assert cv.source_name == "cli_arguments" - assert cv.priority == SourcePriority.CLI_ARGUMENT assert cv.raw_value is None def test_create_config_value_with_raw_value(self): @@ -48,8 +46,7 @@ def test_create_config_value_with_raw_value(self): cv = ConfigValue( key="port", value=443, - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, + source_name="cli_env", raw_value="443", ) @@ -65,7 +62,6 @@ def test_repr_without_conversion(self): key="account", value="my_account", source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, ) repr_str = repr(cv) @@ -78,8 +74,7 @@ def test_repr_with_conversion(self): cv = ConfigValue( key="port", value=443, - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, + source_name="cli_env", raw_value="443", ) @@ -87,7 +82,7 @@ 
def test_repr_with_conversion(self): assert "port" in repr_str assert "443" in repr_str assert "→" in repr_str - assert "snowflake_cli_env" in repr_str + assert "cli_env" in repr_str def test_repr_with_same_raw_and_parsed_value(self): """__repr__ should not show conversion when values are the same.""" @@ -95,7 +90,6 @@ def test_repr_with_same_raw_and_parsed_value(self): key="account", value="my_account", source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, raw_value="my_account", ) @@ -107,8 +101,7 @@ def test_boolean_conversion_example(self): cv = ConfigValue( key="enable_diag", value=True, - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, + source_name="cli_env", raw_value="true", ) @@ -122,8 +115,7 @@ def test_integer_conversion_example(self): cv = ConfigValue( key="timeout", value=30, - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, + source_name="cli_env", raw_value="30", ) @@ -138,7 +130,6 @@ def test_snowsql_key_mapping_example(self): key="account", value="my_account", source_name="snowsql_config", - priority=SourcePriority.FILE, raw_value="accountname=my_account", ) @@ -152,7 +143,6 @@ def test_none_value(self): key="optional_field", value=None, source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, ) assert cv.value is None @@ -163,61 +153,38 @@ def test_complex_value_types(self): cv_list = ConfigValue( key="tags", value=["tag1", "tag2"], - source_name="toml:connections", - priority=SourcePriority.FILE, + source_name="connections_toml", ) cv_dict = ConfigValue( key="metadata", value={"key1": "value1", "key2": "value2"}, - source_name="toml:connections", - priority=SourcePriority.FILE, + source_name="connections_toml", ) assert cv_list.value == ["tag1", "tag2"] assert cv_dict.value == {"key1": "value1", "key2": "value2"} - def test_all_priority_levels(self): - """Should work with all priority levels.""" + def test_different_source_names(self): + """Should work with different 
source names.""" cv_cli = ConfigValue( key="account", value="cli_account", source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, ) cv_env = ConfigValue( key="account", value="env_account", - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, + source_name="cli_env", ) cv_file = ConfigValue( key="account", value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, + source_name="connections_toml", ) - assert cv_cli.priority == SourcePriority.CLI_ARGUMENT - assert cv_env.priority == SourcePriority.ENVIRONMENT - assert cv_file.priority == SourcePriority.FILE - - def test_priority_comparison(self): - """Should be able to compare priorities.""" - cv_high = ConfigValue( - key="account", - value="high", - source_name="cli", - priority=SourcePriority.CLI_ARGUMENT, - ) - - cv_low = ConfigValue( - key="account", - value="low", - source_name="file", - priority=SourcePriority.FILE, - ) - - assert cv_high.priority.value < cv_low.priority.value + assert cv_cli.source_name == "cli_arguments" + assert cv_env.source_name == "cli_env" + assert cv_file.source_name == "connections_toml" diff --git a/tests/config_ng/test_configuration_resolver.py b/tests/config_ng/test_configuration_resolver.py deleted file mode 100644 index 07e87efcb7..0000000000 --- a/tests/config_ng/test_configuration_resolver.py +++ /dev/null @@ -1,441 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -""" -Unit tests for ConfigurationResolver. - -Tests verify: -- Source orchestration -- Precedence rules (CLI > Env > Files) -- History tracking integration -- Resolution methods -- Debugging utilities -""" - -import json - -from snowflake.cli.api.config_ng.resolver import ConfigurationResolver -from snowflake.cli.api.config_ng.sources import ( - CliArgumentSource, - EnvironmentSource, - FileSource, -) - - -class TestConfigurationResolver: - """Test suite for ConfigurationResolver.""" - - def test_create_resolver_empty(self): - """Should create resolver with no sources.""" - resolver = ConfigurationResolver() - - assert len(resolver.get_sources()) == 0 - - def test_create_resolver_with_sources(self): - """Should create resolver with provided sources.""" - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - sources = resolver.get_sources() - assert len(sources) == 1 - - def test_sources_sorted_by_priority(self): - """Should sort sources by priority (highest first).""" - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = EnvironmentSource(handlers=[]) - file_source = FileSource(file_paths=[], handlers=[]) - - # Add in wrong order - resolver = ConfigurationResolver(sources=[file_source, cli_source, env_source]) - - sources = resolver.get_sources() - # Should be sorted: CLI (1), Env (2), File (3) - assert sources[0].priority.value == 1 # CLI - assert sources[1].priority.value == 2 # Env - assert sources[2].priority.value == 3 # File - - def test_add_source(self): - """Should add source and re-sort.""" - resolver = ConfigurationResolver() - - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - resolver.add_source(cli_source) - - assert len(resolver.get_sources()) == 1 - - def test_resolve_from_single_source(self): - """Should resolve values from single source.""" - cli_source = CliArgumentSource( - cli_context={"account": 
"my_account", "user": "my_user"} - ) - resolver = ConfigurationResolver(sources=[cli_source]) - - config = resolver.resolve() - - assert config["account"] == "my_account" - assert config["user"] == "my_user" - - def test_resolve_specific_key(self): - """Should resolve specific key only.""" - cli_source = CliArgumentSource( - cli_context={"account": "my_account", "user": "my_user"} - ) - resolver = ConfigurationResolver(sources=[cli_source]) - - config = resolver.resolve(key="account") - - assert len(config) == 1 - assert config["account"] == "my_account" - - def test_resolve_value_method(self): - """Should resolve single value.""" - cli_source = CliArgumentSource(cli_context={"account": "my_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - account = resolver.resolve_value("account") - - assert account == "my_account" - - def test_resolve_with_default(self): - """Should return default when key not found.""" - resolver = ConfigurationResolver() - - value = resolver.resolve_value("missing_key", default="default_value") - - assert value == "default_value" - - def test_cli_overrides_env(self, monkeypatch): - """CLI values should override environment values.""" - from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - - resolver = ConfigurationResolver(sources=[cli_source, env_source]) - - account = resolver.resolve_value("account") - - assert account == "cli_account" - - def test_env_overrides_file(self, tmp_path, monkeypatch): - """Environment values should override file values.""" - from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler - from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler - - # Create config file - config_file = tmp_path / "config.toml" - config_file.write_text('[default]\naccount = 
"file_account"\n') - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[env_source, file_source]) - - account = resolver.resolve_value("account") - - assert account == "env_account" - - def test_complete_precedence_chain(self, tmp_path, monkeypatch): - """Test complete precedence: CLI > Env > File.""" - from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler - from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler - - # Create config file - config_file = tmp_path / "config.toml" - config_file.write_text('[default]\naccount = "file_account"\n') - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - account = resolver.resolve_value("account") - - # CLI should win - assert account == "cli_account" - - def test_fallback_to_lower_priority(self, tmp_path, monkeypatch): - """Should use lower priority source when higher doesn't have value.""" - from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler - from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler - - # Create config file - config_file = tmp_path / "config.toml" - config_file.write_text( - '[default]\naccount = "file_account"\nuser = "file_user"\n' - ) - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - - # CLI doesn't have any values - cli_source = CliArgumentSource(cli_context={}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - 
file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - config = resolver.resolve() - - # Account from env, user from file - assert config["account"] == "env_account" - assert config["user"] == "file_user" - - def test_get_resolution_history(self): - """Should get resolution history for a key.""" - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - resolver.resolve() - - history = resolver.get_resolution_history("account") - - assert history is not None - assert history.key == "account" - assert history.final_value == "cli_account" - - def test_get_all_histories(self): - """Should get all resolution histories.""" - cli_source = CliArgumentSource( - cli_context={"account": "my_account", "user": "my_user"} - ) - resolver = ConfigurationResolver(sources=[cli_source]) - - resolver.resolve() - - histories = resolver.get_all_histories() - - assert len(histories) == 2 - assert "account" in histories - assert "user" in histories - - def test_get_value_metadata(self): - """Should get metadata for resolved value.""" - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - resolver.resolve() - - metadata = resolver.get_value_metadata("account") - - assert metadata is not None - assert metadata.key == "account" - assert metadata.value == "cli_account" - assert metadata.source_name == "cli_arguments" - - def test_get_history_summary(self, tmp_path, monkeypatch): - """Should get summary statistics.""" - from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler - from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler - - config_file = tmp_path / "config.toml" - config_file.write_text( - '[default]\naccount = "file_account"\nuser = "file_user"\n' - ) - - 
monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - - cli_source = CliArgumentSource(cli_context={}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - config = resolver.resolve() - - summary = resolver.get_history_summary() - - # Check that we resolved at least the expected keys - assert summary["total_keys_resolved"] >= 2 - assert summary["keys_with_overrides"] >= 1 # account overridden - assert ( - summary["source_wins"]["snowflake_cli_env"] >= 1 - ) # account (and possibly others) - assert summary["source_wins"]["toml:default"] >= 1 # user and possibly others - - def test_disable_enable_history(self): - """Should disable and enable history tracking.""" - cli_source = CliArgumentSource(cli_context={"account": "my_account"}) - resolver = ConfigurationResolver(sources=[cli_source], track_history=False) - - resolver.resolve() - - # No history tracked - histories = resolver.get_all_histories() - assert len(histories) == 0 - - # Enable and resolve again - resolver.enable_history() - resolver.resolve() - - histories = resolver.get_all_histories() - assert len(histories) == 1 - - def test_clear_history(self): - """Should clear resolution history.""" - cli_source = CliArgumentSource(cli_context={"account": "my_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - resolver.resolve() - assert len(resolver.get_all_histories()) == 1 - - resolver.clear_history() - assert len(resolver.get_all_histories()) == 0 - - def test_format_resolution_chain(self): - """Should format resolution chain.""" - cli_source = CliArgumentSource(cli_context={"account": "my_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - resolver.resolve() - formatted = resolver.format_resolution_chain("account") - - assert "account resolution chain" in 
formatted - assert "my_account" in formatted - assert "SELECTED" in formatted - - def test_format_resolution_chain_nonexistent_key(self): - """Should return message for nonexistent key.""" - resolver = ConfigurationResolver() - - formatted = resolver.format_resolution_chain("nonexistent") - - assert "No resolution history found" in formatted - - def test_format_all_chains(self): - """Should format all resolution chains.""" - cli_source = CliArgumentSource( - cli_context={"account": "my_account", "user": "my_user"} - ) - resolver = ConfigurationResolver(sources=[cli_source]) - - resolver.resolve() - formatted = resolver.format_all_chains() - - assert "Configuration Resolution History" in formatted - assert "account resolution chain" in formatted - assert "user resolution chain" in formatted - - def test_format_all_chains_when_empty(self): - """Should return message when no history available.""" - resolver = ConfigurationResolver(track_history=False) - - formatted = resolver.format_all_chains() - - assert "No resolution history available" in formatted - - def test_export_history(self, tmp_path): - """Should export history to JSON file.""" - cli_source = CliArgumentSource( - cli_context={"account": "my_account", "user": "my_user"} - ) - resolver = ConfigurationResolver(sources=[cli_source]) - - resolver.resolve() - - export_file = tmp_path / "debug_config.json" - resolver.export_history(export_file) - - assert export_file.exists() - - # Check JSON structure - with open(export_file) as f: - data = json.load(f) - - assert "summary" in data - assert "histories" in data - assert "account" in data["histories"] - assert "user" in data["histories"] - - def test_source_error_does_not_break_resolution(self): - """Should continue resolution if a source fails.""" - from snowflake.cli.api.config_ng.core import SourcePriority, ValueSource - - class FailingSource(ValueSource): - @property - def source_name(self) -> str: - return "failing_source" - - @property - def priority(self) 
-> SourcePriority: - return SourcePriority.ENVIRONMENT - - def discover(self, key=None): - raise RuntimeError("Source failed") - - def supports_key(self, key: str) -> bool: - return True - - failing_source = FailingSource() - cli_source = CliArgumentSource(cli_context={"account": "my_account"}) - - resolver = ConfigurationResolver(sources=[failing_source, cli_source]) - - # Should still get value from CLI source - account = resolver.resolve_value("account") - assert account == "my_account" - - def test_get_sources_returns_copy(self): - """get_sources should return a copy.""" - cli_source = CliArgumentSource(cli_context={"account": "my_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - sources = resolver.get_sources() - sources.clear() - - # Original sources should be unchanged - assert len(resolver.get_sources()) == 1 - - def test_resolve_with_no_sources(self): - """Should return empty dict when no sources configured.""" - resolver = ConfigurationResolver() - - config = resolver.resolve() - - assert config == {} - - def test_resolve_value_returns_default_when_not_found(self): - """Should return default value when key not found.""" - resolver = ConfigurationResolver() - - value = resolver.resolve_value("missing", default="default_value") - - assert value == "default_value" - - def test_multiple_resolve_calls_consistent(self): - """Multiple resolve calls should return consistent results.""" - cli_source = CliArgumentSource(cli_context={"account": "my_account"}) - resolver = ConfigurationResolver(sources=[cli_source]) - - config1 = resolver.resolve() - config2 = resolver.resolve() - - assert config1 == config2 diff --git a/tests/config_ng/test_env_handler_migration.py b/tests/config_ng/test_env_handler_migration.py deleted file mode 100644 index 4e3fcc29c6..0000000000 --- a/tests/config_ng/test_env_handler_migration.py +++ /dev/null @@ -1,321 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Integration tests for environment variable handler migration scenarios. - -Tests verify: -- Migration from SnowSQL to SnowCLI environment variables -- Handler ordering (SNOWFLAKE_* overrides SNOWSQL_*) -- Fallback behavior for unmigrated keys -- Complete migration scenarios -""" - -import os -from unittest.mock import patch - -from snowflake.cli.api.config_ng.env_handlers import ( - SnowCliEnvHandler, - SnowSqlEnvHandler, -) -from snowflake.cli.api.config_ng.sources import EnvironmentSource - - -class TestEnvironmentHandlerMigration: - """Test suite for environment variable migration scenarios.""" - - def test_pure_snowsql_environment(self): - """Scenario: User has only SNOWSQL_* environment variables.""" - env_vars = { - "SNOWSQL_ACCOUNT": "old_account", - "SNOWSQL_USER": "old_user", - "SNOWSQL_PWD": "old_password", - "SNOWSQL_WAREHOUSE": "old_warehouse", - } - - with patch.dict(os.environ, env_vars, clear=True): - # Setup source with both handlers (SnowCLI first, SnowSQL second) - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - - # All values should come from SnowSQL handler - assert len(values) == 4 - assert values["account"].value == "old_account" - assert values["account"].source_name == "snowsql_env" - assert values["user"].value == "old_user" - assert values["password"].value == "old_password" # Mapped from PWD - assert 
values["warehouse"].value == "old_warehouse" - - def test_pure_snowflake_cli_environment(self): - """Scenario: User has migrated to SNOWFLAKE_* environment variables.""" - env_vars = { - "SNOWFLAKE_ACCOUNT": "new_account", - "SNOWFLAKE_USER": "new_user", - "SNOWFLAKE_PASSWORD": "new_password", - "SNOWFLAKE_WAREHOUSE": "new_warehouse", - } - - with patch.dict(os.environ, env_vars, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - - # All values should come from SnowCLI handler - assert len(values) == 4 - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowflake_cli_env" - assert values["user"].value == "new_user" - assert values["password"].value == "new_password" - assert values["warehouse"].value == "new_warehouse" - - def test_partial_migration_snowflake_overrides_snowsql(self): - """ - Scenario: User is migrating - some SNOWFLAKE_* vars override SNOWSQL_*. - This is the key migration scenario. 
- """ - env_vars = { - # Legacy SnowSQL vars (complete set) - "SNOWSQL_ACCOUNT": "old_account", - "SNOWSQL_USER": "old_user", - "SNOWSQL_PWD": "old_password", - "SNOWSQL_WAREHOUSE": "old_warehouse", - "SNOWSQL_DATABASE": "old_database", - # New SnowCLI vars (partial migration) - "SNOWFLAKE_ACCOUNT": "new_account", - "SNOWFLAKE_USER": "new_user", - } - - with patch.dict(os.environ, env_vars, clear=True): - # Handler order: SnowCLI first (higher priority), SnowSQL second (fallback) - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - - # Migrated keys should use SNOWFLAKE_* values - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowflake_cli_env" - assert values["user"].value == "new_user" - assert values["user"].source_name == "snowflake_cli_env" - - # Unmigrated keys should fallback to SNOWSQL_* values - assert values["password"].value == "old_password" - assert values["password"].source_name == "snowsql_env" - assert values["warehouse"].value == "old_warehouse" - assert values["warehouse"].source_name == "snowsql_env" - assert values["database"].value == "old_database" - assert values["database"].source_name == "snowsql_env" - - def test_migration_with_pwd_to_password_mapping(self): - """ - Scenario: User migrates from SNOWSQL_PWD to SNOWFLAKE_PASSWORD. - Tests the key mapping during migration. 
- """ - env_vars = { - "SNOWSQL_PWD": "old_password", - "SNOWFLAKE_PASSWORD": "new_password", - } - - with patch.dict(os.environ, env_vars, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - - # SNOWFLAKE_PASSWORD should override SNOWSQL_PWD - assert len(values) == 1 # Only "password" key - assert values["password"].value == "new_password" - assert values["password"].source_name == "snowflake_cli_env" - - def test_migration_only_pwd_remains_in_snowsql(self): - """ - Scenario: User has migrated everything except password. - SNOWSQL_PWD should still work as fallback. - """ - env_vars = { - "SNOWFLAKE_ACCOUNT": "new_account", - "SNOWFLAKE_USER": "new_user", - "SNOWSQL_PWD": "old_password", # Not yet migrated - } - - with patch.dict(os.environ, env_vars, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowflake_cli_env" - assert values["user"].value == "new_user" - assert values["user"].source_name == "snowflake_cli_env" - # Password from SnowSQL (mapped from PWD) - assert values["password"].value == "old_password" - assert values["password"].source_name == "snowsql_env" - - def test_both_handlers_provide_different_keys(self): - """ - Scenario: Each handler provides unique keys that don't overlap. - """ - env_vars = { - "SNOWFLAKE_ACCOUNT": "new_account", - "SNOWSQL_WAREHOUSE": "old_warehouse", - } - - with patch.dict(os.environ, env_vars, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - - assert len(values) == 2 - assert values["account"].source_name == "snowflake_cli_env" - assert values["warehouse"].source_name == "snowsql_env" - - def test_handler_order_matters(self): - """ - Verify that handler order determines precedence. 
- First handler with value wins. - """ - env_vars = { - "SNOWFLAKE_ACCOUNT": "snowflake_value", - "SNOWSQL_ACCOUNT": "snowsql_value", - } - - with patch.dict(os.environ, env_vars, clear=True): - # Test SnowCLI first (correct order for migration) - source_snowcli_first = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - values = source_snowcli_first.discover() - assert values["account"].value == "snowflake_value" - - # Test SnowSQL first (wrong order, but tests the mechanism) - source_snowsql_first = EnvironmentSource( - handlers=[SnowSqlEnvHandler(), SnowCliEnvHandler()] - ) - values = source_snowsql_first.discover() - assert values["account"].value == "snowsql_value" - - def test_discover_specific_key_with_both_handlers(self): - """Should discover specific key considering both handlers.""" - env_vars = { - "SNOWFLAKE_ACCOUNT": "new_account", - "SNOWSQL_USER": "old_user", - } - - with patch.dict(os.environ, env_vars, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - # Discover account - should get from SnowCLI - values = source.discover(key="account") - assert values["account"].value == "new_account" - - # Discover user - should get from SnowSQL - values = source.discover(key="user") - assert values["user"].value == "old_user" - - def test_empty_environment_both_handlers(self): - """With no environment variables, both handlers should return nothing.""" - with patch.dict(os.environ, {}, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - assert len(values) == 0 - - def test_complete_migration_timeline(self): - """ - Simulates a complete migration timeline from Step 1 to Step 4. 
- """ - # Step 1: Pure SnowSQL user - env_step1 = {"SNOWSQL_ACCOUNT": "account"} - with patch.dict(os.environ, env_step1, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - values = source.discover() - assert values["account"].value == "account" - assert values["account"].source_name == "snowsql_env" - - # Step 2: Start migration - add SNOWFLAKE_ACCOUNT - env_step2 = { - "SNOWSQL_ACCOUNT": "old_account", - "SNOWFLAKE_ACCOUNT": "new_account", - } - with patch.dict(os.environ, env_step2, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - values = source.discover() - # SNOWFLAKE_* should win - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowflake_cli_env" - - # Step 3: SNOWSQL_ACCOUNT still present but ignored - env_step3 = { - "SNOWSQL_ACCOUNT": "old_account", # Still set but ignored - "SNOWFLAKE_ACCOUNT": "new_account", - } - with patch.dict(os.environ, env_step3, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - values = source.discover() - # Still uses SNOWFLAKE_* - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowflake_cli_env" - - # Step 4: Complete migration - remove SNOWSQL_ACCOUNT - env_step4 = {"SNOWFLAKE_ACCOUNT": "new_account"} - with patch.dict(os.environ, env_step4, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - values = source.discover() - # Uses SNOWFLAKE_* (no change in behavior from step 3) - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowflake_cli_env" - - def test_mixed_types_from_both_handlers(self): - """Should handle different value types from both handlers.""" - env_vars = { - "SNOWFLAKE_ACCOUNT": "my_account", # String - "SNOWFLAKE_PORT": "443", # Integer - "SNOWSQL_ENABLE_DIAG": "true", # 
Boolean - "SNOWSQL_TIMEOUT": "30", # Integer from SnowSQL - } - - with patch.dict(os.environ, env_vars, clear=True): - source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - - values = source.discover() - - assert isinstance(values["account"].value, str) - assert isinstance(values["port"].value, int) - assert isinstance(values["enable_diag"].value, bool) - assert isinstance(values["timeout"].value, int) diff --git a/tests/config_ng/test_environment_source.py b/tests/config_ng/test_environment_source.py deleted file mode 100644 index 3d69174d8c..0000000000 --- a/tests/config_ng/test_environment_source.py +++ /dev/null @@ -1,300 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for EnvironmentSource. 
- -Tests verify: -- Medium priority source (ENVIRONMENT) -- Handler-based discovery (no direct values) -- Handler ordering for migration support -- Multiple handler support -""" - -from typing import Any, Dict, Optional - -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority -from snowflake.cli.api.config_ng.handlers import SourceHandler -from snowflake.cli.api.config_ng.sources import EnvironmentSource - - -class MockEnvHandler(SourceHandler): - """Mock environment variable handler for testing.""" - - def __init__(self, data: Dict[str, Any], name: str = "mock_env_handler"): - self._data = data - self._name = name - - @property - def source_name(self) -> str: - return self._name - - @property - def priority(self) -> SourcePriority: - return SourcePriority.ENVIRONMENT - - @property - def handler_type(self) -> str: - return "mock_env" - - def can_handle(self) -> bool: - return len(self._data) > 0 - - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - if key is None: - return { - k: ConfigValue( - key=k, - value=v, - source_name=self.source_name, - priority=self.priority, - ) - for k, v in self._data.items() - } - elif key in self._data: - return { - key: ConfigValue( - key=key, - value=self._data[key], - source_name=self.source_name, - priority=self.priority, - ) - } - return {} - - def supports_key(self, key: str) -> bool: - return key in self._data - - -class TestEnvironmentSource: - """Test suite for EnvironmentSource.""" - - def test_create_with_no_handlers(self): - """Should create source with empty handler list.""" - source = EnvironmentSource() - - assert source.source_name == "environment" - assert source.priority == SourcePriority.ENVIRONMENT - assert len(source.get_handlers()) == 0 - - def test_create_with_handlers(self): - """Should create source with provided handlers.""" - handler1 = MockEnvHandler({"key1": "value1"}, "handler1") - handler2 = MockEnvHandler({"key2": "value2"}, "handler2") - - source = 
EnvironmentSource(handlers=[handler1, handler2]) - - handlers = source.get_handlers() - assert len(handlers) == 2 - - def test_has_environment_priority(self): - """Should have ENVIRONMENT priority (medium).""" - source = EnvironmentSource() - - assert source.priority == SourcePriority.ENVIRONMENT - assert source.priority.value == 2 - - def test_discover_direct_returns_empty(self): - """Environment source should have no direct values.""" - handler = MockEnvHandler({"key1": "value1"}, "handler") - source = EnvironmentSource(handlers=[handler]) - - direct_values = source.discover_direct() - - assert len(direct_values) == 0 - - def test_discover_from_single_handler(self): - """Should discover values from single handler.""" - handler = MockEnvHandler( - {"account": "my_account", "user": "my_user"}, "handler" - ) - source = EnvironmentSource(handlers=[handler]) - - values = source.discover() - - assert len(values) == 2 - assert values["account"].value == "my_account" - assert values["user"].value == "my_user" - - def test_discover_from_multiple_handlers(self): - """Should discover values from multiple handlers.""" - handler1 = MockEnvHandler({"key1": "value1"}, "handler1") - handler2 = MockEnvHandler({"key2": "value2"}, "handler2") - - source = EnvironmentSource(handlers=[handler1, handler2]) - values = source.discover() - - assert len(values) == 2 - assert values["key1"].value == "value1" - assert values["key2"].value == "value2" - - def test_handler_ordering_first_wins(self): - """First handler with value should win for same key.""" - handler1 = MockEnvHandler({"account": "handler1_account"}, "snowflake_cli_env") - handler2 = MockEnvHandler({"account": "handler2_account"}, "snowsql_env") - - source = EnvironmentSource(handlers=[handler1, handler2]) - values = source.discover(key="account") - - assert values["account"].value == "handler1_account" - assert values["account"].source_name == "snowflake_cli_env" - - def 
test_migration_scenario_snowflake_overrides_snowsql(self): - """ - Migration scenario: SNOWFLAKE_* vars should override SNOWSQL_* vars. - Simulates handler ordering for migration support. - """ - # Handler order: SnowCLI first (higher priority), SnowSQL second (fallback) - snowflake_handler = MockEnvHandler( - {"account": "new_account", "user": "new_user"}, "snowflake_cli_env" - ) - snowsql_handler = MockEnvHandler( - {"account": "old_account", "user": "old_user", "password": "old_password"}, - "snowsql_env", - ) - - source = EnvironmentSource(handlers=[snowflake_handler, snowsql_handler]) - values = source.discover() - - # New values should win - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowflake_cli_env" - assert values["user"].value == "new_user" - assert values["user"].source_name == "snowflake_cli_env" - - # Fallback to legacy for unmigrated keys - assert values["password"].value == "old_password" - assert values["password"].source_name == "snowsql_env" - - def test_discover_specific_key(self): - """Should discover specific key when provided.""" - handler = MockEnvHandler( - {"account": "my_account", "user": "my_user"}, "handler" - ) - source = EnvironmentSource(handlers=[handler]) - - values = source.discover(key="account") - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "my_account" - - def test_discover_nonexistent_key(self): - """Should return empty dict for nonexistent key.""" - handler = MockEnvHandler({"account": "my_account"}, "handler") - source = EnvironmentSource(handlers=[handler]) - - values = source.discover(key="nonexistent") - - assert len(values) == 0 - - def test_supports_key_from_any_handler(self): - """Should return True if any handler supports the key.""" - handler1 = MockEnvHandler({"key1": "value1"}, "handler1") - handler2 = MockEnvHandler({"key2": "value2"}, "handler2") - - source = EnvironmentSource(handlers=[handler1, handler2]) - - assert 
source.supports_key("key1") is True - assert source.supports_key("key2") is True - assert source.supports_key("nonexistent") is False - - def test_no_handlers_returns_empty(self): - """With no handlers, should return empty dict.""" - source = EnvironmentSource(handlers=[]) - - values = source.discover() - - assert len(values) == 0 - - def test_values_have_correct_priority(self): - """All values should have ENVIRONMENT priority.""" - handler = MockEnvHandler({"account": "my_account"}, "handler") - source = EnvironmentSource(handlers=[handler]) - - values = source.discover() - - assert values["account"].priority == SourcePriority.ENVIRONMENT - - def test_add_handler_dynamically(self): - """Should be able to add handlers after creation.""" - source = EnvironmentSource(handlers=[]) - handler = MockEnvHandler({"account": "my_account"}, "handler") - - source.add_handler(handler) - values = source.discover() - - assert len(values) == 1 - assert values["account"].value == "my_account" - - def test_set_handlers_replaces_all(self): - """Should replace all handlers with new list.""" - handler1 = MockEnvHandler({"key1": "value1"}, "handler1") - handler2 = MockEnvHandler({"key2": "value2"}, "handler2") - handler3 = MockEnvHandler({"key3": "value3"}, "handler3") - - source = EnvironmentSource(handlers=[handler1, handler2]) - source.set_handlers([handler3]) - - values = source.discover() - - assert len(values) == 1 - assert "key3" in values - assert "key1" not in values - - def test_handler_failure_does_not_break_discovery(self): - """Failed handler should not prevent other handlers from working.""" - - class FailingHandler(SourceHandler): - @property - def source_name(self) -> str: - return "failing" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.ENVIRONMENT - - @property - def handler_type(self) -> str: - return "failing" - - def can_handle(self) -> bool: - return True - - def discover(self, key=None): - raise RuntimeError("Handler failed") - - 
def supports_key(self, key: str) -> bool: - return True - - failing = FailingHandler() - working = MockEnvHandler({"account": "my_account"}, "working") - - source = EnvironmentSource(handlers=[failing, working]) - values = source.discover() - - # Should still get value from working handler - assert len(values) == 1 - assert values["account"].value == "my_account" - - def test_empty_handler_returns_no_values(self): - """Handler with no data should contribute no values.""" - empty_handler = MockEnvHandler({}, "empty") - full_handler = MockEnvHandler({"account": "my_account"}, "full") - - source = EnvironmentSource(handlers=[empty_handler, full_handler]) - values = source.discover() - - assert len(values) == 1 - assert values["account"].value == "my_account" diff --git a/tests/config_ng/test_file_handler_migration.py b/tests/config_ng/test_file_handler_migration.py deleted file mode 100644 index 2a858a476d..0000000000 --- a/tests/config_ng/test_file_handler_migration.py +++ /dev/null @@ -1,378 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Integration tests for file handler migration scenarios. 
- -Tests verify: -- File path precedence (first file wins) -- Handler ordering (TOML > SnowSQL) -- Migration from SnowSQL to SnowCLI TOML -- Complete integration with FileSource -""" - -from pathlib import Path -from tempfile import NamedTemporaryFile - -from snowflake.cli.api.config_ng.file_handlers import ( - IniFileHandler, - TomlFileHandler, -) -from snowflake.cli.api.config_ng.sources import FileSource - - -class TestFileHandlerMigration: - """Test suite for file handler migration scenarios.""" - - def test_pure_toml_configuration(self): - """Scenario: User has only SnowCLI TOML configuration.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[default]\naccount = "toml_account"\nuser = "toml_user"\n') - f.flush() - toml_path = Path(f.name) - - try: - source = FileSource( - file_paths=[toml_path], - handlers=[ - TomlFileHandler(section_path=["default"]), - IniFileHandler(), - ], - ) - - values = source.discover() - - assert len(values) == 2 - assert values["account"].value == "toml_account" - assert values["account"].source_name == "toml:default" - assert values["user"].value == "toml_user" - finally: - toml_path.unlink() - - def test_pure_snowsql_configuration(self): - """Scenario: User has only SnowSQL configuration.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write( - "[connections]\n" - "accountname = snowsql_account\n" - "username = snowsql_user\n" - ) - f.flush() - snowsql_path = Path(f.name) - - try: - source = FileSource( - file_paths=[snowsql_path], - handlers=[ - TomlFileHandler(section_path=["default"]), - IniFileHandler(), - ], - ) - - values = source.discover() - - # Values should come from SnowSQL with key mapping - assert len(values) == 2 - assert values["account"].value == "snowsql_account" - assert values["account"].source_name == "snowsql_config" - assert values["user"].value == "snowsql_user" - finally: - snowsql_path.unlink() - - def 
test_partial_migration_toml_overrides_snowsql(self): - """Scenario: User has both configs, TOML should override SnowSQL.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: - f1.write('[default]\naccount = "new_account"\n') - f1.flush() - toml_path = Path(f1.name) - - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write( - "[connections]\n" - "accountname = old_account\n" - "username = old_user\n" - "databasename = old_db\n" - ) - f2.flush() - snowsql_path = Path(f2.name) - - try: - # First file path has highest precedence - source = FileSource( - file_paths=[toml_path, snowsql_path], - handlers=[ - TomlFileHandler(section_path=["default"]), - IniFileHandler(), - ], - ) - - values = source.discover() - - # account from TOML (first file), others from SnowSQL (second file) - assert values["account"].value == "new_account" - assert values["account"].source_name == "toml:default" - assert values["user"].value == "old_user" - assert values["user"].source_name == "snowsql_config" - assert values["database"].value == "old_db" - finally: - toml_path.unlink() - snowsql_path.unlink() - - def test_handler_ordering_within_same_file(self): - """Handler order matters when both can handle same file.""" - # Create a pure TOML file that both handlers could potentially read - # TomlFileHandler will read [default], IniFileHandler will read [connections] - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - # Pure TOML format file with both sections - f.write( - '[default]\naccount = "toml_format"\n' - '[connections]\naccount = "other_format"\n' - ) - f.flush() - temp_path = Path(f.name) - - try: - # TOML handler first - should find account in [default] - source = FileSource( - file_paths=[temp_path], - handlers=[ - TomlFileHandler(section_path=["default"]), - TomlFileHandler(section_path=["connections"]), - ], - ) - - values = source.discover() - - # First TOML handler should win (reads [default]) - assert 
values["account"].value == "toml_format" - assert values["account"].source_name == "toml:default" - finally: - temp_path.unlink() - - def test_file_path_precedence_first_wins(self): - """First file path should take precedence over later ones.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: - f1.write('[default]\naccount = "file1_account"\n') - f1.flush() - file1_path = Path(f1.name) - - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write('[default]\naccount = "file2_account"\n') - f2.flush() - file2_path = Path(f2.name) - - try: - source = FileSource( - file_paths=[file1_path, file2_path], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - values = source.discover() - - # First file wins - assert values["account"].value == "file1_account" - finally: - file1_path.unlink() - file2_path.unlink() - - def test_nonexistent_files_skipped(self): - """Should skip nonexistent files gracefully.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[default]\naccount = "existing_account"\n') - f.flush() - existing_path = Path(f.name) - - nonexistent_path = Path("/nonexistent/file.toml") - - try: - source = FileSource( - file_paths=[nonexistent_path, existing_path], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - values = source.discover() - - # Should still get values from existing file - assert values["account"].value == "existing_account" - finally: - existing_path.unlink() - - def test_complete_migration_timeline(self): - """Simulates complete migration from SnowSQL to TOML.""" - # Step 1: Pure SnowSQL user - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write("[connections]\naccountname = account\nusername = user\n") - f.flush() - snowsql_path = Path(f.name) - - try: - source = FileSource( - file_paths=[snowsql_path], - handlers=[ - TomlFileHandler(section_path=["default"]), - IniFileHandler(), - ], - ) - - values = 
source.discover() - assert values["account"].value == "account" - assert values["account"].source_name == "snowsql_config" - finally: - snowsql_path.unlink() - - # Step 2: Start migration - create TOML file - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: - f1.write('[default]\naccount = "new_account"\n') - f1.flush() - toml_path = Path(f1.name) - - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write("[connections]\naccountname = old_account\nusername = old_user\n") - f2.flush() - snowsql_path = Path(f2.name) - - try: - source = FileSource( - file_paths=[toml_path, snowsql_path], - handlers=[ - TomlFileHandler(section_path=["default"]), - IniFileHandler(), - ], - ) - - values = source.discover() - # TOML overrides account, SnowSQL provides user - assert values["account"].value == "new_account" - assert values["account"].source_name == "toml:default" - assert values["user"].value == "old_user" - assert values["user"].source_name == "snowsql_config" - finally: - toml_path.unlink() - snowsql_path.unlink() - - def test_multiple_toml_handlers_different_sections(self): - """Should handle multiple TOML handlers for different sections.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write( - '[default]\naccount = "default_account"\n' - '[prod]\naccount = "prod_account"\n' - ) - f.flush() - temp_path = Path(f.name) - - try: - # Handler for [default] section - source_default = FileSource( - file_paths=[temp_path], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - # Handler for [prod] section - source_prod = FileSource( - file_paths=[temp_path], - handlers=[TomlFileHandler(section_path=["prod"])], - ) - - values_default = source_default.discover() - values_prod = source_prod.discover() - - assert values_default["account"].value == "default_account" - assert values_prod["account"].value == "prod_account" - finally: - temp_path.unlink() - - def 
test_discover_specific_key_with_migration(self): - """Should handle specific key discovery with migration.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: - f1.write('[default]\naccount = "toml_account"\n') - f1.flush() - toml_path = Path(f1.name) - - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write("[connections]\naccountname = snowsql_account\nusername = user\n") - f2.flush() - snowsql_path = Path(f2.name) - - try: - source = FileSource( - file_paths=[toml_path, snowsql_path], - handlers=[ - TomlFileHandler(section_path=["default"]), - IniFileHandler(), - ], - ) - - # Discover specific key - values = source.discover(key="account") - - # Should get from TOML (first file) - assert len(values) == 1 - assert values["account"].value == "toml_account" - - # Discover different key - values = source.discover(key="user") - - # Should get from SnowSQL (second file) - assert len(values) == 1 - assert values["user"].value == "user" - finally: - toml_path.unlink() - snowsql_path.unlink() - - def test_complex_configuration_with_all_features(self): - """Complex scenario with multiple files, handlers, and sections.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: - f1.write('[connections]\naccount = "connections_account"\n') - f1.flush() - connections_toml = Path(f1.name) - - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write("[cli]\nverbose = true\n") - f2.flush() - config_toml = Path(f2.name) - - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f3: - f3.write( - "[connections]\naccountname = legacy_account\nusername = legacy_user\n" - ) - f3.flush() - snowsql_config = Path(f3.name) - - try: - source = FileSource( - file_paths=[connections_toml, config_toml, snowsql_config], - handlers=[ - TomlFileHandler(section_path=["connections"]), - TomlFileHandler(section_path=["cli"]), - IniFileHandler(), - ], - ) - - values = source.discover() - - # 
Should get account from connections.toml (first file, first handler) - assert values["account"].value == "connections_account" - assert values["account"].source_name == "toml:connections" - - # Should get verbose from config.toml (second file, second handler) - assert values["verbose"].value is True - assert values["verbose"].source_name == "toml:cli" - - # Should get user from snowsql config (third file, third handler) - assert values["user"].value == "legacy_user" - assert values["user"].source_name == "snowsql_config" - finally: - connections_toml.unlink() - config_toml.unlink() - snowsql_config.unlink() diff --git a/tests/config_ng/test_file_source.py b/tests/config_ng/test_file_source.py deleted file mode 100644 index 0a2d1da8d3..0000000000 --- a/tests/config_ng/test_file_source.py +++ /dev/null @@ -1,463 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for FileSource. 
- -Tests verify: -- Lowest priority source (FILE) -- File-based discovery with handlers -- File path ordering for precedence -- Handler ordering within files -- File existence handling -""" - -from pathlib import Path -from typing import Any, Dict, Optional - -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority -from snowflake.cli.api.config_ng.handlers import SourceHandler -from snowflake.cli.api.config_ng.sources import FileSource - - -class MockFileHandler(SourceHandler): - """Mock file handler for testing.""" - - def __init__( - self, - data: Dict[Path, Dict[str, Any]], - name: str = "mock_file_handler", - file_extensions: Optional[list] = None, - ): - self._data = data # Path -> {key: value} - self._name = name - self._file_extensions = file_extensions or [".toml", ".conf"] - - @property - def source_name(self) -> str: - return self._name - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - @property - def handler_type(self) -> str: - return "mock_file" - - def can_handle(self) -> bool: - return len(self._data) > 0 - - def can_handle_file(self, file_path: Path) -> bool: - return file_path.suffix in self._file_extensions - - def discover_from_file( - self, file_path: Path, key: Optional[str] = None - ) -> Dict[str, ConfigValue]: - if file_path not in self._data: - return {} - - file_data = self._data[file_path] - - if key is None: - return { - k: ConfigValue( - key=k, - value=v, - source_name=self.source_name, - priority=self.priority, - ) - for k, v in file_data.items() - } - elif key in file_data: - return { - key: ConfigValue( - key=key, - value=file_data[key], - source_name=self.source_name, - priority=self.priority, - ) - } - return {} - - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - # Not used in FileSource - discover_from_file is called instead - return {} - - def supports_key(self, key: str) -> bool: - # Check if key exists in any file - return any(key in file_data for 
file_data in self._data.values()) - - -class TestFileSource: - """Test suite for FileSource.""" - - def test_create_with_no_paths_or_handlers(self): - """Should create source with empty file paths and handlers.""" - source = FileSource() - - assert source.source_name == "configuration_files" - assert source.priority == SourcePriority.FILE - assert len(source.get_file_paths()) == 0 - assert len(source.get_handlers()) == 0 - - def test_create_with_file_paths(self, tmp_path): - """Should create source with provided file paths.""" - file1 = tmp_path / "config1.toml" - file2 = tmp_path / "config2.toml" - file1.touch() - file2.touch() - - source = FileSource(file_paths=[file1, file2]) - - paths = source.get_file_paths() - assert len(paths) == 2 - assert file1 in paths - assert file2 in paths - - def test_has_file_priority(self): - """Should have FILE priority (lowest).""" - source = FileSource() - - assert source.priority == SourcePriority.FILE - assert source.priority.value == 3 - - def test_discover_direct_returns_empty(self): - """File source should have no direct values.""" - source = FileSource() - - direct_values = source.discover_direct() - - assert len(direct_values) == 0 - - def test_discover_from_single_file(self, tmp_path): - """Should discover values from single file.""" - file_path = tmp_path / "config.toml" - file_path.touch() - - handler = MockFileHandler( - {file_path: {"account": "my_account", "user": "my_user"}}, "toml_handler" - ) - - source = FileSource(file_paths=[file_path], handlers=[handler]) - values = source.discover() - - assert len(values) == 2 - assert values["account"].value == "my_account" - assert values["user"].value == "my_user" - - def test_discover_from_multiple_files(self, tmp_path): - """Should discover values from multiple files.""" - file1 = tmp_path / "config1.toml" - file2 = tmp_path / "config2.toml" - file1.touch() - file2.touch() - - handler = MockFileHandler( - { - file1: {"key1": "value1"}, - file2: {"key2": "value2"}, - }, - 
"toml_handler", - ) - - source = FileSource(file_paths=[file1, file2], handlers=[handler]) - values = source.discover() - - assert len(values) == 2 - assert values["key1"].value == "value1" - assert values["key2"].value == "value2" - - def test_file_path_ordering_first_wins(self, tmp_path): - """First file path with value should win for same key.""" - file1 = tmp_path / "config1.toml" - file2 = tmp_path / "config2.toml" - file1.touch() - file2.touch() - - handler = MockFileHandler( - { - file1: {"account": "account_from_file1"}, - file2: {"account": "account_from_file2"}, - }, - "toml_handler", - ) - - source = FileSource(file_paths=[file1, file2], handlers=[handler]) - values = source.discover(key="account") - - assert values["account"].value == "account_from_file1" - - def test_handler_ordering_first_wins(self, tmp_path): - """First handler that can read file should win for same key.""" - file_path = tmp_path / "config.toml" - file_path.touch() - - handler1 = MockFileHandler( - {file_path: {"account": "handler1_account"}}, "snowcli_toml" - ) - handler2 = MockFileHandler( - {file_path: {"account": "handler2_account"}}, "legacy_toml" - ) - - source = FileSource(file_paths=[file_path], handlers=[handler1, handler2]) - values = source.discover(key="account") - - assert values["account"].value == "handler1_account" - assert values["account"].source_name == "snowcli_toml" - - def test_skips_nonexistent_files(self, tmp_path): - """Should skip files that don't exist.""" - existing_file = tmp_path / "exists.toml" - nonexistent_file = tmp_path / "does_not_exist.toml" - existing_file.touch() - - handler = MockFileHandler( - { - existing_file: {"key1": "value1"}, - nonexistent_file: {"key2": "value2"}, - }, - "handler", - ) - - source = FileSource( - file_paths=[nonexistent_file, existing_file], handlers=[handler] - ) - values = source.discover() - - # Should only get value from existing file - assert len(values) == 1 - assert "key1" in values - assert "key2" not in values - 
- def test_skips_files_handler_cannot_handle(self, tmp_path): - """Should skip files that handler cannot handle.""" - toml_file = tmp_path / "config.toml" - json_file = tmp_path / "config.json" - toml_file.touch() - json_file.touch() - - # Handler only handles .toml files - handler = MockFileHandler( - { - toml_file: {"key1": "value1"}, - json_file: {"key2": "value2"}, - }, - "toml_handler", - file_extensions=[".toml"], - ) - - source = FileSource(file_paths=[toml_file, json_file], handlers=[handler]) - values = source.discover() - - # Should only get value from .toml file - assert len(values) == 1 - assert "key1" in values - assert "key2" not in values - - def test_migration_scenario_snowcli_overrides_snowsql(self, tmp_path): - """ - Migration scenario: SnowCLI files should override SnowSQL files. - Simulates file ordering for migration support. - """ - snowcli_file = tmp_path / "connections.toml" - snowsql_file = tmp_path / "snowsql_config" - snowcli_file.touch() - snowsql_file.touch() - - # SnowCLI handler only handles .toml files - snowcli_handler = MockFileHandler( - {snowcli_file: {"account": "new_account", "user": "new_user"}}, - "snowcli_toml", - file_extensions=[".toml"], - ) - # SnowSQL handler handles files without extension - snowsql_handler = MockFileHandler( - { - snowsql_file: { - "account": "old_account", - "user": "old_user", - "password": "old_password", - } - }, - "snowsql_config", - file_extensions=[""], # No extension - ) - - # SnowCLI file comes first (higher precedence) - source = FileSource( - file_paths=[snowcli_file, snowsql_file], - handlers=[snowcli_handler, snowsql_handler], - ) - values = source.discover() - - # New values from SnowCLI should win - assert values["account"].value == "new_account" - assert values["account"].source_name == "snowcli_toml" - assert values["user"].value == "new_user" - - # Fallback to SnowSQL for unmigrated keys - assert values["password"].value == "old_password" - assert values["password"].source_name == 
"snowsql_config" - - def test_discover_specific_key(self, tmp_path): - """Should discover specific key when provided.""" - file_path = tmp_path / "config.toml" - file_path.touch() - - handler = MockFileHandler( - {file_path: {"account": "my_account", "user": "my_user"}}, "handler" - ) - - source = FileSource(file_paths=[file_path], handlers=[handler]) - values = source.discover(key="account") - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "my_account" - - def test_discover_nonexistent_key(self, tmp_path): - """Should return empty dict for nonexistent key.""" - file_path = tmp_path / "config.toml" - file_path.touch() - - handler = MockFileHandler({file_path: {"account": "my_account"}}, "handler") - - source = FileSource(file_paths=[file_path], handlers=[handler]) - values = source.discover(key="nonexistent") - - assert len(values) == 0 - - def test_supports_key_from_any_handler(self, tmp_path): - """Should return True if any handler supports the key.""" - handler1 = MockFileHandler({tmp_path / "f1": {"key1": "value1"}}, "handler1") - handler2 = MockFileHandler({tmp_path / "f2": {"key2": "value2"}}, "handler2") - - source = FileSource(handlers=[handler1, handler2]) - - assert source.supports_key("key1") is True - assert source.supports_key("key2") is True - assert source.supports_key("nonexistent") is False - - def test_handler_failure_does_not_break_discovery(self, tmp_path): - """Failed handler should not prevent other handlers from working.""" - file_path = tmp_path / "config.toml" - file_path.touch() - - class FailingHandler(SourceHandler): - @property - def source_name(self) -> str: - return "failing" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - @property - def handler_type(self) -> str: - return "failing" - - def can_handle(self) -> bool: - return True - - def can_handle_file(self, file_path: Path) -> bool: - return True - - def discover_from_file(self, file_path: Path, 
key=None): - raise RuntimeError("Handler failed") - - def discover(self, key=None): - return {} - - def supports_key(self, key: str) -> bool: - return True - - failing = FailingHandler() - working = MockFileHandler({file_path: {"account": "my_account"}}, "working") - - source = FileSource(file_paths=[file_path], handlers=[failing, working]) - values = source.discover() - - # Should still get value from working handler - assert len(values) == 1 - assert values["account"].value == "my_account" - - def test_add_file_path_append(self, tmp_path): - """Should append file path to end of list.""" - file1 = tmp_path / "config1.toml" - file2 = tmp_path / "config2.toml" - - source = FileSource(file_paths=[file1]) - source.add_file_path(file2) - - paths = source.get_file_paths() - assert len(paths) == 2 - assert paths[1] == file2 - - def test_add_file_path_prepend(self, tmp_path): - """Should prepend file path to beginning of list.""" - file1 = tmp_path / "config1.toml" - file2 = tmp_path / "config2.toml" - - source = FileSource(file_paths=[file1]) - source.add_file_path(file2, position=0) - - paths = source.get_file_paths() - assert len(paths) == 2 - assert paths[0] == file2 - - def test_set_file_paths(self, tmp_path): - """Should replace all file paths with new list.""" - file1 = tmp_path / "config1.toml" - file2 = tmp_path / "config2.toml" - file3 = tmp_path / "config3.toml" - - source = FileSource(file_paths=[file1, file2]) - source.set_file_paths([file3]) - - paths = source.get_file_paths() - assert len(paths) == 1 - assert paths[0] == file3 - - def test_get_file_paths_returns_copy(self, tmp_path): - """get_file_paths should return a copy, not the original list.""" - file1 = tmp_path / "config.toml" - source = FileSource(file_paths=[file1]) - - paths = source.get_file_paths() - paths.clear() - - # Original list should be unchanged - assert len(source.get_file_paths()) == 1 - - def test_no_files_returns_empty(self): - """With no file paths, should return empty dict.""" - 
handler = MockFileHandler({}, "handler") - source = FileSource(file_paths=[], handlers=[handler]) - - values = source.discover() - - assert len(values) == 0 - - def test_values_have_correct_priority(self, tmp_path): - """All values should have FILE priority.""" - file_path = tmp_path / "config.toml" - file_path.touch() - - handler = MockFileHandler({file_path: {"account": "my_account"}}, "handler") - source = FileSource(file_paths=[file_path], handlers=[handler]) - - values = source.discover() - - assert values["account"].priority == SourcePriority.FILE diff --git a/tests/config_ng/test_resolution_history.py b/tests/config_ng/test_resolution_history.py deleted file mode 100644 index 5d5c83237d..0000000000 --- a/tests/config_ng/test_resolution_history.py +++ /dev/null @@ -1,482 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for Resolution History tracking. 
- -Tests verify: -- ResolutionEntry fields -- ResolutionHistory creation and properties -- Resolution chain formatting -- History export to dictionary -- Timestamp tracking -""" - -from datetime import datetime - -from snowflake.cli.api.config_ng.core import ( - ConfigValue, - ResolutionEntry, - ResolutionHistory, - SourcePriority, -) - - -class TestResolutionEntry: - """Test suite for ResolutionEntry dataclass.""" - - def test_create_resolution_entry(self): - """Should create a ResolutionEntry with all fields.""" - config_value = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - timestamp = datetime.now() - entry = ResolutionEntry( - config_value=config_value, - timestamp=timestamp, - was_used=True, - ) - - assert entry.config_value == config_value - assert entry.timestamp == timestamp - assert entry.was_used is True - assert entry.overridden_by is None - - def test_create_entry_with_override(self): - """Should create entry with overridden_by information.""" - config_value = ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ) - - entry = ResolutionEntry( - config_value=config_value, - timestamp=datetime.now(), - was_used=False, - overridden_by="cli_arguments", - ) - - assert entry.was_used is False - assert entry.overridden_by == "cli_arguments" - - -class TestResolutionHistory: - """Test suite for ResolutionHistory dataclass.""" - - def test_create_empty_resolution_history(self): - """Should create an empty ResolutionHistory.""" - history = ResolutionHistory(key="account") - - assert history.key == "account" - assert len(history.entries) == 0 - assert history.final_value is None - assert history.default_used is False - - def test_create_resolution_history_with_entries(self): - """Should create ResolutionHistory with entries.""" - config_value = ConfigValue( - key="account", - value="my_account", - 
source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - entry = ResolutionEntry( - config_value=config_value, - timestamp=datetime.now(), - was_used=True, - ) - - history = ResolutionHistory( - key="account", - entries=[entry], - final_value="my_account", - ) - - assert len(history.entries) == 1 - assert history.final_value == "my_account" - - def test_sources_consulted_property(self): - """Should return list of all source names consulted.""" - entries = [ - ResolutionEntry( - config_value=ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ), - timestamp=datetime.now(), - was_used=False, - overridden_by="cli_arguments", - ), - ResolutionEntry( - config_value=ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ), - ] - - history = ResolutionHistory(key="account", entries=entries) - - sources = history.sources_consulted - assert len(sources) == 2 - assert "toml:connections" in sources - assert "cli_arguments" in sources - - def test_values_considered_property(self): - """Should return list of all values considered.""" - entries = [ - ResolutionEntry( - config_value=ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ), - timestamp=datetime.now(), - was_used=False, - ), - ResolutionEntry( - config_value=ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ), - ] - - history = ResolutionHistory(key="account", entries=entries) - - values = history.values_considered - assert len(values) == 2 - assert "file_account" in values - assert "cli_account" in values - - def test_selected_entry_property(self): - """Should return the entry that was selected.""" - entry1 = 
ResolutionEntry( - config_value=ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ), - timestamp=datetime.now(), - was_used=False, - overridden_by="cli_arguments", - ) - - entry2 = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ) - - history = ResolutionHistory(key="account", entries=[entry1, entry2]) - - selected = history.selected_entry - assert selected == entry2 - assert selected.config_value.value == "cli_account" - - def test_selected_entry_returns_none_when_no_selection(self): - """Should return None when no entry was selected.""" - entry = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ), - timestamp=datetime.now(), - was_used=False, - ) - - history = ResolutionHistory(key="account", entries=[entry]) - - assert history.selected_entry is None - - def test_overridden_entries_property(self): - """Should return all entries that were overridden.""" - entry1 = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ), - timestamp=datetime.now(), - was_used=False, - overridden_by="cli_arguments", - ) - - entry2 = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="env_account", - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, - ), - timestamp=datetime.now(), - was_used=False, - overridden_by="cli_arguments", - ) - - entry3 = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ) - - history = ResolutionHistory(key="account", 
entries=[entry1, entry2, entry3]) - - overridden = history.overridden_entries - assert len(overridden) == 2 - assert entry1 in overridden - assert entry2 in overridden - assert entry3 not in overridden - - def test_format_chain_simple(self): - """Should format a simple resolution chain.""" - entry = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ) - - history = ResolutionHistory( - key="account", - entries=[entry], - final_value="my_account", - ) - - chain = history.format_chain() - - assert "account resolution chain (1 sources)" in chain - assert "cli_arguments" in chain - assert "my_account" in chain - assert "(SELECTED)" in chain - assert "✅" in chain - - def test_format_chain_with_override(self): - """Should format resolution chain showing override.""" - entry1 = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ), - timestamp=datetime.now(), - was_used=False, - overridden_by="cli_arguments", - ) - - entry2 = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ) - - history = ResolutionHistory( - key="account", - entries=[entry1, entry2], - final_value="cli_account", - ) - - chain = history.format_chain() - - assert "account resolution chain (2 sources)" in chain - assert "toml:connections" in chain - assert "cli_arguments" in chain - assert "overridden by cli_arguments" in chain - assert "(SELECTED)" in chain - assert "❌" in chain - assert "✅" in chain - - def test_format_chain_with_conversion(self): - """Should show conversion in formatted chain.""" - entry = ResolutionEntry( - config_value=ConfigValue( - key="port", - value=443, - 
source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, - raw_value="443", - ), - timestamp=datetime.now(), - was_used=True, - ) - - history = ResolutionHistory( - key="port", - entries=[entry], - final_value=443, - ) - - chain = history.format_chain() - - assert "port resolution chain" in chain - assert "→" in chain - assert "443" in chain - - def test_format_chain_with_default(self): - """Should show default value in formatted chain.""" - history = ResolutionHistory( - key="account", - entries=[], - final_value="default_account", - default_used=True, - ) - - chain = history.format_chain() - - assert "account resolution chain (0 sources)" in chain - assert "Default value used: default_account" in chain - - def test_to_dict_conversion(self): - """Should convert history to dictionary for JSON export.""" - entry = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ) - - history = ResolutionHistory( - key="account", - entries=[entry], - final_value="my_account", - ) - - data = history.to_dict() - - assert data["key"] == "account" - assert data["final_value"] == "my_account" - assert data["default_used"] is False - assert "cli_arguments" in data["sources_consulted"] - assert len(data["entries"]) == 1 - - entry_data = data["entries"][0] - assert entry_data["source"] == "cli_arguments" - assert entry_data["value"] == "my_account" - assert entry_data["priority"] == "CLI_ARGUMENT" - assert entry_data["was_used"] is True - - def test_to_dict_with_multiple_entries(self): - """Should convert complex history to dictionary.""" - entries = [ - ResolutionEntry( - config_value=ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - raw_value="file_account", - ), - timestamp=datetime.now(), - was_used=False, - overridden_by="cli_arguments", - ), - 
ResolutionEntry( - config_value=ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ), - ] - - history = ResolutionHistory( - key="account", - entries=entries, - final_value="cli_account", - ) - - data = history.to_dict() - - assert len(data["entries"]) == 2 - assert data["entries"][0]["overridden_by"] == "cli_arguments" - assert data["entries"][1]["was_used"] is True - - def test_resolution_history_is_mutable(self): - """ResolutionHistory should be mutable (not frozen).""" - history = ResolutionHistory(key="account") - - entry = ResolutionEntry( - config_value=ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ), - timestamp=datetime.now(), - was_used=True, - ) - - history.entries.append(entry) - history.final_value = "my_account" - - assert len(history.entries) == 1 - assert history.final_value == "my_account" - - def test_empty_history_properties(self): - """Empty history should return empty lists for properties.""" - history = ResolutionHistory(key="account") - - assert history.sources_consulted == [] - assert history.values_considered == [] - assert history.selected_entry is None - assert history.overridden_entries == [] diff --git a/tests/config_ng/test_resolution_history_tracker.py b/tests/config_ng/test_resolution_history_tracker.py deleted file mode 100644 index c7ca164de1..0000000000 --- a/tests/config_ng/test_resolution_history_tracker.py +++ /dev/null @@ -1,431 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for ResolutionHistoryTracker. - -Tests verify: -- Discovery recording -- Selection marking -- Default value tracking -- History retrieval -- Summary statistics -""" - -from datetime import datetime - -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority -from snowflake.cli.api.config_ng.resolver import ResolutionHistoryTracker - - -class TestResolutionHistoryTracker: - """Test suite for ResolutionHistoryTracker.""" - - def test_create_tracker(self): - """Should create empty tracker with tracking enabled.""" - tracker = ResolutionHistoryTracker() - - assert tracker.is_enabled() is True - assert len(tracker.get_all_histories()) == 0 - - def test_enable_disable_tracking(self): - """Should enable and disable tracking.""" - tracker = ResolutionHistoryTracker() - - tracker.disable() - assert tracker.is_enabled() is False - - tracker.enable() - assert tracker.is_enabled() is True - - def test_record_discovery(self): - """Should record value discoveries.""" - tracker = ResolutionHistoryTracker() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv) - - # Discovery recorded but history not finalized yet - assert len(tracker.get_all_histories()) == 0 - - def test_mark_selected_creates_history(self): - """Should create history when value is marked as selected.""" - tracker = ResolutionHistoryTracker() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - 
priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv) - tracker.mark_selected("account", "cli_arguments") - - history = tracker.get_history("account") - assert history is not None - assert history.key == "account" - assert history.final_value == "my_account" - assert len(history.entries) == 1 - assert history.entries[0].was_used is True - - def test_multiple_discoveries_single_selection(self): - """Should track multiple discoveries with one selected.""" - tracker = ResolutionHistoryTracker() - - # Record discoveries from multiple sources - cv_file = ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ) - cv_env = ConfigValue( - key="account", - value="env_account", - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, - ) - cv_cli = ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv_file) - tracker.record_discovery("account", cv_env) - tracker.record_discovery("account", cv_cli) - - # Mark CLI as selected - tracker.mark_selected("account", "cli_arguments") - - history = tracker.get_history("account") - assert history is not None - assert len(history.entries) == 3 - assert history.final_value == "cli_account" - - # Check which was selected - selected = [e for e in history.entries if e.was_used] - assert len(selected) == 1 - assert selected[0].config_value.source_name == "cli_arguments" - - # Check overridden entries - overridden = [e for e in history.entries if not e.was_used] - assert len(overridden) == 2 - - def test_mark_default_used(self): - """Should mark when default value is used.""" - tracker = ResolutionHistoryTracker() - - tracker.mark_default_used("missing_key", "default_value") - - history = tracker.get_history("missing_key") - assert history is not None - assert history.default_used is True - assert 
history.final_value == "default_value" - assert len(history.entries) == 0 - - def test_mark_default_after_discoveries(self): - """Should update history when default is used after discoveries.""" - tracker = ResolutionHistoryTracker() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv) - tracker.mark_selected("account", "cli_arguments") - tracker.mark_default_used("account", "default_account") - - history = tracker.get_history("account") - assert history.default_used is True - assert history.final_value == "default_account" - - def test_get_history_nonexistent_key(self): - """Should return None for keys not tracked.""" - tracker = ResolutionHistoryTracker() - - history = tracker.get_history("nonexistent") - assert history is None - - def test_get_all_histories(self): - """Should return all tracked histories.""" - tracker = ResolutionHistoryTracker() - - cv1 = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - cv2 = ConfigValue( - key="user", - value="my_user", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv1) - tracker.mark_selected("account", "cli_arguments") - - tracker.record_discovery("user", cv2) - tracker.mark_selected("user", "cli_arguments") - - histories = tracker.get_all_histories() - assert len(histories) == 2 - assert "account" in histories - assert "user" in histories - - def test_clear_history(self): - """Should clear all recorded history.""" - tracker = ResolutionHistoryTracker() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv) - tracker.mark_selected("account", "cli_arguments") - - assert len(tracker.get_all_histories()) == 1 - - tracker.clear() - - 
assert len(tracker.get_all_histories()) == 0 - - def test_disabled_tracker_does_not_record(self): - """Should not record when tracking is disabled.""" - tracker = ResolutionHistoryTracker() - tracker.disable() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv) - tracker.mark_selected("account", "cli_arguments") - - assert len(tracker.get_all_histories()) == 0 - - def test_summary_with_no_histories(self): - """Should return empty summary when no histories exist.""" - tracker = ResolutionHistoryTracker() - - summary = tracker.get_summary() - - assert summary["total_keys_resolved"] == 0 - assert summary["keys_with_overrides"] == 0 - assert summary["keys_using_defaults"] == 0 - assert len(summary["source_usage"]) == 0 - assert len(summary["source_wins"]) == 0 - - def test_summary_with_single_source(self): - """Should calculate correct summary for single source.""" - tracker = ResolutionHistoryTracker() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv) - tracker.mark_selected("account", "cli_arguments") - - summary = tracker.get_summary() - - assert summary["total_keys_resolved"] == 1 - assert summary["keys_with_overrides"] == 0 - assert summary["source_usage"]["cli_arguments"] == 1 - assert summary["source_wins"]["cli_arguments"] == 1 - - def test_summary_with_multiple_sources(self): - """Should calculate correct summary with overrides.""" - tracker = ResolutionHistoryTracker() - - # File source provides account - cv_file = ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ) - # Env source overrides account - cv_env = ConfigValue( - key="account", - value="env_account", - source_name="snowflake_cli_env", - priority=SourcePriority.ENVIRONMENT, - 
) - - tracker.record_discovery("account", cv_file) - tracker.record_discovery("account", cv_env) - tracker.mark_selected("account", "snowflake_cli_env") - - summary = tracker.get_summary() - - assert summary["total_keys_resolved"] == 1 - assert summary["keys_with_overrides"] == 1 - assert summary["source_usage"]["toml:connections"] == 1 - assert summary["source_usage"]["snowflake_cli_env"] == 1 - assert summary["source_wins"]["snowflake_cli_env"] == 1 - assert summary["source_wins"].get("toml:connections", 0) == 0 - - def test_summary_with_defaults(self): - """Should count keys using defaults.""" - tracker = ResolutionHistoryTracker() - - tracker.mark_default_used("missing_key", "default_value") - - summary = tracker.get_summary() - - assert summary["total_keys_resolved"] == 1 - assert summary["keys_using_defaults"] == 1 - - def test_entries_have_timestamps(self): - """Resolution entries should have timestamps.""" - tracker = ResolutionHistoryTracker() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - before = datetime.now() - tracker.record_discovery("account", cv) - tracker.mark_selected("account", "cli_arguments") - after = datetime.now() - - history = tracker.get_history("account") - entry_timestamp = history.entries[0].timestamp - - assert before <= entry_timestamp <= after - - def test_overridden_by_is_set_correctly(self): - """Should set overridden_by field correctly.""" - tracker = ResolutionHistoryTracker() - - cv_file = ConfigValue( - key="account", - value="file_account", - source_name="toml:connections", - priority=SourcePriority.FILE, - ) - cv_cli = ConfigValue( - key="account", - value="cli_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv_file) - tracker.record_discovery("account", cv_cli) - tracker.mark_selected("account", "cli_arguments") - - history = tracker.get_history("account") 
- - # File entry should be overridden by CLI - file_entry = [ - e - for e in history.entries - if e.config_value.source_name == "toml:connections" - ][0] - assert file_entry.was_used is False - assert file_entry.overridden_by == "cli_arguments" - - # CLI entry should be selected - cli_entry = [ - e for e in history.entries if e.config_value.source_name == "cli_arguments" - ][0] - assert cli_entry.was_used is True - assert cli_entry.overridden_by is None - - def test_get_all_histories_returns_copy(self): - """get_all_histories should return a copy.""" - tracker = ResolutionHistoryTracker() - - cv = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - tracker.record_discovery("account", cv) - tracker.mark_selected("account", "cli_arguments") - - histories1 = tracker.get_all_histories() - histories1.clear() - - histories2 = tracker.get_all_histories() - assert len(histories2) == 1 - - def test_multiple_keys_tracked_independently(self): - """Should track multiple keys independently.""" - tracker = ResolutionHistoryTracker() - - cv_account = ConfigValue( - key="account", - value="my_account", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - cv_user_file = ConfigValue( - key="user", - value="file_user", - source_name="toml:connections", - priority=SourcePriority.FILE, - ) - cv_user_cli = ConfigValue( - key="user", - value="cli_user", - source_name="cli_arguments", - priority=SourcePriority.CLI_ARGUMENT, - ) - - # Account from CLI only - tracker.record_discovery("account", cv_account) - tracker.mark_selected("account", "cli_arguments") - - # User from File and CLI - tracker.record_discovery("user", cv_user_file) - tracker.record_discovery("user", cv_user_cli) - tracker.mark_selected("user", "cli_arguments") - - # Check account history - account_history = tracker.get_history("account") - assert len(account_history.entries) == 1 - - # Check user history - user_history = 
tracker.get_history("user") - assert len(user_history.entries) == 2 diff --git a/tests/config_ng/test_resolver_integration.py b/tests/config_ng/test_resolver_integration.py deleted file mode 100644 index 7641e213c5..0000000000 --- a/tests/config_ng/test_resolver_integration.py +++ /dev/null @@ -1,377 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -End-to-end integration tests for ConfigurationResolver. - -Tests verify: -- Complete resolution workflow with all sources -- Real-world migration scenarios -- Complete precedence chains -- History tracking in production scenarios -""" - -from snowflake.cli.api.config_ng.env_handlers import ( - SnowCliEnvHandler, - SnowSqlEnvHandler, -) -from snowflake.cli.api.config_ng.file_handlers import ( - IniFileHandler, - TomlFileHandler, -) -from snowflake.cli.api.config_ng.resolver import ConfigurationResolver -from snowflake.cli.api.config_ng.sources import ( - CliArgumentSource, - EnvironmentSource, - FileSource, -) - - -class TestResolverEndToEnd: - """End-to-end integration tests for complete resolution workflow.""" - - def test_production_configuration_setup(self, tmp_path, monkeypatch): - """Test production-like configuration setup.""" - # Create SnowCLI TOML config - snowcli_config = tmp_path / "connections.toml" - snowcli_config.write_text( - "[default]\n" - 'account = "toml_account"\n' - 'user = "toml_user"\n' - 'database = "toml_db"\n' - ) - - # Set environment variables - 
monkeypatch.setenv("SNOWFLAKE_WAREHOUSE", "env_warehouse") - - # CLI arguments - cli_context = {"account": "cli_account"} - - # Create sources - cli_source = CliArgumentSource(cli_context=cli_context) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[snowcli_config], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - # Create resolver - resolver = ConfigurationResolver( - sources=[cli_source, env_source, file_source], track_history=True - ) - - # Resolve - config = resolver.resolve() - - # Verify precedence - assert config["account"] == "cli_account" # CLI wins - assert config["warehouse"] == "env_warehouse" # From env - assert config["user"] == "toml_user" # From file - assert config["database"] == "toml_db" # From file - - # Verify history - account_history = resolver.get_resolution_history("account") - assert len(account_history.entries) == 2 # TOML and CLI - assert ( - account_history.selected_entry.config_value.source_name == "cli_arguments" - ) - - def test_snowsql_to_snowcli_migration(self, tmp_path, monkeypatch): - """Test complete SnowSQL to SnowCLI migration scenario.""" - # SnowSQL config (legacy) - snowsql_config = tmp_path / "snowsql.toml" - snowsql_config.write_text( - "[connections]\n" - "accountname = old_account\n" - "username = old_user\n" - "databasename = old_db\n" - "warehousename = old_warehouse\n" - ) - - # SnowCLI config (new, partial migration) - snowcli_config = tmp_path / "connections.toml" - snowcli_config.write_text( - '[default]\naccount = "new_account"\nuser = "new_user"\n' - ) - - # Environment variables (mixed) - monkeypatch.setenv("SNOWSQL_PWD", "env_password") - monkeypatch.setenv("SNOWFLAKE_WAREHOUSE", "env_warehouse") - - # Create sources - env_source = EnvironmentSource( - handlers=[SnowCliEnvHandler(), SnowSqlEnvHandler()] - ) - file_source = FileSource( - file_paths=[snowcli_config, snowsql_config], - handlers=[ - TomlFileHandler(section_path=["default"]), 
- IniFileHandler(), - ], - ) - - resolver = ConfigurationResolver(sources=[env_source, file_source]) - - config = resolver.resolve() - - # New values should win - assert config["account"] == "new_account" # From SnowCLI TOML - assert config["user"] == "new_user" # From SnowCLI TOML - assert config["warehouse"] == "env_warehouse" # From SnowCLI env - assert ( - config["password"] == "env_password" - ) # From SnowSQL env (mapped from PWD) - - # Legacy values as fallback - assert config["database"] == "old_db" # From SnowSQL config - - def test_debugging_complete_workflow(self, tmp_path, monkeypatch): - """Test complete debugging workflow.""" - # Setup multi-source config - config_file = tmp_path / "config.toml" - config_file.write_text('[default]\naccount = "file_account"\n') - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - # Resolve - resolver.resolve() - - # Format resolution chain - formatted = resolver.format_resolution_chain("account") - - # Verify chain shows all sources - assert "file_account" in formatted - assert "env_account" in formatted - assert "cli_account" in formatted - assert "SELECTED" in formatted - - def test_history_export_complete(self, tmp_path, monkeypatch): - """Test complete history export for debugging.""" - config_file = tmp_path / "config.toml" - config_file.write_text( - "[default]\n" - 'account = "file_account"\n' - 'user = "file_user"\n' - 'database = "file_db"\n' - ) - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - monkeypatch.setenv("SNOWFLAKE_USER", "env_user") - - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = 
EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - config = resolver.resolve() - - # Export history - export_file = tmp_path / "debug.json" - resolver.export_history(export_file) - - # Verify export contains all keys - import json - - with open(export_file) as f: - data = json.load(f) - - assert "account" in data["histories"] - assert "user" in data["histories"] - assert "database" in data["histories"] - - # Verify summary (may have more keys than expected from TOML) - assert data["summary"]["total_keys_resolved"] >= 3 - assert data["summary"]["keys_with_overrides"] >= 2 # account and user - - def test_cli_override_everything(self, tmp_path, monkeypatch): - """Test CLI arguments override all other sources.""" - # Setup all sources with same key - config_file = tmp_path / "config.toml" - config_file.write_text('[default]\naccount = "file_account"\n') - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - account = resolver.resolve_value("account") - - assert account == "cli_account" - - # Verify all sources were consulted - history = resolver.get_resolution_history("account") - assert len(history.entries) == 3 - assert len(history.overridden_entries) == 2 - - def test_layered_fallback(self, tmp_path, monkeypatch): - """Test layered fallback across multiple sources.""" - config_file = tmp_path / "config.toml" - config_file.write_text( - "[default]\n" - 'account = "file_account"\n' - 'user = "file_user"\n' - 'database = 
"file_db"\n' - 'warehouse = "file_warehouse"\n' - 'role = "file_role"\n' - ) - - # Env only provides some values - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - monkeypatch.setenv("SNOWFLAKE_USER", "env_user") - - # CLI only provides one value - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - config = resolver.resolve() - - # Verify layered fallback - assert config["account"] == "cli_account" # From CLI - assert config["user"] == "env_user" # From Env (CLI didn't have it) - assert config["database"] == "file_db" # From File (neither CLI nor Env had it) - assert config["warehouse"] == "file_warehouse" # From File - assert config["role"] == "file_role" # From File - - def test_summary_statistics_complete(self, tmp_path, monkeypatch): - """Test summary statistics for complete resolution.""" - config_file = tmp_path / "config.toml" - config_file.write_text( - "[default]\n" - 'account = "file_account"\n' - 'user = "file_user"\n' - 'database = "file_db"\n' - ) - - monkeypatch.setenv("SNOWFLAKE_ACCOUNT", "env_account") - monkeypatch.setenv("SNOWFLAKE_USER", "env_user") - - cli_source = CliArgumentSource(cli_context={"account": "cli_account"}) - env_source = EnvironmentSource(handlers=[SnowCliEnvHandler()]) - file_source = FileSource( - file_paths=[config_file], - handlers=[TomlFileHandler(section_path=["default"])], - ) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - config = resolver.resolve() - - summary = resolver.get_history_summary() - - # At least 3 keys - assert summary["total_keys_resolved"] >= 3 - - # account and user have overrides - assert summary["keys_with_overrides"] >= 2 - - # Source usage: File provided at least 3, Env 
provided at least 2, CLI provided 1 - assert summary["source_usage"]["toml:default"] >= 3 - assert summary["source_usage"]["snowflake_cli_env"] >= 2 - assert summary["source_usage"]["cli_arguments"] == 1 - - # Source wins: CLI won 1 (account), Env won at least 1 (user), File won at least 1 (database) - assert summary["source_wins"]["cli_arguments"] == 1 # account - assert ( - summary["source_wins"]["snowflake_cli_env"] >= 1 - ) # user and possibly others - assert ( - summary["source_wins"]["toml:default"] >= 1 - ) # database and possibly others - - def test_no_sources_with_default(self): - """Test resolver with no sources returns default.""" - resolver = ConfigurationResolver() - - value = resolver.resolve_value("missing", default="default_value") - - assert value == "default_value" - - # Verify default tracked in history - history = resolver.get_resolution_history("missing") - assert history.default_used is True - assert history.final_value == "default_value" - - def test_real_world_multiple_connections(self, tmp_path): - """Test real-world scenario with multiple connection configs.""" - # User has both SnowCLI and SnowSQL configs with different connections - snowcli_config = tmp_path / "connections.toml" - snowcli_config.write_text( - "[prod]\n" - 'account = "prod_account"\n' - 'user = "prod_user"\n' - "[dev]\n" - 'account = "dev_account"\n' - 'user = "dev_user"\n' - ) - - # Test resolving prod connection - file_source = FileSource( - file_paths=[snowcli_config], - handlers=[TomlFileHandler(section_path=["prod"])], - ) - - resolver_prod = ConfigurationResolver(sources=[file_source]) - prod_config = resolver_prod.resolve() - - assert prod_config["account"] == "prod_account" - assert prod_config["user"] == "prod_user" - - # Test resolving dev connection - file_source_dev = FileSource( - file_paths=[snowcli_config], - handlers=[TomlFileHandler(section_path=["dev"])], - ) - - resolver_dev = ConfigurationResolver(sources=[file_source_dev]) - dev_config = 
resolver_dev.resolve() - - assert dev_config["account"] == "dev_account" - assert dev_config["user"] == "dev_user" - - def test_empty_sources_empty_result(self): - """Test resolver with empty sources returns empty config.""" - cli_source = CliArgumentSource(cli_context={}) - env_source = EnvironmentSource(handlers=[]) - file_source = FileSource(file_paths=[], handlers=[]) - - resolver = ConfigurationResolver(sources=[cli_source, env_source, file_source]) - - config = resolver.resolve() - - assert config == {} diff --git a/tests/config_ng/test_snowcli_env_handler.py b/tests/config_ng/test_snowcli_env_handler.py deleted file mode 100644 index 23b1bf93eb..0000000000 --- a/tests/config_ng/test_snowcli_env_handler.py +++ /dev/null @@ -1,287 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for SnowCliEnvHandler. 
- -Tests verify: -- SNOWFLAKE_* environment variable discovery -- Value type parsing (string, int, bool) -- Case handling (env vars are uppercase, keys are lowercase) -- Raw value preservation -- Priority and metadata -""" - -import os -from unittest.mock import patch - -from snowflake.cli.api.config_ng.core import SourcePriority -from snowflake.cli.api.config_ng.env_handlers import SnowCliEnvHandler - - -class TestSnowCliEnvHandler: - """Test suite for SnowCliEnvHandler.""" - - def test_create_handler(self): - """Should create handler with correct properties.""" - handler = SnowCliEnvHandler() - - assert handler.source_name == "snowflake_cli_env" - assert handler.priority == SourcePriority.ENVIRONMENT - assert handler.handler_type == "snowflake_cli_env" - - def test_can_handle_with_no_env_vars(self): - """Should return False when no SNOWFLAKE_* vars are set.""" - with patch.dict(os.environ, {}, clear=True): - handler = SnowCliEnvHandler() - assert handler.can_handle() is False - - def test_can_handle_with_env_vars(self): - """Should return True when SNOWFLAKE_* vars are present.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "test_account"}): - handler = SnowCliEnvHandler() - assert handler.can_handle() is True - - def test_discover_single_string_value(self): - """Should discover single string value.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "my_account" - assert values["account"].key == "account" - - def test_discover_multiple_values(self): - """Should discover multiple environment variables.""" - env_vars = { - "SNOWFLAKE_ACCOUNT": "my_account", - "SNOWFLAKE_USER": "my_user", - "SNOWFLAKE_WAREHOUSE": "my_warehouse", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert len(values) == 3 - 
assert values["account"].value == "my_account" - assert values["user"].value == "my_user" - assert values["warehouse"].value == "my_warehouse" - - def test_discover_specific_key(self): - """Should discover specific key when provided.""" - env_vars = { - "SNOWFLAKE_ACCOUNT": "my_account", - "SNOWFLAKE_USER": "my_user", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover(key="account") - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "my_account" - - def test_discover_nonexistent_key(self): - """Should return empty dict for nonexistent key.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover(key="nonexistent") - - assert len(values) == 0 - - def test_case_conversion(self): - """Should convert UPPERCASE env var names to lowercase config keys.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "test"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert "account" in values # lowercase key - assert "ACCOUNT" not in values - - def test_parse_string_value(self): - """Should parse string values as-is.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert values["account"].value == "my_account" - assert isinstance(values["account"].value, str) - - def test_parse_integer_value(self): - """Should parse integer strings as integers.""" - with patch.dict(os.environ, {"SNOWFLAKE_PORT": "443"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert values["port"].value == 443 - assert isinstance(values["port"].value, int) - - def test_parse_boolean_true_values(self): - """Should parse various true representations as boolean True.""" - true_values = ["true", "True", "TRUE", "1", "yes", "Yes", "on", "On"] - - for 
true_val in true_values: - with patch.dict( - os.environ, {"SNOWFLAKE_ENABLE_DIAG": true_val}, clear=True - ): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert values["enable_diag"].value is True, f"Failed for {true_val}" - assert isinstance(values["enable_diag"].value, bool) - - def test_parse_boolean_false_values(self): - """Should parse various false representations as boolean False.""" - false_values = ["false", "False", "FALSE", "0", "no", "No", "off", "Off"] - - for false_val in false_values: - with patch.dict( - os.environ, {"SNOWFLAKE_ENABLE_DIAG": false_val}, clear=True - ): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert values["enable_diag"].value is False, f"Failed for {false_val}" - assert isinstance(values["enable_diag"].value, bool) - - def test_raw_value_preservation(self): - """Should preserve raw string value in raw_value field.""" - with patch.dict(os.environ, {"SNOWFLAKE_PORT": "443"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - config_value = values["port"] - assert config_value.value == 443 # Parsed as int - assert config_value.raw_value == "443" # Original string - - def test_values_have_correct_metadata(self): - """Discovered values should have correct metadata.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - config_value = values["account"] - assert config_value.source_name == "snowflake_cli_env" - assert config_value.priority == SourcePriority.ENVIRONMENT - assert config_value.key == "account" - - def test_supports_any_string_key(self): - """Should support any string key.""" - handler = SnowCliEnvHandler() - - assert handler.supports_key("account") is True - assert handler.supports_key("user") is True - assert handler.supports_key("any_key") is True - assert handler.supports_key("") is True - - def test_ignores_non_snowflake_env_vars(self): - """Should 
ignore environment variables without SNOWFLAKE_ prefix.""" - env_vars = { - "SNOWFLAKE_ACCOUNT": "snowflake_account", - "SNOWSQL_ACCOUNT": "snowsql_account", - "ACCOUNT": "plain_account", - "PATH": "/usr/bin", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - # Should only get SNOWFLAKE_* variables - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "snowflake_account" - - def test_empty_string_value(self): - """Should handle empty string values.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": ""}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert values["account"].value == "" - assert isinstance(values["account"].value, str) - - def test_special_characters_in_value(self): - """Should handle special characters in values.""" - with patch.dict( - os.environ, - {"SNOWFLAKE_PASSWORD": "p@ss!w0rd#123"}, - clear=True, - ): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert values["password"].value == "p@ss!w0rd#123" - - def test_whitespace_in_value(self): - """Should preserve whitespace in values.""" - with patch.dict( - os.environ, - {"SNOWFLAKE_DESCRIPTION": " spaced value "}, - clear=True, - ): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert values["description"].value == " spaced value " - - def test_numeric_string_not_parsed_as_int(self): - """Should handle strings that look numeric but shouldn't be parsed.""" - # Account identifier that looks like a number - with patch.dict(os.environ, {"SNOWFLAKE_SESSION_ID": "12345abc"}, clear=True): - handler = SnowCliEnvHandler() - values = handler.discover() - - # Should remain string because "abc" makes it non-numeric - assert values["session_id"].value == "12345abc" - assert isinstance(values["session_id"].value, str) - - def test_underscore_in_key_preserved(self): - """Should preserve underscores in environment variable 
keys.""" - with patch.dict( - os.environ, {"SNOWFLAKE_PRIVATE_KEY_PATH": "/path/to/key"}, clear=True - ): - handler = SnowCliEnvHandler() - values = handler.discover() - - assert "private_key_path" in values - assert values["private_key_path"].value == "/path/to/key" - - def test_multiple_discover_calls_consistent(self): - """Multiple discover calls should return consistent results.""" - with patch.dict(os.environ, {"SNOWFLAKE_ACCOUNT": "my_account"}, clear=True): - handler = SnowCliEnvHandler() - - values1 = handler.discover() - values2 = handler.discover() - - assert values1 == values2 - - def test_discover_with_mixed_case_produces_lowercase_keys(self): - """All config keys should be lowercase regardless of env var case.""" - with patch.dict( - os.environ, - { - "SNOWFLAKE_ACCOUNT": "test1", - "SNOWFLAKE_User": "test2", # Mixed case shouldn't happen, but test anyway - }, - clear=True, - ): - handler = SnowCliEnvHandler() - values = handler.discover() - - # All keys should be lowercase - for key in values.keys(): - assert key == key.lower() diff --git a/tests/config_ng/test_snowsql_config_handler.py b/tests/config_ng/test_snowsql_config_handler.py deleted file mode 100644 index cc5464d041..0000000000 --- a/tests/config_ng/test_snowsql_config_handler.py +++ /dev/null @@ -1,413 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for IniFileHandler. 
- -Tests verify: -- SnowSQL config file discovery -- Key mapping (accountname → account, username → user, etc.) -- Section navigation -- Migration support -- Raw value preservation showing original key names -""" - -from pathlib import Path -from tempfile import NamedTemporaryFile - -import pytest -from snowflake.cli.api.config_ng.core import SourcePriority -from snowflake.cli.api.config_ng.file_handlers import IniFileHandler - - -class TestIniFileHandler: - """Test suite for IniFileHandler.""" - - def test_create_handler(self): - """Should create handler with correct properties.""" - snowsql_config_handler = IniFileHandler() - - assert snowsql_config_handler.source_name == "snowsql_config" - assert snowsql_config_handler.priority == SourcePriority.FILE - assert snowsql_config_handler.handler_type == "ini" - - def test_default_section_path(self): - """Should default to connections section.""" - # Verify by testing that it can discover from [connections] section - from pathlib import Path - from tempfile import NamedTemporaryFile - - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\naccount = test\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - # Should find value in [connections] section - assert "account" in values - finally: - temp_path.unlink() - - def test_custom_section_path(self): - """Should allow custom section path.""" - from pathlib import Path - from tempfile import NamedTemporaryFile - - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\n\n[connections.prod]\naccount = prod_account\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler( - section_path=["connections", "prod"] - ) - values = snowsql_config_handler.discover_from_file(temp_path) - # Should find value in custom section path - assert 
values["account"].value == "prod_account" - finally: - temp_path.unlink() - - def test_can_handle_always_true(self): - """Should always return True.""" - snowsql_config_handler = IniFileHandler() - assert snowsql_config_handler.can_handle() is True - - def test_can_handle_snowsql_config_files(self): - """Should detect SnowSQL config files.""" - snowsql_config_handler = IniFileHandler() - - # Typical SnowSQL config path - assert snowsql_config_handler.can_handle_file(Path("~/.snowsql/config")) is True - assert ( - snowsql_config_handler.can_handle_file(Path("/home/user/.snowsql/config")) - is True - ) - - def test_can_handle_toml_files(self): - """Should also handle .toml files.""" - snowsql_config_handler = IniFileHandler() - - assert snowsql_config_handler.can_handle_file(Path("config.toml")) is True - - def test_discover_raises_not_implemented(self): - """Should raise NotImplementedError for discover() without file_path.""" - snowsql_config_handler = IniFileHandler() - - with pytest.raises(NotImplementedError, match="requires file_path"): - snowsql_config_handler.discover() - - def test_discover_from_nonexistent_file(self): - """Should return empty dict for nonexistent file.""" - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(Path("/nonexistent/config")) - - assert len(values) == 0 - - def test_key_mapping_accountname(self): - """Should map accountname → account.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\naccountname = my_account\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert len(values) == 1 - assert "account" in values - assert "accountname" not in values - assert values["account"].value == "my_account" - assert values["account"].raw_value == "accountname=my_account" - finally: - temp_path.unlink() - - def 
test_key_mapping_username(self): - """Should map username → user.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\nusername = my_user\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert values["user"].value == "my_user" - assert values["user"].raw_value == "username=my_user" - finally: - temp_path.unlink() - - def test_key_mapping_multiple_database_keys(self): - """Should map both dbname and databasename → database.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\ndatabasename = my_db\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert values["database"].value == "my_db" - finally: - temp_path.unlink() - - def test_key_mapping_warehouse_schema_role(self): - """Should map warehouse, schema, and role names.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write( - "[connections]\n" - "warehousename = my_wh\n" - "schemaname = my_schema\n" - "rolename = my_role\n" - ) - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert values["warehouse"].value == "my_wh" - assert values["schema"].value == "my_schema" - assert values["role"].value == "my_role" - finally: - temp_path.unlink() - - def test_key_mapping_pwd_to_password(self): - """Should map pwd → password (from env mappings).""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\npwd = secret123\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert "password" in values - assert "pwd" not 
in values - assert values["password"].value == "secret123" - finally: - temp_path.unlink() - - def test_unmapped_keys_passthrough(self): - """Keys without mappings should pass through.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\ncustom_key = custom_value\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert values["custom_key"].value == "custom_value" - finally: - temp_path.unlink() - - def test_discover_all_common_keys(self): - """Should discover all common SnowSQL keys with mapping.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write( - "[connections]\n" - "accountname = my_account\n" - "username = my_user\n" - "pwd = my_password\n" - "databasename = my_db\n" - "schemaname = my_schema\n" - "warehousename = my_wh\n" - "rolename = my_role\n" - ) - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert len(values) == 7 - assert all( - key in values - for key in [ - "account", - "user", - "password", - "database", - "schema", - "warehouse", - "role", - ] - ) - finally: - temp_path.unlink() - - def test_discover_specific_key(self): - """Should discover specific key with mapping.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\naccountname = my_account\nusername = my_user\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path, key="account") - - assert len(values) == 1 - assert "account" in values - assert "user" not in values - finally: - temp_path.unlink() - - def test_discover_nonexistent_key(self): - """Should return empty dict for nonexistent key.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", 
delete=False) as f: - f.write("[connections]\naccountname = my_account\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file( - temp_path, key="nonexistent" - ) - - assert len(values) == 0 - finally: - temp_path.unlink() - - def test_discover_nonexistent_section(self): - """Should return empty dict for nonexistent section.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("accountname = my_account\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() # Default section: connections - values = snowsql_config_handler.discover_from_file(temp_path) - - assert len(values) == 0 - finally: - temp_path.unlink() - - def test_values_have_correct_metadata(self): - """Discovered values should have correct metadata.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\naccountname = my_account\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - config_value = values["account"] - assert config_value.source_name == "snowsql_config" - assert config_value.priority == SourcePriority.FILE - assert config_value.key == "account" - assert config_value.value == "my_account" - # Raw value shows original SnowSQL key - assert config_value.raw_value == "accountname=my_account" - finally: - temp_path.unlink() - - def test_supports_any_string_key(self): - """Should support any string key.""" - snowsql_config_handler = IniFileHandler() - - assert snowsql_config_handler.supports_key("account") is True - assert snowsql_config_handler.supports_key("any_key") is True - - def test_reverse_mapping_for_specific_key_query(self): - """Should use reverse mapping when querying specific key.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - 
f.write("[connections]\naccountname = my_account\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - # Query for "account" should find "accountname" - values = snowsql_config_handler.discover_from_file(temp_path, key="account") - - assert len(values) == 1 - assert values["account"].value == "my_account" - finally: - temp_path.unlink() - - def test_get_cli_key_method(self): - """Should convert SnowSQL keys to CLI keys.""" - snowsql_config_handler = IniFileHandler() - - assert snowsql_config_handler.get_cli_key("accountname") == "account" - assert snowsql_config_handler.get_cli_key("username") == "user" - assert snowsql_config_handler.get_cli_key("pwd") == "password" - assert snowsql_config_handler.get_cli_key("unmapped") == "unmapped" - - def test_case_insensitive_key_mapping(self): - """Key mappings should be case-insensitive.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\nAccountName = my_account\n") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - # Should still map to "account" - assert "account" in values - assert values["account"].value == "my_account" - finally: - temp_path.unlink() - - def test_invalid_ini_returns_empty(self): - """Should handle invalid INI gracefully.""" - with NamedTemporaryFile(mode="w", delete=False) as f: - f.write("invalid ini content [[[") - f.flush() - temp_path = Path(f.name) - - try: - snowsql_config_handler = IniFileHandler() - values = snowsql_config_handler.discover_from_file(temp_path) - - assert len(values) == 0 - finally: - temp_path.unlink() - - def test_caching_behavior(self): - """Should cache file data for performance.""" - with NamedTemporaryFile(mode="w", suffix=".cnf", delete=False) as f: - f.write("[connections]\naccountname = my_account\n") - f.flush() - temp_path = Path(f.name) - - try: - 
snowsql_config_handler = IniFileHandler() - - # First call loads file - values1 = snowsql_config_handler.discover_from_file(temp_path) - # Second call uses cache - values2 = snowsql_config_handler.discover_from_file(temp_path) - - assert values1 == values2 - # Verify caching by checking results are consistent - finally: - temp_path.unlink() diff --git a/tests/config_ng/test_snowsql_config_paths.py b/tests/config_ng/test_snowsql_config_paths.py deleted file mode 100644 index 0bfd39cd5f..0000000000 --- a/tests/config_ng/test_snowsql_config_paths.py +++ /dev/null @@ -1,233 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for get_snowsql_config_paths() helper function. 
- -Tests verify: -- Returns paths in correct precedence order (highest to lowest) -- Order is reversed from SnowSQL's CNF_FILES to match FileSource's "first wins" logic -- Only returns paths that exist -- Handles RPM config precedence correctly -""" - -from pathlib import Path -from unittest.mock import patch - -from snowflake.cli.api.config_ng.file_handlers import get_snowsql_config_paths - - -class TestGetSnowSqlConfigPaths: - """Test suite for get_snowsql_config_paths() function.""" - - def test_returns_list_of_paths(self): - """Should return a list of Path objects.""" - paths = get_snowsql_config_paths() - - assert isinstance(paths, list) - assert all(isinstance(p, Path) for p in paths) - - def test_only_returns_existing_paths(self, tmp_path): - """Should only return paths that exist on the filesystem.""" - with patch("pathlib.Path.home", return_value=tmp_path): - # Create only one of the expected files - snowsql_dir = tmp_path / ".snowsql" - snowsql_dir.mkdir() - config_file = snowsql_dir / "config" - config_file.touch() - - paths = get_snowsql_config_paths() - - # Should only return the one file that exists - assert len(paths) == 1 - assert paths[0] == config_file - - def test_user_config_has_highest_priority(self, tmp_path): - """User config should come first in the list (highest priority).""" - with patch("pathlib.Path.home", return_value=tmp_path): - # Create user .snowsql directory config - snowsql_dir = tmp_path / ".snowsql" - snowsql_dir.mkdir() - user_config = snowsql_dir / "config" - user_config.touch() - - # Create legacy user config - legacy_config = tmp_path / ".snowsql.cnf" - legacy_config.touch() - - paths = get_snowsql_config_paths() - - # User .snowsql/config should come before .snowsql.cnf - assert len(paths) == 2 - assert paths[0] == user_config - assert paths[1] == legacy_config - - def test_rpm_config_has_highest_priority_if_exists(self, tmp_path): - """RPM config should be first if it exists (concept test).""" - # This test verifies the 
logic conceptually - # In reality, RPM config path is unlikely to exist in test environment - # The important part is that IF it exists, it gets inserted at position 0 - - with patch("pathlib.Path.home", return_value=tmp_path): - # Create user config - snowsql_dir = tmp_path / ".snowsql" - snowsql_dir.mkdir() - user_config = snowsql_dir / "config" - user_config.touch() - - paths = get_snowsql_config_paths() - - # User config should be first (RPM likely doesn't exist) - assert len(paths) >= 1 - assert paths[0] == user_config - - # Verify that the logic in get_snowsql_config_paths checks for RPM - # This is validated by code inspection - the function checks rpm_config.exists() - - def test_precedence_order_matches_snowsql_behavior(self, tmp_path): - """ - Test that the returned order matches SnowSQL's effective precedence. - - SnowSQL reads files where "last one wins", so: - - bundled config (read first, lowest priority) - - system configs - - user configs (read last, highest priority) - - FileSource uses "first one wins", so we reverse the order: - - user configs (first in list, highest priority) - - system configs - - bundled config (last in list, lowest priority) - """ - with patch("pathlib.Path.home", return_value=tmp_path): - # Create all user config files - snowsql_dir = tmp_path / ".snowsql" - snowsql_dir.mkdir() - user_snowsql_config = snowsql_dir / "config" - user_snowsql_config.touch() - - user_legacy_config = tmp_path / ".snowsql.cnf" - user_legacy_config.touch() - - paths = get_snowsql_config_paths() - - # Verify order: most specific (user) configs first - assert len(paths) == 2 - assert paths[0] == user_snowsql_config # Highest priority - assert paths[1] == user_legacy_config # Second priority - - def test_handles_missing_home_directory_gracefully(self): - """Should handle case where home directory doesn't exist.""" - with patch("pathlib.Path.home", return_value=Path("/nonexistent")): - paths = get_snowsql_config_paths() - - # Should return empty list or 
only system paths that exist - assert isinstance(paths, list) - - def test_returns_empty_list_when_no_configs_exist(self, tmp_path): - """Should return empty list if no config files exist.""" - with patch("pathlib.Path.home", return_value=tmp_path): - paths = get_snowsql_config_paths() - - assert paths == [] - - def test_system_configs_have_lower_priority_than_user(self, tmp_path): - """System configs should appear after user configs in the list.""" - # This test verifies the concept even if system paths don't exist in test env - with patch("pathlib.Path.home", return_value=tmp_path): - snowsql_dir = tmp_path / ".snowsql" - snowsql_dir.mkdir() - user_config = snowsql_dir / "config" - user_config.touch() - - paths = get_snowsql_config_paths() - - # User config should be first (if any paths are returned) - if len(paths) > 0: - assert paths[0] == user_config - - -class TestSnowSqlConfigPathsIntegration: - """Integration tests with FileSource and IniFileHandler.""" - - def test_paths_work_with_file_source(self, tmp_path): - """Paths should work correctly with FileSource.""" - from snowflake.cli.api.config_ng.file_handlers import IniFileHandler - from snowflake.cli.api.config_ng.sources import FileSource - - with patch("pathlib.Path.home", return_value=tmp_path): - # Create a user config file - snowsql_dir = tmp_path / ".snowsql" - snowsql_dir.mkdir() - user_config = snowsql_dir / "config" - user_config.write_text( - "[connections]\naccountname = user_account\nusername = user\n" - ) - - # Get paths using helper - paths = get_snowsql_config_paths() - - # Create FileSource with these paths - source = FileSource(file_paths=paths, handlers=[IniFileHandler()]) - - values = source.discover() - - # Should discover values from user config - assert values["account"].value == "user_account" - assert values["user"].value == "user" - - def test_file_precedence_with_multiple_configs(self, tmp_path): - """ - Test that file precedence matches SnowSQL behavior. 
- - In SnowSQL: later files override earlier ones - In FileSource: earlier files override later ones - With reversed order: same effective behavior - """ - from snowflake.cli.api.config_ng.file_handlers import IniFileHandler - from snowflake.cli.api.config_ng.sources import FileSource - - with patch("pathlib.Path.home", return_value=tmp_path): - # Create user .snowsql/config (should have highest priority) - snowsql_dir = tmp_path / ".snowsql" - snowsql_dir.mkdir() - user_config = snowsql_dir / "config" - user_config.write_text( - "[connections]\n" - "accountname = priority1_account\n" - "username = priority1_user\n" - ) - - # Create another config in snowsql dir (should have lower priority) - # Using .toml extension so handler can process it - legacy_config = snowsql_dir / "legacy.toml" - legacy_config.write_text( - "[connections]\n" - "accountname = priority2_account\n" - "username = priority2_user\n" - "databasename = priority2_db\n" - ) - - # Manually specify paths to test precedence - paths = [user_config, legacy_config] - - source = FileSource(file_paths=paths, handlers=[IniFileHandler()]) - - values = source.discover() - - # Values from user_config should win (it's first in the list) - assert values["account"].value == "priority1_account" - assert values["user"].value == "priority1_user" - - # Database only exists in legacy config, so it should be found - assert values["database"].value == "priority2_db" diff --git a/tests/config_ng/test_snowsql_env_handler.py b/tests/config_ng/test_snowsql_env_handler.py deleted file mode 100644 index e02312e665..0000000000 --- a/tests/config_ng/test_snowsql_env_handler.py +++ /dev/null @@ -1,309 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for SnowSqlEnvHandler. - -Tests verify: -- SNOWSQL_* environment variable discovery -- Key mapping (PWD → password) -- Value type parsing (string, int, bool) -- Case handling -- Raw value preservation -- Migration support -""" - -import os -from unittest.mock import patch - -from snowflake.cli.api.config_ng.core import SourcePriority -from snowflake.cli.api.config_ng.env_handlers import SnowSqlEnvHandler - - -class TestSnowSqlEnvHandler: - """Test suite for SnowSqlEnvHandler.""" - - def test_create_handler(self): - """Should create handler with correct properties.""" - handler = SnowSqlEnvHandler() - - assert handler.source_name == "snowsql_env" - assert handler.priority == SourcePriority.ENVIRONMENT - assert handler.handler_type == "snowsql_env" - - def test_can_handle_with_no_env_vars(self): - """Should return False when no SNOWSQL_* vars are set.""" - with patch.dict(os.environ, {}, clear=True): - handler = SnowSqlEnvHandler() - assert handler.can_handle() is False - - def test_can_handle_with_env_vars(self): - """Should return True when SNOWSQL_* vars are present.""" - with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "test_account"}): - handler = SnowSqlEnvHandler() - assert handler.can_handle() is True - - def test_discover_single_string_value(self): - """Should discover single string value.""" - with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == 
"my_account" - - def test_key_mapping_pwd_to_password(self): - """Should map SNOWSQL_PWD to 'password' key.""" - with patch.dict(os.environ, {"SNOWSQL_PWD": "secret123"}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert len(values) == 1 - assert "password" in values # Mapped key - assert "pwd" not in values # Original key should not appear - assert values["password"].value == "secret123" - - def test_discover_multiple_values_with_mapping(self): - """Should discover multiple values with key mapping applied.""" - env_vars = { - "SNOWSQL_ACCOUNT": "my_account", - "SNOWSQL_USER": "my_user", - "SNOWSQL_PWD": "my_password", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert len(values) == 3 - assert values["account"].value == "my_account" - assert values["user"].value == "my_user" - assert values["password"].value == "my_password" # Mapped from PWD - - def test_discover_specific_key_direct(self): - """Should discover specific key that doesn't require mapping.""" - env_vars = { - "SNOWSQL_ACCOUNT": "my_account", - "SNOWSQL_USER": "my_user", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover(key="account") - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "my_account" - - def test_discover_specific_key_with_mapping(self): - """Should discover specific key using reverse mapping.""" - with patch.dict(os.environ, {"SNOWSQL_PWD": "secret123"}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover(key="password") - - assert len(values) == 1 - assert "password" in values - assert values["password"].value == "secret123" - - def test_discover_nonexistent_key(self): - """Should return empty dict for nonexistent key.""" - with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): - handler = SnowSqlEnvHandler() - values = 
handler.discover(key="nonexistent") - - assert len(values) == 0 - - def test_case_conversion(self): - """Should convert UPPERCASE env var names to lowercase config keys.""" - with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "test"}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert "account" in values # lowercase key - assert "ACCOUNT" not in values - - def test_parse_value_types_same_as_snowcli(self): - """Should parse values the same way as SnowCliEnvHandler.""" - env_vars = { - "SNOWSQL_ACCOUNT": "my_account", # String - "SNOWSQL_PORT": "443", # Integer - "SNOWSQL_ENABLE_DIAG": "true", # Boolean - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert values["account"].value == "my_account" - assert isinstance(values["account"].value, str) - - assert values["port"].value == 443 - assert isinstance(values["port"].value, int) - - assert values["enable_diag"].value is True - assert isinstance(values["enable_diag"].value, bool) - - def test_parse_boolean_values(self): - """Should parse various boolean representations.""" - for true_val in ["true", "TRUE", "1", "yes", "on"]: - with patch.dict(os.environ, {"SNOWSQL_ENABLE_DIAG": true_val}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - assert values["enable_diag"].value is True - - for false_val in ["false", "FALSE", "0", "no", "off"]: - with patch.dict(os.environ, {"SNOWSQL_ENABLE_DIAG": false_val}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - assert values["enable_diag"].value is False - - def test_raw_value_preservation(self): - """Should preserve raw string value in raw_value field.""" - with patch.dict(os.environ, {"SNOWSQL_PORT": "443"}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - config_value = values["port"] - assert config_value.value == 443 # Parsed as int - assert config_value.raw_value == "443" # Original 
string - - def test_values_have_correct_metadata(self): - """Discovered values should have correct metadata.""" - with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - config_value = values["account"] - assert config_value.source_name == "snowsql_env" - assert config_value.priority == SourcePriority.ENVIRONMENT - assert config_value.key == "account" - - def test_supports_any_string_key(self): - """Should support any string key.""" - handler = SnowSqlEnvHandler() - - assert handler.supports_key("account") is True - assert handler.supports_key("password") is True - assert handler.supports_key("any_key") is True - - def test_ignores_non_snowsql_env_vars(self): - """Should ignore environment variables without SNOWSQL_ prefix.""" - env_vars = { - "SNOWSQL_ACCOUNT": "snowsql_account", - "SNOWFLAKE_ACCOUNT": "snowflake_account", - "ACCOUNT": "plain_account", - "PATH": "/usr/bin", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - # Should only get SNOWSQL_* variables - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "snowsql_account" - - def test_reverse_mapping_lookup(self): - """Should correctly perform reverse lookup for mapped keys.""" - handler = SnowSqlEnvHandler() - - # Test reverse mapping: password -> pwd - snowsql_key = handler.get_snowsql_key("password") - assert snowsql_key == "pwd" - - # Test non-mapped key returns itself - snowsql_key = handler.get_snowsql_key("account") - assert snowsql_key == "account" - - def test_migration_scenario_all_snowsql_vars(self): - """Simulates user with only SnowSQL environment variables.""" - env_vars = { - "SNOWSQL_ACCOUNT": "legacy_account", - "SNOWSQL_USER": "legacy_user", - "SNOWSQL_PWD": "legacy_password", - "SNOWSQL_WAREHOUSE": "legacy_warehouse", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = 
SnowSqlEnvHandler() - values = handler.discover() - - assert len(values) == 4 - assert values["account"].value == "legacy_account" - assert values["user"].value == "legacy_user" - assert values["password"].value == "legacy_password" - assert values["warehouse"].value == "legacy_warehouse" - - def test_common_snowsql_variables(self): - """Should handle common SnowSQL environment variables.""" - env_vars = { - "SNOWSQL_ACCOUNT": "my_account", - "SNOWSQL_USER": "my_user", - "SNOWSQL_PWD": "my_password", - "SNOWSQL_DATABASE": "my_database", - "SNOWSQL_SCHEMA": "my_schema", - "SNOWSQL_WAREHOUSE": "my_warehouse", - "SNOWSQL_ROLE": "my_role", - } - with patch.dict(os.environ, env_vars, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert len(values) == 7 - assert all( - key in values - for key in [ - "account", - "user", - "password", - "database", - "schema", - "warehouse", - "role", - ] - ) - - def test_empty_string_value(self): - """Should handle empty string values.""" - with patch.dict(os.environ, {"SNOWSQL_ACCOUNT": ""}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert values["account"].value == "" - - def test_special_characters_in_value(self): - """Should handle special characters in values.""" - with patch.dict(os.environ, {"SNOWSQL_PWD": "p@ss!w0rd#123"}, clear=True): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert values["password"].value == "p@ss!w0rd#123" - - def test_underscore_in_key_preserved(self): - """Should preserve underscores in environment variable keys.""" - with patch.dict( - os.environ, {"SNOWSQL_PRIVATE_KEY_PATH": "/path/to/key"}, clear=True - ): - handler = SnowSqlEnvHandler() - values = handler.discover() - - assert "private_key_path" in values - assert values["private_key_path"].value == "/path/to/key" - - def test_multiple_discover_calls_consistent(self): - """Multiple discover calls should return consistent results.""" - with 
patch.dict(os.environ, {"SNOWSQL_ACCOUNT": "my_account"}, clear=True): - handler = SnowSqlEnvHandler() - - values1 = handler.discover() - values2 = handler.discover() - - assert values1 == values2 diff --git a/tests/config_ng/test_source_priority.py b/tests/config_ng/test_source_priority.py deleted file mode 100644 index fd86020c91..0000000000 --- a/tests/config_ng/test_source_priority.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for SourcePriority enum. 
- -Tests verify: -- Enum values are correctly defined -- Priority ordering is correct (lower value = higher priority) -""" - -from snowflake.cli.api.config_ng.core import SourcePriority - - -class TestSourcePriority: - """Test suite for SourcePriority enum.""" - - def test_cli_argument_has_highest_priority(self): - """CLI_ARGUMENT should have the lowest numeric value (highest priority).""" - assert SourcePriority.CLI_ARGUMENT.value == 1 - - def test_environment_has_medium_priority(self): - """ENVIRONMENT should have medium numeric value (medium priority).""" - assert SourcePriority.ENVIRONMENT.value == 2 - - def test_file_has_lowest_priority(self): - """FILE should have the highest numeric value (lowest priority).""" - assert SourcePriority.FILE.value == 3 - - def test_priority_ordering(self): - """Lower numeric value should mean higher priority.""" - assert SourcePriority.CLI_ARGUMENT.value < SourcePriority.ENVIRONMENT.value - assert SourcePriority.ENVIRONMENT.value < SourcePriority.FILE.value - - def test_enum_comparison(self): - """Enum members should be comparable by value.""" - priorities = [ - SourcePriority.FILE, - SourcePriority.CLI_ARGUMENT, - SourcePriority.ENVIRONMENT, - ] - sorted_priorities = sorted(priorities, key=lambda p: p.value) - - assert sorted_priorities[0] == SourcePriority.CLI_ARGUMENT - assert sorted_priorities[1] == SourcePriority.ENVIRONMENT - assert sorted_priorities[2] == SourcePriority.FILE diff --git a/tests/config_ng/test_toml_file_handler.py b/tests/config_ng/test_toml_file_handler.py deleted file mode 100644 index 2cc183d8f2..0000000000 --- a/tests/config_ng/test_toml_file_handler.py +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for TomlFileHandler. - -Tests verify: -- TOML file discovery -- Section navigation -- Caching behavior -- File format detection -- Value metadata -""" - -from pathlib import Path -from tempfile import NamedTemporaryFile - -import pytest -from snowflake.cli.api.config_ng.core import SourcePriority -from snowflake.cli.api.config_ng.file_handlers import TomlFileHandler - - -class TestTomlFileHandler: - """Test suite for TomlFileHandler.""" - - def test_create_handler(self): - """Should create handler with correct properties.""" - handler = TomlFileHandler() - - assert handler.source_name == "toml:root" - assert handler.priority == SourcePriority.FILE - assert handler.handler_type == "toml" - - def test_create_handler_with_section_path(self): - """Should create handler with section path.""" - handler = TomlFileHandler(section_path=["connections", "default"]) - - assert handler.source_name == "toml:connections.default" - assert handler.priority == SourcePriority.FILE - - def test_can_handle_always_true(self): - """Should always return True.""" - handler = TomlFileHandler() - assert handler.can_handle() is True - - def test_can_handle_toml_files(self): - """Should detect TOML files by extension.""" - handler = TomlFileHandler() - - assert handler.can_handle_file(Path("config.toml")) is True - assert handler.can_handle_file(Path("connections.toml")) is True - assert handler.can_handle_file(Path("file.tml")) is True - - def test_cannot_handle_non_toml_files(self): - """Should reject non-TOML files.""" - handler = TomlFileHandler() - - assert 
handler.can_handle_file(Path("config.json")) is False - assert handler.can_handle_file(Path("config.yaml")) is False - assert handler.can_handle_file(Path("config")) is False - - def test_discover_raises_not_implemented(self): - """Should raise NotImplementedError for discover() without file_path.""" - handler = TomlFileHandler() - - with pytest.raises(NotImplementedError, match="requires file_path"): - handler.discover() - - def test_discover_from_nonexistent_file(self): - """Should return empty dict for nonexistent file.""" - handler = TomlFileHandler() - values = handler.discover_from_file(Path("/nonexistent/file.toml")) - - assert len(values) == 0 - - def test_discover_from_simple_toml(self): - """Should discover values from simple TOML file.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[default]\naccount = "my_account"\nuser = "my_user"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler(section_path=["default"]) - values = handler.discover_from_file(temp_path) - - assert len(values) == 2 - assert values["account"].value == "my_account" - assert values["user"].value == "my_user" - finally: - temp_path.unlink() - - def test_discover_root_level(self): - """Should discover values at root level.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('account = "my_account"\nuser = "my_user"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler() # No section path - values = handler.discover_from_file(temp_path) - - assert len(values) == 2 - assert values["account"].value == "my_account" - finally: - temp_path.unlink() - - def test_discover_nested_section(self): - """Should navigate to nested sections.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[connections]\n[connections.default]\naccount = "test"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = 
TomlFileHandler(section_path=["connections", "default"]) - values = handler.discover_from_file(temp_path) - - assert len(values) == 1 - assert values["account"].value == "test" - finally: - temp_path.unlink() - - def test_discover_nonexistent_section(self): - """Should return empty dict for nonexistent section.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('account = "my_account"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler(section_path=["nonexistent"]) - values = handler.discover_from_file(temp_path) - - assert len(values) == 0 - finally: - temp_path.unlink() - - def test_discover_specific_key(self): - """Should discover only specific key.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('account = "my_account"\nuser = "my_user"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler() - values = handler.discover_from_file(temp_path, key="account") - - assert len(values) == 1 - assert "account" in values - assert "user" not in values - finally: - temp_path.unlink() - - def test_discover_nonexistent_key(self): - """Should return empty dict for nonexistent key.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('account = "my_account"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler() - values = handler.discover_from_file(temp_path, key="nonexistent") - - assert len(values) == 0 - finally: - temp_path.unlink() - - def test_values_have_correct_metadata(self): - """Discovered values should have correct metadata.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('[default]\naccount = "my_account"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler(section_path=["default"]) - values = handler.discover_from_file(temp_path) - - config_value = values["account"] - assert config_value.source_name == "toml:default" - 
assert config_value.priority == SourcePriority.FILE - assert config_value.key == "account" - assert config_value.value == "my_account" - assert config_value.raw_value == "my_account" - finally: - temp_path.unlink() - - def test_handles_various_value_types(self): - """Should handle different TOML value types.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write( - 'string_val = "text"\n' - "int_val = 42\n" - "bool_val = true\n" - 'list_val = ["a", "b"]\n' - ) - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler() - values = handler.discover_from_file(temp_path) - - assert values["string_val"].value == "text" - assert values["int_val"].value == 42 - assert values["bool_val"].value is True - assert values["list_val"].value == ["a", "b"] - finally: - temp_path.unlink() - - def test_caching_behavior(self): - """Should cache file data for performance.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('account = "my_account"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler() - - # First call loads file - values1 = handler.discover_from_file(temp_path) - # Second call uses cache - values2 = handler.discover_from_file(temp_path) - - assert values1 == values2 - # Verify caching by checking results are consistent - finally: - temp_path.unlink() - - def test_cache_invalidation_on_different_file(self): - """Should invalidate cache when file changes.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f1: - f1.write('account = "account1"\n') - f1.flush() - temp_path1 = Path(f1.name) - - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f2: - f2.write('account = "account2"\n') - f2.flush() - temp_path2 = Path(f2.name) - - try: - handler = TomlFileHandler() - - values1 = handler.discover_from_file(temp_path1) - values2 = handler.discover_from_file(temp_path2) - - assert values1["account"].value == "account1" - assert 
values2["account"].value == "account2" - finally: - temp_path1.unlink() - temp_path2.unlink() - - def test_supports_any_string_key(self): - """Should support any string key.""" - handler = TomlFileHandler() - - assert handler.supports_key("account") is True - assert handler.supports_key("any_key") is True - assert handler.supports_key("") is True - - def test_invalid_toml_returns_empty(self): - """Should handle invalid TOML gracefully.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write("invalid toml content [[[") - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler() - values = handler.discover_from_file(temp_path) - - assert len(values) == 0 - finally: - temp_path.unlink() - - def test_multiple_discover_calls_consistent(self): - """Multiple discover calls should return consistent results.""" - with NamedTemporaryFile(mode="w", suffix=".toml", delete=False) as f: - f.write('account = "my_account"\n') - f.flush() - temp_path = Path(f.name) - - try: - handler = TomlFileHandler() - - values1 = handler.discover_from_file(temp_path) - values2 = handler.discover_from_file(temp_path) - - assert values1 == values2 - finally: - temp_path.unlink() diff --git a/tests/config_ng/test_value_source.py b/tests/config_ng/test_value_source.py deleted file mode 100644 index 16e2f5c6ac..0000000000 --- a/tests/config_ng/test_value_source.py +++ /dev/null @@ -1,191 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Unit tests for ValueSource interface. - -Tests verify: -- Concrete implementations work correctly -- Common protocol is enforced -""" - -from snowflake.cli.api.config_ng.core import ConfigValue, SourcePriority, ValueSource - - -class TestValueSourceConcreteImplementation: - """Test a concrete implementation of ValueSource.""" - - class MockSource(ValueSource): - """Mock source for testing.""" - - def __init__(self, data: dict): - self._data = data - - @property - def source_name(self) -> str: - return "mock_source" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def discover(self, key=None): - if key is None: - return { - k: ConfigValue( - key=k, - value=v, - source_name=self.source_name, - priority=self.priority, - ) - for k, v in self._data.items() - } - elif key in self._data: - return { - key: ConfigValue( - key=key, - value=self._data[key], - source_name=self.source_name, - priority=self.priority, - ) - } - else: - return {} - - def supports_key(self, key: str) -> bool: - return key in self._data - - def test_discover_all_values(self): - """Should discover all values when key is None.""" - source = self.MockSource({"account": "test_account", "user": "test_user"}) - - values = source.discover() - - assert len(values) == 2 - assert "account" in values - assert "user" in values - assert values["account"].value == "test_account" - assert values["user"].value == "test_user" - - def test_discover_specific_key(self): - """Should discover specific key when provided.""" - source = self.MockSource({"account": "test_account", "user": "test_user"}) - - values = source.discover(key="account") - - assert len(values) == 1 - assert "account" in values - assert values["account"].value == "test_account" - - def test_discover_nonexistent_key(self): - """Should return empty dict for nonexistent key.""" - source = 
self.MockSource({"account": "test_account"}) - - values = source.discover(key="nonexistent") - - assert len(values) == 0 - - def test_supports_existing_key(self): - """Should return True for existing key.""" - source = self.MockSource({"account": "test_account"}) - - assert source.supports_key("account") is True - - def test_supports_nonexistent_key(self): - """Should return False for nonexistent key.""" - source = self.MockSource({"account": "test_account"}) - - assert source.supports_key("nonexistent") is False - - def test_source_name_is_accessible(self): - """Should be able to access source_name property.""" - source = self.MockSource({}) - - assert source.source_name == "mock_source" - - def test_priority_is_accessible(self): - """Should be able to access priority property.""" - source = self.MockSource({}) - - assert source.priority == SourcePriority.FILE - - def test_discovered_values_have_correct_metadata(self): - """Discovered values should have correct metadata.""" - source = self.MockSource({"account": "test_account"}) - - values = source.discover(key="account") - config_value = values["account"] - - assert config_value.source_name == "mock_source" - assert config_value.priority == SourcePriority.FILE - assert config_value.key == "account" - assert config_value.value == "test_account" - - def test_discover_returns_dict_of_config_values(self): - """discover() should return Dict[str, ConfigValue].""" - source = self.MockSource({"account": "test_account"}) - - values = source.discover() - - assert isinstance(values, dict) - for key, value in values.items(): - assert isinstance(key, str) - assert isinstance(value, ConfigValue) - - def test_empty_source_discover(self): - """Should handle empty source gracefully.""" - source = self.MockSource({}) - - values = source.discover() - - assert len(values) == 0 - assert isinstance(values, dict) - - def test_multiple_sources_with_different_priorities(self): - """Should be able to create sources with different 
priorities.""" - - class HighPrioritySource(ValueSource): - @property - def source_name(self) -> str: - return "high_priority" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.CLI_ARGUMENT - - def discover(self, key=None): - return {} - - def supports_key(self, key: str) -> bool: - return False - - class LowPrioritySource(ValueSource): - @property - def source_name(self) -> str: - return "low_priority" - - @property - def priority(self) -> SourcePriority: - return SourcePriority.FILE - - def discover(self, key=None): - return {} - - def supports_key(self, key: str) -> bool: - return False - - high = HighPrioritySource() - low = LowPrioritySource() - - assert high.priority.value < low.priority.value From 79b6c7f2d0fd1b24bf279e899dd54bb3ce09380d Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 08:54:58 +0200 Subject: [PATCH 18/78] SNOW-2306184: config refactor - simplified implementation --- src/snowflake/cli/api/config.py | 34 ++- src/snowflake/cli/api/config_ng/resolver.py | 32 ++- src/snowflake/cli/api/config_ng/sources.py | 252 +++++++++++--------- src/snowflake/cli/api/config_provider.py | 31 ++- src/snowflake/cli/api/connections.py | 21 +- tests/config_ng/conftest.py | 50 ++-- tests/config_ng/test_configuration.py | 28 ++- 7 files changed, 282 insertions(+), 166 deletions(-) diff --git a/src/snowflake/cli/api/config.py b/src/snowflake/cli/api/config.py index 5be189c2fb..4cb7f16a7b 100644 --- a/src/snowflake/cli/api/config.py +++ b/src/snowflake/cli/api/config.py @@ -308,19 +308,35 @@ def config_section_exists(*path) -> bool: def get_all_connections() -> dict[str, ConnectionConfig]: - return { - k: ConnectionConfig.from_dict(connection_dict) - for k, connection_dict in get_config_section("connections").items() - } + # Use config provider if available + try: + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + return 
provider.get_all_connections() + except Exception: + # Fall back to legacy implementation + return { + k: ConnectionConfig.from_dict(connection_dict) + for k, connection_dict in get_config_section("connections").items() + } def get_connection_dict(connection_name: str) -> dict: + # Use config provider if available try: - return get_config_section(CONNECTIONS_SECTION, connection_name) - except KeyError: - raise MissingConfigurationError( - f"Connection {connection_name} is not configured" - ) + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + return provider.get_connection_dict(connection_name) + except Exception: + # Fall back to legacy implementation + try: + return get_config_section(CONNECTIONS_SECTION, connection_name) + except KeyError: + raise MissingConfigurationError( + f"Connection {connection_name} is not configured" + ) def get_default_connection_name() -> str: diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index e71b59a795..09ffbc3f63 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -348,9 +348,11 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A Resolution Process: 1. Iterate sources in order (lowest to highest priority) 2. Record all discovered values in history - 3. Later sources overwrite earlier sources (simple dict update) - 4. Mark which value was selected - 5. Return final resolved values + 3. For connection keys (connections.{name}.{param}): + - Merge connection-by-connection: later sources extend/overwrite individual params + 4. For flat keys: later sources overwrite earlier sources + 5. Mark which value was selected + 6. 
Return final resolved values Args: key: Specific key to resolve (None = all keys) @@ -360,9 +362,10 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A Dictionary of resolved values (key -> value) """ all_values: Dict[str, ConfigValue] = {} + # Track connection values separately for intelligent merging + connections: Dict[str, Dict[str, ConfigValue]] = defaultdict(dict) # Process sources in order (first = lowest priority, last = highest) - # Later sources overwrite earlier ones via dict.update() for source in self._sources: try: source_values = source.discover(key) @@ -371,12 +374,29 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A for k, config_value in source_values.items(): self._history_tracker.record_discovery(k, config_value) - # Update current values (later source overwrites earlier) - all_values.update(source_values) + # Separate connection keys from flat keys + for k, config_value in source_values.items(): + if k.startswith("connections."): + # Parse: connections.{name}.{param} + parts = k.split(".", 2) + if len(parts) == 3: + conn_name = parts[1] + param = parts[2] + param_key = f"connections.{conn_name}.{param}" + + # Merge at parameter level: later source overwrites/extends + connections[conn_name][param_key] = config_value + else: + # Flat key: later source overwrites + all_values[k] = config_value except Exception as e: log.warning("Error from source %s: %s", source.source_name, e) + # Flatten connection data back into all_values + for conn_name, conn_params in connections.items(): + all_values.update(conn_params) + # Mark which values were selected in history for k, config_value in all_values.items(): self._history_tracker.mark_selected(k, config_value.source_name) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 38ce00abd5..769f589c3e 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ 
b/src/snowflake/cli/api/config_ng/sources.py @@ -51,6 +51,7 @@ class SnowSQLConfigFile(ValueSource): Reads multiple config files in order and MERGES them (SnowSQL behavior). Later files override earlier files for the same keys. + Returns configuration for ALL connections. Config files searched (in order): 1. Bundled default config (if in package) @@ -61,14 +62,31 @@ class SnowSQLConfigFile(ValueSource): 6. ~/.snowsql/config (current user config) """ - def __init__(self, connection_name: str = "default"): - """ - Initialize SnowSQL config file source. + # SnowSQL uses different key names - map them to CLI standard names + KEY_MAPPING = { + "accountname": "account", + "username": "user", + "rolename": "role", + "warehousename": "warehouse", + "schemaname": "schema", + "dbname": "database", + "pwd": "password", + # Keys that don't need mapping (already correct) + "password": "password", + "database": "database", + "schema": "schema", + "role": "role", + "warehouse": "warehouse", + "host": "host", + "port": "port", + "protocol": "protocol", + "authenticator": "authenticator", + "private_key_path": "private_key_path", + "private_key_passphrase": "private_key_passphrase", + } - Args: - connection_name: Name of the connection to read from - """ - self._connection_name = connection_name + def __init__(self): + """Initialize SnowSQL config file source.""" self._config_files = [ Path("/etc/snowsql.cnf"), Path("/etc/snowflake/snowsql.cnf"), @@ -85,6 +103,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Read and MERGE all SnowSQL config files. Later files override earlier files (SnowSQL merging behavior). + Returns keys in format: connections.{name}.{param} for ALL connections. 
""" merged_values: Dict[str, ConfigValue] = {} @@ -96,25 +115,35 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: config = configparser.ConfigParser() config.read(config_file) - # Try connection-specific section first: [connections.prod] - section_name = f"connections.{self._connection_name}" - if config.has_section(section_name): - section_data = dict(config[section_name]) - # Fall back to default [connections] section - elif config.has_section("connections"): - section_data = dict(config["connections"]) - else: - continue - - # Merge values (later file wins for conflicts) - for k, v in section_data.items(): - if key is None or k == key: - merged_values[k] = ConfigValue( - key=k, - value=v, - source_name=self.source_name, - raw_value=v, - ) + # Process all connection sections + for section in config.sections(): + if section.startswith("connections"): + # Extract connection name + if section == "connections": + # This is default connection + connection_name = "default" + else: + # Format: connections.qa6 -> qa6 + connection_name = ( + section.split(".", 1)[1] + if "." in section + else "default" + ) + + section_data = dict(config[section]) + + # Add all params for this connection + for param_key, param_value in section_data.items(): + # Map SnowSQL key names to CLI standard names + normalized_key = self.KEY_MAPPING.get(param_key, param_key) + full_key = f"connections.{connection_name}.{normalized_key}" + if key is None or full_key == key: + merged_values[full_key] = ConfigValue( + key=full_key, + value=param_value, + source_name=self.source_name, + raw_value=f"{param_key}={param_value}", # Show original key in raw_value + ) except Exception as e: log.debug("Failed to read SnowSQL config %s: %s", config_file, e) @@ -131,20 +160,15 @@ class CliConfigFile(ValueSource): Scans for config.toml files in order and uses FIRST file found (CLI behavior). Does NOT merge multiple files - first found wins. + Returns configuration for ALL connections. 
Search order: 1. ./config.toml (current directory) 2. ~/.snowflake/config.toml (user config) """ - def __init__(self, connection_name: str = "default"): - """ - Initialize CLI config file source. - - Args: - connection_name: Name of the connection to read from - """ - self._connection_name = connection_name + def __init__(self): + """Initialize CLI config file source.""" self._search_paths = [ Path.cwd() / "config.toml", Path.home() / ".snowflake" / "config.toml", @@ -158,6 +182,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Find FIRST existing config file and use it (CLI behavior). Does NOT merge multiple files. + Returns keys in format: connections.{name}.{param} for ALL connections. """ for config_file in self._search_paths: if config_file.exists(): @@ -168,21 +193,28 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: def _parse_toml_file( self, file_path: Path, key: Optional[str] = None ) -> Dict[str, ConfigValue]: - """Parse TOML file and extract connection configuration.""" + """Parse TOML file and extract ALL connection configurations.""" try: with open(file_path, "rb") as f: data = tomllib.load(f) - # Navigate to connections. 
- conn_data = data.get("connections", {}).get(self._connection_name, {}) + result = {} - return { - k: ConfigValue( - key=k, value=v, source_name=self.source_name, raw_value=v - ) - for k, v in conn_data.items() - if key is None or k == key - } + # Get all connections + connections = data.get("connections", {}) + for conn_name, conn_data in connections.items(): + if isinstance(conn_data, dict): + for param_key, param_value in conn_data.items(): + full_key = f"connections.{conn_name}.{param_key}" + if key is None or full_key == key: + result[full_key] = ConfigValue( + key=full_key, + value=param_value, + source_name=self.source_name, + raw_value=param_value, + ) + + return result except Exception as e: log.debug("Failed to parse CLI config %s: %s", file_path, e) @@ -197,16 +229,11 @@ class ConnectionsConfigFile(ValueSource): Dedicated connections.toml file source. Reads ~/.snowflake/connections.toml specifically. + Returns configuration for ALL connections. """ - def __init__(self, connection_name: str = "default"): - """ - Initialize connections.toml source. - - Args: - connection_name: Name of the connection to read from - """ - self._connection_name = connection_name + def __init__(self): + """Initialize connections.toml source.""" self._file_path = Path.home() / ".snowflake" / "connections.toml" @property @@ -214,7 +241,10 @@ def source_name(self) -> str: return "connections_toml" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: - """Read connections.toml if it exists.""" + """ + Read connections.toml if it exists. + Returns keys in format: connections.{name}.{param} for ALL connections. 
+ """ if not self._file_path.exists(): return {} @@ -222,15 +252,22 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: with open(self._file_path, "rb") as f: data = tomllib.load(f) - conn_data = data.get("connections", {}).get(self._connection_name, {}) + result = {} + connections = data.get("connections", {}) - return { - k: ConfigValue( - key=k, value=v, source_name=self.source_name, raw_value=v - ) - for k, v in conn_data.items() - if key is None or k == key - } + for conn_name, conn_data in connections.items(): + if isinstance(conn_data, dict): + for param_key, param_value in conn_data.items(): + full_key = f"connections.{conn_name}.{param_key}" + if key is None or full_key == key: + result[full_key] = ConfigValue( + key=full_key, + value=param_value, + source_name=self.source_name, + raw_value=param_value, + ) + + return result except Exception as e: log.debug("Failed to read connections.toml: %s", e) @@ -347,73 +384,66 @@ class CliEnvironment(ValueSource): "authenticator", ] - def __init__(self, connection_name: Optional[str] = None): - """ - Initialize CLI environment source. - - Args: - connection_name: Optional connection name for connection-specific vars - """ - self._connection_name = connection_name - @property def source_name(self) -> str: - if self._connection_name: - return f"cli_env:{self._connection_name}" return "cli_env" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Discover SNOWFLAKE_* environment variables. + Returns both general (flat) and connection-specific (prefixed) keys. - Supports two patterns: - 1. SNOWFLAKE_ACCOUNT (general) - 2. SNOWFLAKE_CONNECTION__ACCOUNT (connection-specific, higher priority) + Patterns: + 1. SNOWFLAKE_ACCOUNT=x -> account=x (flat key) + 2. 
SNOWFLAKE_CONNECTION_PROD_ACCOUNT=y -> connections.prod.account=y """ values: Dict[str, ConfigValue] = {} - # Pattern 1: General SNOWFLAKE_* variables - for config_key in self.CONFIG_KEYS: - if key is not None and config_key != key: + # Scan all environment variables + for env_name, env_value in os.environ.items(): + if not env_name.startswith("SNOWFLAKE_"): continue - env_var = f"SNOWFLAKE_{config_key.upper()}" - env_value = os.getenv(env_var) - - if env_value is not None: - values[config_key] = ConfigValue( - key=config_key, - value=env_value, - source_name=self.source_name, - raw_value=env_value, - ) - - # Pattern 2: Connection-specific SNOWFLAKE_CONNECTION__* variables - # These override general variables - if self._connection_name: - conn_prefix = f"SNOWFLAKE_CONNECTION_{self._connection_name.upper()}_" - - for config_key in self.CONFIG_KEYS: - if key is not None and config_key != key: - continue - - env_var = f"{conn_prefix}{config_key.upper()}" - env_value = os.getenv(env_var) - - if env_value is not None: - # Override general variable - values[config_key] = ConfigValue( - key=config_key, - value=env_value, - source_name=self.source_name, - raw_value=env_value, - ) + # Check for connection-specific pattern: SNOWFLAKE_CONNECTION__ + if env_name.startswith("SNOWFLAKE_CONNECTION_"): + # Extract connection name and config key + remainder = env_name[len("SNOWFLAKE_CONNECTION_") :] + parts = remainder.split("_", 1) + if len(parts) == 2: + conn_name_upper, config_key_upper = parts + conn_name = conn_name_upper.lower() + config_key = config_key_upper.lower() + + if config_key in self.CONFIG_KEYS: + full_key = f"connections.{conn_name}.{config_key}" + if key is None or full_key == key: + values[full_key] = ConfigValue( + key=full_key, + value=env_value, + source_name=self.source_name, + raw_value=f"{env_name}={env_value}", + ) + + # Check for general pattern: SNOWFLAKE_ + else: + config_key_upper = env_name[len("SNOWFLAKE_") :] + config_key = config_key_upper.lower() 
+ + if config_key in self.CONFIG_KEYS: + if key is None or config_key == key: + values[config_key] = ConfigValue( + key=config_key, + value=env_value, + source_name=self.source_name, + raw_value=f"{env_name}={env_value}", + ) return values def supports_key(self, key: str) -> bool: - if key not in self.CONFIG_KEYS: - return False + discovered = self.discover() + if key in discovered: + return True # Check general var if os.getenv(f"SNOWFLAKE_{key.upper()}") is not None: diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index fc71e414c8..6b4102a52e 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -154,30 +154,28 @@ def _ensure_initialized(self) -> None: SnowSQLEnvironment, ) - # Get CLI context and connection name safely + # Get CLI context safely try: cli_context = get_cli_context().connection_context cli_context_dict = cli_context.present_values_as_dict() - connection_name = cli_context_dict.get("connection", "default") except Exception: cli_context_dict = {} - connection_name = "default" # Create sources in precedence order (lowest to highest priority) - # Order: SnowSQL config -> CLI config -> connections.toml -> - # SnowSQL env -> CLI env -> CLI arguments + # File sources return keys: connections.{name}.{param} + # Env/CLI sources return flat keys: account, user, etc. sources = [ # 1. SnowSQL config files (lowest priority, merged) - SnowSQLConfigFile(connection_name=connection_name), + SnowSQLConfigFile(), # 2. CLI config.toml (first-found behavior) - CliConfigFile(connection_name=connection_name), + CliConfigFile(), # 3. Dedicated connections.toml - ConnectionsConfigFile(connection_name=connection_name), + ConnectionsConfigFile(), # 4. SnowSQL environment variables (SNOWSQL_*) SnowSQLEnvironment(), - # 5. CLI environment variables (SNOWFLAKE_* and SNOWFLAKE_CONNECTION_*) - CliEnvironment(connection_name=connection_name), + # 5. 
CLI environment variables (SNOWFLAKE_*) + CliEnvironment(), # 6. CLI command-line arguments (highest priority) CliParameters(cli_context=cli_context_dict), ] @@ -329,6 +327,10 @@ def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: """ Get connection configuration by name. + Merges two types of keys: + 1. Connection-specific: connections.{name}.{param} (from files) + 2. Flat keys: {param} (from env/CLI, applies to active connection) + Args: connection_name: Name of the connection @@ -341,16 +343,21 @@ def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: assert self._resolver is not None self._config_cache = self._resolver.resolve() - # Look for keys like "connections.{connection_name}.{param}" - connection_prefix = f"connections.{connection_name}." connection_dict: Dict[str, Any] = {} + # First, get connection-specific keys (from file sources) + connection_prefix = f"connections.{connection_name}." for key, value in self._config_cache.items(): if key.startswith(connection_prefix): # Extract parameter name param_name = key[len(connection_prefix) :] connection_dict[param_name] = value + # Then, overlay flat keys (from env/CLI sources) - these have higher priority + for key, value in self._config_cache.items(): + if "." not in key: # Flat key like "account", "user" + connection_dict[key] = value + if not connection_dict: from snowflake.cli.api.exceptions import MissingConfigurationError diff --git a/src/snowflake/cli/api/connections.py b/src/snowflake/cli/api/connections.py index 671c22e55d..745dc21d94 100644 --- a/src/snowflake/cli/api/connections.py +++ b/src/snowflake/cli/api/connections.py @@ -137,10 +137,16 @@ def validate_schema(self, value: Optional[str]): def validate_and_complete(self): """ Ensure we can create a connection from this context. + Loads connection parameters from config if not already set. 
""" if not self.temporary_connection and not self.connection_name: self.connection_name = get_default_connection_name() + # Load connection parameters from config if we have a connection_name + # and haven't loaded them yet (e.g., user is still None) + if self.connection_name and not self.user: + self.update_from_config() + def build_connection(self): from snowflake.cli._app.snow_connector import connect_to_snowflake @@ -153,7 +159,20 @@ def build_connection(self): module="snowflake.connector.config_manager", ) - return connect_to_snowflake(**self.present_values_as_dict()) + # Get connection parameters but exclude connection_name + # The Snowflake connector validates connection_name against its own + # config manager (only reads config.toml), which doesn't know about + # connections from SnowSQL config files. We handle connection resolution + # ourselves, so don't let the connector validate it. + conn_params = self.present_values_as_dict() + conn_params.pop("connection_name", None) + + # If we removed connection_name, mark as temporary_connection + # so the connector doesn't require it + if "connection_name" not in conn_params: + conn_params["temporary_connection"] = True + + return connect_to_snowflake(**conn_params) class OpenConnectionCache: diff --git a/tests/config_ng/conftest.py b/tests/config_ng/conftest.py index c45a8685a6..f17df1a9ef 100644 --- a/tests/config_ng/conftest.py +++ b/tests/config_ng/conftest.py @@ -334,43 +334,37 @@ def get_resolver(self) -> "ConfigurationResolver": if self.snowsql_config_path and self.snowsql_config_path.exists(): # Create a custom SnowSQL source that reads from our test path class TestSnowSQLConfig(SnowSQLConfigFile): - def __init__(self, config_path: Path, conn_name: str): - super().__init__(connection_name=conn_name) + def __init__(self, config_path: Path): + super().__init__() self._config_files = [config_path] - sources_list.append( - TestSnowSQLConfig(self.snowsql_config_path, self.connection_name) - ) + 
sources_list.append(TestSnowSQLConfig(self.snowsql_config_path)) # 2. CLI config.toml - if configured if self.cli_config_path and self.cli_config_path.exists(): # Create a custom CLI config source that reads from our test path class TestCliConfig(CliConfigFile): - def __init__(self, config_path: Path, conn_name: str): - super().__init__(connection_name=conn_name) + def __init__(self, config_path: Path): + super().__init__() self._search_paths = [config_path] - sources_list.append( - TestCliConfig(self.cli_config_path, self.connection_name) - ) + sources_list.append(TestCliConfig(self.cli_config_path)) # 3. Connections.toml - if configured if self.connections_toml_path and self.connections_toml_path.exists(): # Create a custom connections source that reads from our test path class TestConnectionsConfig(ConnectionsConfigFile): - def __init__(self, config_path: Path, conn_name: str): - super().__init__(connection_name=conn_name) + def __init__(self, config_path: Path): + super().__init__() self._file_path = config_path - sources_list.append( - TestConnectionsConfig(self.connections_toml_path, self.connection_name) - ) + sources_list.append(TestConnectionsConfig(self.connections_toml_path)) # 4. SnowSQL environment variables sources_list.append(SnowSQLEnvironment()) # 5. CLI environment variables - sources_list.append(CliEnvironment(connection_name=self.connection_name)) + sources_list.append(CliEnvironment()) # 6. CLI arguments (highest priority) - if configured if self.cli_args_dict: @@ -382,11 +376,31 @@ def get_merged_config(self) -> Dict[str, Any]: """ Get the merged configuration from all sources. + Extracts connection-specific values for the configured connection. 
+ Returns: - Dictionary with resolved configuration values + Dictionary with resolved configuration values (flat keys) """ resolver = self.get_resolver() - return resolver.resolve() + all_config = resolver.resolve() + + # Extract connection-specific values similar to _get_connection_dict_internal + connection_dict: Dict[str, Any] = {} + + # First, get connection-specific keys (from file sources) + connection_prefix = f"connections.{self.connection_name}." + for key, value in all_config.items(): + if key.startswith(connection_prefix): + # Extract parameter name + param_name = key[len(connection_prefix) :] + connection_dict[param_name] = value + + # Then, overlay flat keys (from env/CLI sources) - these have higher priority + for key, value in all_config.items(): + if "." not in key: # Flat key like "account", "user" + connection_dict[key] = value + + return connection_dict @contextmanager diff --git a/tests/config_ng/test_configuration.py b/tests/config_ng/test_configuration.py index 87fcbbffbd..63f0e6ac3c 100644 --- a/tests/config_ng/test_configuration.py +++ b/tests/config_ng/test_configuration.py @@ -206,17 +206,27 @@ def test_resolution_history_tracking(self): resolver = ctx.get_resolver() config = resolver.resolve() - # Check that account was overridden + # Check that account was overridden (flat key from CLI) assert config["account"] == "cli-account" - # Check resolution history - history = resolver.get_resolution_history("account") - assert history is not None - assert len(history.entries) >= 2 # At least config file and CLI param - - # The selected entry should be from CLI - assert history.selected_entry - assert history.selected_entry.config_value.source_name == "cli_arguments" + # Also check that connection-specific key exists (from file sources) + assert config.get("connections.a.account") == "account-a" + + # Check resolution history for flat key (from CLI params) + cli_history = resolver.get_resolution_history("account") + assert cli_history is not None 
+ assert ( + len(cli_history.entries) == 1 + ) # Only CLI param provides flat "account" + assert cli_history.selected_entry + assert ( + cli_history.selected_entry.config_value.source_name == "cli_arguments" + ) + + # Check resolution history for prefixed key (from file sources) + file_history = resolver.get_resolution_history("connections.a.account") + assert file_history is not None + assert len(file_history.entries) >= 1 # Config files provide prefixed key def test_resolution_summary(self): """Test that resolution summary provides useful statistics.""" From a0290b1926c3976c940b9a2b613cb22a15b3cc31 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 11:16:28 +0200 Subject: [PATCH 19/78] SNOW-2306184: config refactor - gh workflows --- .github/workflows/test.yaml | 26 +++++++++++++++++++++++++ .github/workflows/test_integration.yaml | 20 +++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b2495b7336..40ea7cea64 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -43,3 +43,29 @@ jobs: - name: Test with hatch run: hatch run test-cov - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 + + tests-ng: + needs: define-matrix + strategy: + fail-fast: true + matrix: + os: ${{ fromJSON(needs.define-matrix.outputs.os) }} + python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} + runs-on: ${{ matrix.os }} + env: + SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install hatch + run: | + pip install -U click==8.2.1 hatch + hatch env create default + - name: Test with hatch + run: hatch run test-cov + - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 diff --git 
a/.github/workflows/test_integration.yaml b/.github/workflows/test_integration.yaml index 0ef3b78e84..2d5d9736d8 100644 --- a/.github/workflows/test_integration.yaml +++ b/.github/workflows/test_integration.yaml @@ -46,6 +46,26 @@ jobs: hatch-run: integration:test secrets: inherit + integration-trusted-ng: + needs: define-matrix + strategy: + fail-fast: false + matrix: + os: ${{ fromJSON(needs.define-matrix.outputs.os) }} + python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} + if: | + (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || + (github.event_name == 'push' && github.ref == 'refs/heads/main') + uses: ./.github/workflows/test_trusted.yaml + with: + runs-on: ${{ matrix.os }} + python-version: ${{ matrix.python-version }} + python-env: integration + hatch-run: integration:test + secrets: inherit + env: + SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 + # Repo owner has commented /ok-to-test on a (fork-based) pull request integration-fork: needs: define-matrix From 8242756f4a627d5f25767fc282bc67f478d2d923 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 11:16:59 +0200 Subject: [PATCH 20/78] SNOW-2306184: config refactor - cli env update --- src/snowflake/cli/api/config_ng/sources.py | 51 ++++++++++++++++++++-- 1 file changed, 48 insertions(+), 3 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 769f589c3e..305da90d8d 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -355,15 +355,17 @@ class CliEnvironment(ValueSource): """ CLI environment variables source. - Discovers SNOWFLAKE_* environment variables with two patterns: + Discovers SNOWFLAKE_* environment variables with three patterns: 1. General: SNOWFLAKE_ACCOUNT (applies to all connections) 2. Connection-specific: SNOWFLAKE_CONNECTION__ACCOUNT (overrides general) + 3. 
Legacy connection-specific: SNOWFLAKE_CONNECTIONS__ACCOUNT (backward compatibility) Connection-specific variables take precedence within this source. Examples: SNOWFLAKE_ACCOUNT -> account (general) SNOWFLAKE_CONNECTION_PROD_ACCOUNT -> account (for "prod" connection) + SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT -> account (for "integration" connection, legacy) SNOWFLAKE_USER -> user SNOWFLAKE_CONNECTION_DEV_USER -> user (for "dev" connection) """ @@ -382,6 +384,21 @@ class CliEnvironment(ValueSource): "port", "region", "authenticator", + "workload_identity_provider", + "private_key_file", + "private_key_path", # Used by integration tests + "private_key_raw", # Used by integration tests + "token_file_path", + "oauth_client_id", + "oauth_client_secret", + "oauth_authorization_url", + "oauth_token_request_url", + "oauth_redirect_uri", + "oauth_scope", + "oatuh_enable_pkce", + "oauth_enable_refresh_tokens", + "oauth_enable_single_use_refresh_tokens", + "client_store_temporary_credential", ] @property @@ -396,6 +413,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: Patterns: 1. SNOWFLAKE_ACCOUNT=x -> account=x (flat key) 2. SNOWFLAKE_CONNECTION_PROD_ACCOUNT=y -> connections.prod.account=y + 3. 
SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT=z -> connections.integration.account=z (legacy) """ values: Dict[str, ConfigValue] = {} @@ -424,6 +442,26 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: raw_value=f"{env_name}={env_value}", ) + # Check for legacy connection-specific pattern: SNOWFLAKE_CONNECTIONS__ + elif env_name.startswith("SNOWFLAKE_CONNECTIONS_"): + # Extract connection name and config key + remainder = env_name[len("SNOWFLAKE_CONNECTIONS_") :] + parts = remainder.split("_", 1) + if len(parts) == 2: + conn_name_upper, config_key_upper = parts + conn_name = conn_name_upper.lower() + config_key = config_key_upper.lower() + + if config_key in self.CONFIG_KEYS: + full_key = f"connections.{conn_name}.{config_key}" + if key is None or full_key == key: + values[full_key] = ConfigValue( + key=full_key, + value=env_value, + source_name=self.source_name, + raw_value=f"{env_name}={env_value}", + ) + # Check for general pattern: SNOWFLAKE_ else: config_key_upper = env_name[len("SNOWFLAKE_") :] @@ -449,14 +487,21 @@ def supports_key(self, key: str) -> bool: if os.getenv(f"SNOWFLAKE_{key.upper()}") is not None: return True - # Check connection-specific var - if self._connection_name: + # Check connection-specific var (new pattern) + if hasattr(self, "_connection_name") and self._connection_name: conn_var = ( f"SNOWFLAKE_CONNECTION_{self._connection_name.upper()}_{key.upper()}" ) if os.getenv(conn_var) is not None: return True + # Check legacy connection-specific var (legacy pattern) + legacy_conn_var = ( + f"SNOWFLAKE_CONNECTIONS_{self._connection_name.upper()}_{key.upper()}" + ) + if os.getenv(legacy_conn_var) is not None: + return True + return False From 2ab550289eb76c32bee93a643bbc17dcd302ae90 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 13:54:45 +0200 Subject: [PATCH 21/78] SNOW-2306184: config refactor - snowsql env support --- src/snowflake/cli/api/config.py | 6 ++- src/snowflake/cli/api/config_ng/sources.py | 6 
++- src/snowflake/cli/api/config_provider.py | 53 +++++++++++++++++++++- src/snowflake/cli/api/connections.py | 1 + 4 files changed, 62 insertions(+), 4 deletions(-) diff --git a/src/snowflake/cli/api/config.py b/src/snowflake/cli/api/config.py index 4cb7f16a7b..af676c5dd3 100644 --- a/src/snowflake/cli/api/config.py +++ b/src/snowflake/cli/api/config.py @@ -99,6 +99,10 @@ class ConnectionConfig: authenticator: Optional[str] = None workload_identity_provider: Optional[str] = None private_key_file: Optional[str] = None + private_key_passphrase: Optional[str] = field(default=None, repr=False) + token: Optional[str] = field(default=None, repr=False) + session_token: Optional[str] = field(default=None, repr=False) + master_token: Optional[str] = field(default=None, repr=False) token_file_path: Optional[str] = None oauth_client_id: Optional[str] = None oauth_client_secret: Optional[str] = None @@ -106,7 +110,7 @@ class ConnectionConfig: oauth_token_request_url: Optional[str] = None oauth_redirect_uri: Optional[str] = None oauth_scope: Optional[str] = None - oatuh_enable_pkce: Optional[bool] = None + oauth_enable_pkce: Optional[bool] = None oauth_enable_refresh_tokens: Optional[bool] = None oauth_enable_single_use_refresh_tokens: Optional[bool] = None client_store_temporary_credential: Optional[bool] = None diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 305da90d8d..ce9567e91c 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -388,6 +388,10 @@ class CliEnvironment(ValueSource): "private_key_file", "private_key_path", # Used by integration tests "private_key_raw", # Used by integration tests + "private_key_passphrase", # Private key passphrase for encrypted keys + "token", # OAuth token + "session_token", # Session token for session-based authentication + "master_token", # Master token for advanced authentication "token_file_path", "oauth_client_id", 
"oauth_client_secret", @@ -395,7 +399,7 @@ class CliEnvironment(ValueSource): "oauth_token_request_url", "oauth_redirect_uri", "oauth_scope", - "oatuh_enable_pkce", + "oauth_enable_pkce", # Fixed typo: was "oatuh_enable_pkce" "oauth_enable_refresh_tokens", "oauth_enable_single_use_refresh_tokens", "client_store_temporary_credential", diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 6b4102a52e..eedc57a602 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -70,6 +70,53 @@ def get_all_connections(self) -> dict: """Get all connection configurations.""" ... + def _transform_private_key_raw(self, connection_dict: dict) -> dict: + """ + Transform private_key_raw to private_key_file for ConnectionContext compatibility. + + The ConnectionContext dataclass doesn't have a private_key_raw field, so it gets + filtered out by merge_with_config. To work around this, we write private_key_raw + content to a temporary file and return it as private_key_file. 
+ + Args: + connection_dict: Connection configuration dictionary + + Returns: + Modified connection dictionary with private_key_raw transformed to private_key_file + """ + if "private_key_raw" not in connection_dict: + return connection_dict + + # Don't transform if private_key_file is already set + if "private_key_file" in connection_dict: + return connection_dict + + import os + import tempfile + + try: + # Create a temporary file with the private key content + with tempfile.NamedTemporaryFile( + mode="w", suffix=".pem", delete=False + ) as f: + f.write(connection_dict["private_key_raw"]) + temp_file_path = f.name + + # Set restrictive permissions on the temporary file + os.chmod(temp_file_path, 0o600) + + # Create a copy of the connection dict with the transformation + result = connection_dict.copy() + result["private_key_file"] = temp_file_path + del result["private_key_raw"] + + return result + + except Exception: + # If transformation fails, return original dict + # The error will be handled downstream + return connection_dict + class LegacyConfigProvider(ConfigProvider): """ @@ -113,7 +160,8 @@ def read_config(self) -> None: def get_connection_dict(self, connection_name: str) -> dict: from snowflake.cli.api.config import get_connection_dict - return get_connection_dict(connection_name) + result = get_connection_dict(connection_name) + return self._transform_private_key_raw(result) def get_all_connections(self) -> dict: from snowflake.cli.api.config import get_all_connections @@ -377,7 +425,8 @@ def get_connection_dict(self, connection_name: str) -> dict: Returns: Dictionary of connection parameters """ - return self._get_connection_dict_internal(connection_name) + result = self._get_connection_dict_internal(connection_name) + return self._transform_private_key_raw(result) def _get_all_connections_dict(self) -> Dict[str, Dict[str, Any]]: """ diff --git a/src/snowflake/cli/api/connections.py b/src/snowflake/cli/api/connections.py index 745dc21d94..3a1d97c6d2 
100644 --- a/src/snowflake/cli/api/connections.py +++ b/src/snowflake/cli/api/connections.py @@ -47,6 +47,7 @@ class ConnectionContext: authenticator: Optional[str] = None workload_identity_provider: Optional[str] = None private_key_file: Optional[str] = None + private_key_passphrase: Optional[str] = field(default=None, repr=False) warehouse: Optional[str] = None mfa_passcode: Optional[str] = None token: Optional[str] = None From aa7853b50fdd35baeee5bcae9ebb98bd6d80439c Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 14:40:15 +0200 Subject: [PATCH 22/78] SNOW-2306184: config refactor - restore temp conn --- src/snowflake/cli/api/connections.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/src/snowflake/cli/api/connections.py b/src/snowflake/cli/api/connections.py index 3a1d97c6d2..c441979491 100644 --- a/src/snowflake/cli/api/connections.py +++ b/src/snowflake/cli/api/connections.py @@ -160,18 +160,9 @@ def build_connection(self): module="snowflake.connector.config_manager", ) - # Get connection parameters but exclude connection_name - # The Snowflake connector validates connection_name against its own - # config manager (only reads config.toml), which doesn't know about - # connections from SnowSQL config files. We handle connection resolution - # ourselves, so don't let the connector validate it. 
+ # Get connection parameters and pass them directly to connect_to_snowflake + # This restores the original behavior before the change that enforced temporary_connection conn_params = self.present_values_as_dict() - conn_params.pop("connection_name", None) - - # If we removed connection_name, mark as temporary_connection - # so the connector doesn't require it - if "connection_name" not in conn_params: - conn_params["temporary_connection"] = True return connect_to_snowflake(**conn_params) From 1ced02d02c2e8bf0fea7ebf322c799e3632d6505 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 14:45:52 +0200 Subject: [PATCH 23/78] SNOW-2306184: config refactor - tests-ng fix --- .github/workflows/test.yaml | 50 ++++++++++++++++++------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 40ea7cea64..01d9b70f28 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -12,7 +12,7 @@ on: - features/* env: - TERM: unknown # Disables colors in rich + TERM: unknown # Disables colors in rich permissions: contents: read @@ -45,27 +45,27 @@ jobs: - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 tests-ng: - needs: define-matrix - strategy: - fail-fast: true - matrix: - os: ${{ fromJSON(needs.define-matrix.outputs.os) }} - python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} - runs-on: ${{ matrix.os }} - env: - SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Install hatch - run: | - pip install -U click==8.2.1 hatch - hatch env create default - - name: Test with hatch - run: hatch run test-cov - - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 + needs: define-matrix + strategy: + fail-fast: true + 
matrix: + os: ${{ fromJSON(needs.define-matrix.outputs.os) }} + python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install hatch + run: | + pip install -U click==8.2.1 hatch + hatch env create default + - name: Test with hatch + run: hatch run test-cov + - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 + env: + SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 From 1b13208869f78c78653223d943377702b6c638d6 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 15:05:16 +0200 Subject: [PATCH 24/78] SNOW-2306184: config refactor - integrations ng --- .github/workflows/test_integration.yaml | 21 ------------------- .github/workflows/test_trusted.yaml | 28 +++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 21 deletions(-) diff --git a/.github/workflows/test_integration.yaml b/.github/workflows/test_integration.yaml index 2d5d9736d8..07ef4ace7e 100644 --- a/.github/workflows/test_integration.yaml +++ b/.github/workflows/test_integration.yaml @@ -46,27 +46,6 @@ jobs: hatch-run: integration:test secrets: inherit - integration-trusted-ng: - needs: define-matrix - strategy: - fail-fast: false - matrix: - os: ${{ fromJSON(needs.define-matrix.outputs.os) }} - python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} - if: | - (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || - (github.event_name == 'push' && github.ref == 'refs/heads/main') - uses: ./.github/workflows/test_trusted.yaml - with: - runs-on: ${{ matrix.os }} - python-version: ${{ matrix.python-version }} - python-env: integration - hatch-run: integration:test - secrets: inherit - env: - SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 - - # Repo owner has commented 
/ok-to-test on a (fork-based) pull request integration-fork: needs: define-matrix strategy: diff --git a/.github/workflows/test_trusted.yaml b/.github/workflows/test_trusted.yaml index 20e12e0f32..a101d03631 100644 --- a/.github/workflows/test_trusted.yaml +++ b/.github/workflows/test_trusted.yaml @@ -46,3 +46,31 @@ jobs: SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }} SNOWFLAKE_CONNECTIONS_INTEGRATION_PRIVATE_KEY_RAW: ${{ secrets.SNOWFLAKE_PRIVATE_KEY_RAW }} run: python -m hatch run ${{ inputs.hatch-run }} + + tests-trusted-ng: + runs-on: ${{ inputs.runs-on }} + env: + SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip click==8.2.1 hatch + python -m hatch env create ${{ inputs.python-env }} + - name: Run integration tests + env: + GH_TOKEN: ${{ secrets.SNOWFLAKE_GITHUB_TOKEN }} + TERM: unknown + SNOWFLAKE_CONNECTIONS_INTEGRATION_AUTHENTICATOR: SNOWFLAKE_JWT + SNOWFLAKE_CONNECTIONS_INTEGRATION_HOST: ${{ secrets.SNOWFLAKE_HOST }} + SNOWFLAKE_CONNECTIONS_INTEGRATION_USER: ${{ secrets.SNOWFLAKE_USER }} + SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT: ${{ secrets.SNOWFLAKE_ACCOUNT }} + SNOWFLAKE_CONNECTIONS_INTEGRATION_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }} + SNOWFLAKE_CONNECTIONS_INTEGRATION_PRIVATE_KEY_RAW: ${{ secrets.SNOWFLAKE_PRIVATE_KEY_RAW }} + run: python -m hatch run ${{ inputs.hatch-run }} From c04a7ccc83499931324272c5d5caef23fd8c3d00 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 9 Oct 2025 20:58:40 +0200 Subject: [PATCH 25/78] SNOW-2306184: config refactor - old & new unit tests pass --- src/snowflake/cli/api/config_ng/sources.py | 122 +++++++++++++++++---- src/snowflake/cli/api/config_provider.py | 83 +++++++++++--- src/snowflake/cli/api/connections.py | 51 +++++++-- 
tests/conftest.py | 5 + 4 files changed, 214 insertions(+), 47 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index ce9567e91c..04bd941c1b 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -53,13 +53,16 @@ class SnowSQLConfigFile(ValueSource): Later files override earlier files for the same keys. Returns configuration for ALL connections. - Config files searched (in order): + Config files searched (in order, when not in test mode): 1. Bundled default config (if in package) 2. /etc/snowsql.cnf (system-wide) 3. /etc/snowflake/snowsql.cnf (alternative system) 4. /usr/local/etc/snowsql.cnf (local system) 5. ~/.snowsql.cnf (legacy user config) 6. ~/.snowsql/config (current user config) + + In test mode (when config_file_override is set), SnowSQL config files are skipped + to ensure test isolation. """ # SnowSQL uses different key names - map them to CLI standard names @@ -87,13 +90,31 @@ class SnowSQLConfigFile(ValueSource): def __init__(self): """Initialize SnowSQL config file source.""" - self._config_files = [ - Path("/etc/snowsql.cnf"), - Path("/etc/snowflake/snowsql.cnf"), - Path("/usr/local/etc/snowsql.cnf"), - Path.home() / ".snowsql.cnf", - Path.home() / ".snowsql" / "config", - ] + # Use SNOWFLAKE_HOME if set and directory exists, otherwise use standard paths + snowflake_home = os.environ.get("SNOWFLAKE_HOME") + if snowflake_home: + snowflake_home_path = Path(snowflake_home).expanduser() + if snowflake_home_path.exists(): + # Use only the SnowSQL config file within SNOWFLAKE_HOME + self._config_files = [snowflake_home_path / "config"] + else: + # SNOWFLAKE_HOME set but doesn't exist, use standard paths + self._config_files = [ + Path("/etc/snowsql.cnf"), + Path("/etc/snowflake/snowsql.cnf"), + Path("/usr/local/etc/snowsql.cnf"), + Path.home() / ".snowsql.cnf", + Path.home() / ".snowsql" / "config", + ] + else: + # Standard paths when 
SNOWFLAKE_HOME not set + self._config_files = [ + Path("/etc/snowsql.cnf"), + Path("/etc/snowflake/snowsql.cnf"), + Path("/usr/local/etc/snowsql.cnf"), + Path.home() / ".snowsql.cnf", + Path.home() / ".snowsql" / "config", + ] @property def source_name(self) -> str: @@ -162,17 +183,46 @@ class CliConfigFile(ValueSource): Does NOT merge multiple files - first found wins. Returns configuration for ALL connections. - Search order: + Search order (when no override is set): 1. ./config.toml (current directory) 2. ~/.snowflake/config.toml (user config) + + When config_file_override is set (e.g., in tests), only that file is used. """ def __init__(self): """Initialize CLI config file source.""" - self._search_paths = [ - Path.cwd() / "config.toml", - Path.home() / ".snowflake" / "config.toml", - ] + # Check for config file override from CLI context first + try: + from snowflake.cli.api.cli_global_context import get_cli_context + + cli_context = get_cli_context() + config_override = cli_context.config_file_override + if config_override: + self._search_paths = [Path(config_override)] + return + except Exception: + pass + + # Use SNOWFLAKE_HOME if set and directory exists, otherwise use standard paths + snowflake_home = os.environ.get("SNOWFLAKE_HOME") + if snowflake_home: + snowflake_home_path = Path(snowflake_home).expanduser() + if snowflake_home_path.exists(): + # Use only config.toml within SNOWFLAKE_HOME + self._search_paths = [snowflake_home_path / "config.toml"] + else: + # SNOWFLAKE_HOME set but doesn't exist, use standard paths + self._search_paths = [ + Path.cwd() / "config.toml", + Path.home() / ".snowflake" / "config.toml", + ] + else: + # Standard paths when SNOWFLAKE_HOME not set + self._search_paths = [ + Path.cwd() / "config.toml", + Path.home() / ".snowflake" / "config.toml", + ] @property def source_name(self) -> str: @@ -234,7 +284,16 @@ class ConnectionsConfigFile(ValueSource): def __init__(self): """Initialize connections.toml source.""" - 
self._file_path = Path.home() / ".snowflake" / "connections.toml" + # Use SNOWFLAKE_HOME if set and directory exists, otherwise use standard path + snowflake_home = os.environ.get("SNOWFLAKE_HOME") + if snowflake_home: + snowflake_home_path = Path(snowflake_home).expanduser() + if snowflake_home_path.exists(): + self._file_path = snowflake_home_path / "connections.toml" + else: + self._file_path = Path.home() / ".snowflake" / "connections.toml" + else: + self._file_path = Path.home() / ".snowflake" / "connections.toml" @property def source_name(self) -> str: @@ -244,6 +303,15 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Read connections.toml if it exists. Returns keys in format: connections.{name}.{param} for ALL connections. + + Supports both legacy formats: + 1. Direct connection sections (legacy): + [default] + database = "value" + + 2. Nested under [connections] section: + [connections.default] + database = "value" """ if not self._file_path.exists(): return {} @@ -253,12 +321,13 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: data = tomllib.load(f) result = {} - connections = data.get("connections", {}) - for conn_name, conn_data in connections.items(): - if isinstance(conn_data, dict): - for param_key, param_value in conn_data.items(): - full_key = f"connections.{conn_name}.{param_key}" + # Check for direct connection sections (legacy format) + for section_name, section_data in data.items(): + if isinstance(section_data, dict) and section_name != "connections": + # This is a direct connection section like [default] + for param_key, param_value in section_data.items(): + full_key = f"connections.{section_name}.{param_key}" if key is None or full_key == key: result[full_key] = ConfigValue( key=full_key, @@ -267,6 +336,21 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: raw_value=param_value, ) + # Check for nested [connections] section format + connections_section = 
data.get("connections", {}) + if isinstance(connections_section, dict): + for conn_name, conn_data in connections_section.items(): + if isinstance(conn_data, dict): + for param_key, param_value in conn_data.items(): + full_key = f"connections.{conn_name}.{param_key}" + if key is None or full_key == key: + result[full_key] = ConfigValue( + key=full_key, + value=param_value, + source_name=self.source_name, + raw_value=param_value, + ) + return result except Exception as e: diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index eedc57a602..cba0086435 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -158,10 +158,17 @@ def read_config(self) -> None: config_manager.read_config() def get_connection_dict(self, connection_name: str) -> dict: - from snowflake.cli.api.config import get_connection_dict + from snowflake.cli.api.config import get_config_section - result = get_connection_dict(connection_name) - return self._transform_private_key_raw(result) + try: + result = get_config_section("connections", connection_name) + return self._transform_private_key_raw(result) + except KeyError: + from snowflake.cli.api.exceptions import MissingConfigurationError + + raise MissingConfigurationError( + f"Connection {connection_name} is not configured" + ) def get_all_connections(self) -> dict: from snowflake.cli.api.config import get_all_connections @@ -375,15 +382,16 @@ def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: """ Get connection configuration by name. - Merges two types of keys: - 1. Connection-specific: connections.{name}.{param} (from files) - 2. 
Flat keys: {param} (from env/CLI, applies to active connection) + Behavior is controlled by SNOWFLAKE_CLI_CONNECTIONS_TOML_REPLACE environment variable: + - If set to "true" (default): connections.toml completely replaces connections + from config.toml (legacy behavior) + - If set to "false": connections.toml values are merged with config.toml values Args: connection_name: Name of the connection Returns: - Dictionary of connection parameters + Dictionary of connection parameters from file sources only """ self._ensure_initialized() @@ -392,19 +400,58 @@ def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: self._config_cache = self._resolver.resolve() connection_dict: Dict[str, Any] = {} - - # First, get connection-specific keys (from file sources) connection_prefix = f"connections.{connection_name}." - for key, value in self._config_cache.items(): - if key.startswith(connection_prefix): - # Extract parameter name - param_name = key[len(connection_prefix) :] - connection_dict[param_name] = value - # Then, overlay flat keys (from env/CLI sources) - these have higher priority - for key, value in self._config_cache.items(): - if "." 
not in key: # Flat key like "account", "user" - connection_dict[key] = value + # Check if replacement behavior is enabled (default: true for backward compatibility) + import os + + replace_behavior = os.environ.get( + "SNOWFLAKE_CLI_CONNECTIONS_TOML_REPLACE", "true" + ).lower() in ("true", "1", "yes", "on") + + if replace_behavior: + # Legacy replacement behavior: if connections.toml has the connection, + # use ONLY values from connections.toml + has_connections_toml = False + if self._resolver is not None: + for key in self._config_cache.keys(): + if key.startswith(connection_prefix): + # Check resolution history to see if this came from connections.toml + history = self._resolver.get_resolution_history(key) + if history and history.selected_entry: + if ( + history.selected_entry.config_value.source_name + == "connections_toml" + ): + has_connections_toml = True + break + + if has_connections_toml: + # Use ONLY connections.toml values (replacement behavior) + for key, value in self._config_cache.items(): + if key.startswith(connection_prefix): + # Check if this specific value comes from connections.toml + if self._resolver is not None: + history = self._resolver.get_resolution_history(key) + if history and history.selected_entry: + if ( + history.selected_entry.config_value.source_name + == "connections_toml" + ): + param_name = key[len(connection_prefix) :] + connection_dict[param_name] = value + else: + # No connections.toml, use merged values from other sources + for key, value in self._config_cache.items(): + if key.startswith(connection_prefix): + param_name = key[len(connection_prefix) :] + connection_dict[param_name] = value + else: + # New merging behavior: merge all sources normally + for key, value in self._config_cache.items(): + if key.startswith(connection_prefix): + param_name = key[len(connection_prefix) :] + connection_dict[param_name] = value if not connection_dict: from snowflake.cli.api.exceptions import MissingConfigurationError diff --git 
a/src/snowflake/cli/api/connections.py b/src/snowflake/cli/api/connections.py index c441979491..671d9db3d8 100644 --- a/src/snowflake/cli/api/connections.py +++ b/src/snowflake/cli/api/connections.py @@ -69,11 +69,18 @@ class ConnectionContext: oauth_enable_single_use_refresh_tokens: Optional[bool] = None client_store_temporary_credential: Optional[bool] = None + # Internal flag to track if config has been loaded + _config_loaded: bool = field(default=False, repr=False, init=False) + VALIDATED_FIELD_NAMES = ["schema"] def present_values_as_dict(self) -> dict: """Dictionary representation of this ConnectionContext for values that are not None""" - return {k: v for (k, v) in asdict(self).items() if v is not None} + return { + k: v + for (k, v) in asdict(self).items() + if v is not None and not k.startswith("_") + } def clone(self) -> ConnectionContext: return replace(self) @@ -112,6 +119,7 @@ def update_from_config(self) -> ConnectionContext: del connection_config["private_key_path"] self.merge_with_config(**connection_config) + self._config_loaded = True return self def __repr__(self) -> str: @@ -138,16 +146,12 @@ def validate_schema(self, value: Optional[str]): def validate_and_complete(self): """ Ensure we can create a connection from this context. - Loads connection parameters from config if not already set. + Sets default connection name if needed, but does not load configuration. + Configuration is loaded lazily in build_connection(). 
""" if not self.temporary_connection and not self.connection_name: self.connection_name = get_default_connection_name() - # Load connection parameters from config if we have a connection_name - # and haven't loaded them yet (e.g., user is still None) - if self.connection_name and not self.user: - self.update_from_config() - def build_connection(self): from snowflake.cli._app.snow_connector import connect_to_snowflake @@ -160,9 +164,36 @@ def build_connection(self): module="snowflake.connector.config_manager", ) - # Get connection parameters and pass them directly to connect_to_snowflake - # This restores the original behavior before the change that enforced temporary_connection - conn_params = self.present_values_as_dict() + if self.temporary_connection: + # For temporary connections, pass all parameters + # connect_to_snowflake will use these directly without loading config + conn_params = self.present_values_as_dict() + else: + # For named connections, pass connection_name and all override parameters + # connect_to_snowflake will load the connection config internally and apply overrides + all_params = self.present_values_as_dict() + control_params = { + "connection_name", + "enable_diag", + "diag_log_path", + "diag_allowlist_path", + "temporary_connection", + "mfa_passcode", + } + + # Separate control parameters from connection overrides + conn_params = {} + overrides = {} + + for k, v in all_params.items(): + if k in control_params: + conn_params[k] = v + else: + # These are connection parameters that should override config values + overrides[k] = v + + # Merge overrides into conn_params + conn_params.update(overrides) return connect_to_snowflake(**conn_params) diff --git a/tests/conftest.py b/tests/conftest.py index c36de67ae3..0390868308 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -106,6 +106,11 @@ def os_agnostic_snapshot(snapshot): def reset_global_context_and_setup_config_and_logging_levels( request, test_snowcli_config ): + # Reset config 
provider singleton to prevent test interference + from snowflake.cli.api.config_provider import reset_config_provider + + reset_config_provider() + with fork_cli_context(): connection_cache = OpenConnectionCache() cli_context_manager = get_cli_context_manager() From 005e3db42b6a5dc7a2b747a699619ff516d8456d Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 09:35:53 +0200 Subject: [PATCH 26/78] SNOW-2306184: config refactor - old & new unit tests --- tests_integration/conftest.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index 4a19c0c829..965fed0990 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -120,6 +120,12 @@ def invoke(self, *a, **kw): kw.update(catch_exceptions=False) kw = self._with_env_vars(kw) + # Reset config provider to ensure fresh config resolution + # This is critical for tests that set environment variables + from snowflake.cli.api.config_provider import reset_config_provider + + reset_config_provider() + # between every invocation, we need to reset the CLI context # and ensure no connections are cached going forward (to prevent # test cases from impacting each other / align with CLI usage) From 2ae42389e58c3b201a60da2f3e30ab4d4d029f68 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 12:10:36 +0200 Subject: [PATCH 27/78] SNOW-2306184: config refactor - plugin tests in JSON format --- src/snowflake/cli/api/config_ng/sources.py | 52 +- .../plugin_tests/logs/snowflake-cli.log | 2760 +++++++++++++++++ .../plugin/test_broken_plugin.py | 24 +- .../plugin/test_failing_plugin.py | 24 +- .../test_override_by_external_plugins.py | 46 +- 5 files changed, 2825 insertions(+), 81 deletions(-) create mode 100644 tests_integration/config/plugin_tests/logs/snowflake-cli.log diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 04bd941c1b..cfd73726aa 100644 --- 
a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -20,7 +20,7 @@ - CLI configuration files (TOML format, first-found) - Connections configuration files (dedicated connections.toml) - SnowSQL environment variables (SNOWSQL_* prefix) -- CLI environment variables (SNOWFLAKE_* and SNOWFLAKE_CONNECTION_* patterns) +- CLI environment variables (SNOWFLAKE_* patterns) - CLI command-line parameters Precedence is determined by the order sources are provided to the resolver. @@ -439,19 +439,17 @@ class CliEnvironment(ValueSource): """ CLI environment variables source. - Discovers SNOWFLAKE_* environment variables with three patterns: + Discovers SNOWFLAKE_* environment variables with two patterns: 1. General: SNOWFLAKE_ACCOUNT (applies to all connections) - 2. Connection-specific: SNOWFLAKE_CONNECTION__ACCOUNT (overrides general) - 3. Legacy connection-specific: SNOWFLAKE_CONNECTIONS__ACCOUNT (backward compatibility) + 2. Connection-specific: SNOWFLAKE_CONNECTIONS__ACCOUNT (overrides general) Connection-specific variables take precedence within this source. Examples: SNOWFLAKE_ACCOUNT -> account (general) - SNOWFLAKE_CONNECTION_PROD_ACCOUNT -> account (for "prod" connection) - SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT -> account (for "integration" connection, legacy) + SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT -> account (for "integration" connection) SNOWFLAKE_USER -> user - SNOWFLAKE_CONNECTION_DEV_USER -> user (for "dev" connection) + SNOWFLAKE_CONNECTIONS_DEV_USER -> user (for "dev" connection) """ # Base configuration keys that can be set via environment @@ -500,8 +498,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: Patterns: 1. SNOWFLAKE_ACCOUNT=x -> account=x (flat key) - 2. SNOWFLAKE_CONNECTION_PROD_ACCOUNT=y -> connections.prod.account=y - 3. SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT=z -> connections.integration.account=z (legacy) + 2. 
SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT=y -> connections.integration.account=y """ values: Dict[str, ConfigValue] = {} @@ -510,28 +507,8 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: if not env_name.startswith("SNOWFLAKE_"): continue - # Check for connection-specific pattern: SNOWFLAKE_CONNECTION__ - if env_name.startswith("SNOWFLAKE_CONNECTION_"): - # Extract connection name and config key - remainder = env_name[len("SNOWFLAKE_CONNECTION_") :] - parts = remainder.split("_", 1) - if len(parts) == 2: - conn_name_upper, config_key_upper = parts - conn_name = conn_name_upper.lower() - config_key = config_key_upper.lower() - - if config_key in self.CONFIG_KEYS: - full_key = f"connections.{conn_name}.{config_key}" - if key is None or full_key == key: - values[full_key] = ConfigValue( - key=full_key, - value=env_value, - source_name=self.source_name, - raw_value=f"{env_name}={env_value}", - ) - - # Check for legacy connection-specific pattern: SNOWFLAKE_CONNECTIONS__ - elif env_name.startswith("SNOWFLAKE_CONNECTIONS_"): + # Check for connection-specific pattern: SNOWFLAKE_CONNECTIONS__ + if env_name.startswith("SNOWFLAKE_CONNECTIONS_"): # Extract connection name and config key remainder = env_name[len("SNOWFLAKE_CONNECTIONS_") :] parts = remainder.split("_", 1) @@ -551,7 +528,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: ) # Check for general pattern: SNOWFLAKE_ - else: + elif not env_name.startswith("SNOWFLAKE_CONNECTIONS_"): config_key_upper = env_name[len("SNOWFLAKE_") :] config_key = config_key_upper.lower() @@ -575,19 +552,12 @@ def supports_key(self, key: str) -> bool: if os.getenv(f"SNOWFLAKE_{key.upper()}") is not None: return True - # Check connection-specific var (new pattern) + # Check connection-specific var if hasattr(self, "_connection_name") and self._connection_name: conn_var = ( - f"SNOWFLAKE_CONNECTION_{self._connection_name.upper()}_{key.upper()}" - ) - if os.getenv(conn_var) is not None: 
- return True - - # Check legacy connection-specific var (legacy pattern) - legacy_conn_var = ( f"SNOWFLAKE_CONNECTIONS_{self._connection_name.upper()}_{key.upper()}" ) - if os.getenv(legacy_conn_var) is not None: + if os.getenv(conn_var) is not None: return True return False diff --git a/tests_integration/config/plugin_tests/logs/snowflake-cli.log b/tests_integration/config/plugin_tests/logs/snowflake-cli.log new file mode 100644 index 0000000000..0c36c105d0 --- /dev/null +++ b/tests_integration/config/plugin_tests/logs/snowflake-cli.log @@ -0,0 +1,2760 @@ +2025-10-10 09:44:38 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 09:44:38 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 09:44:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 09:44:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 09:50:55 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 09:50:55 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 09:50:57 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 09:50:57 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:06:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:06:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:06:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:06:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:06:09 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in 
plugin +2025-10-10 10:06:11 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:06:13 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:06:13 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:06:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:06:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:06:50 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:06:50 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:06:52 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:06:52 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:06:54 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:06:54 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:06:56 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:06:56 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:06:58 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:07:01 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:07:09 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:07:09 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:07:10 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:07:10 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:07:14 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:07:14 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:07:16 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:07:16 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:07:18 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:07:20 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:08:25 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:08:25 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:08:27 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:08:27 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:08:29 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:08:32 ERROR 
[snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:08:34 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external 
plugin: broken_plugin +2025-10-10 10:08:34 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:08:36 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:08:36 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:09:09 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:09:09 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:09:11 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:09:11 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:09:13 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:09:15 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:09:18 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:09:18 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:09:21 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:09:21 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:09:37 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:09:40 ERROR 
[snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:09:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external 
plugin: broken_plugin +2025-10-10 10:09:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:09:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:09:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:09:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:09:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:09:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:09:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:10:24 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in 
plugin +2025-10-10 10:10:26 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:10:28 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:10:28 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:10:30 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:10:30 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:10:33 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:10:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:10:35 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:10:35 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:18:18 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in 
plugin +2025-10-10 10:18:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:18:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:18:22 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:18:22 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:18:33 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:18:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:18:35 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:18:35 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:18:37 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:18:37 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:18:39 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:18:39 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:18:41 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:18:43 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:19:35 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in 
_gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:19:37 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in 
_find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:19:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:19:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:19:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:19:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:19:45 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:19:45 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:19:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:19:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:41:06 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:41:06 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:41:08 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:41:08 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:45:18 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:45:18 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:45:21 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:45:21 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:45:23 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return 
_bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:48:13 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:48:13 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:48:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:48:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:48:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:48:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:48:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:48:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:48:22 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:48:24 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:48:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:48:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:48:49 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:48:49 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:49:01 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:49:01 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:49:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:49:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:49:17 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load + module = import_module(self.module) + File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + File "", line 1050, in _gcd_import + File "", line 1027, in _find_and_load + File "", line 1006, in _find_and_load_unlocked + File "", line 688, in _load_unlocked + File "", line 883, in exec_module + File "", line 241, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:49:19 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + 
self._plugin_manager.load_setuptools_entrypoints( + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load + module = import_module(self.module) + File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + File "", line 1050, in _gcd_import + File "", line 1027, in _find_and_load + File "", line 1006, in _find_and_load_unlocked + File "", line 688, in _load_unlocked + File "", line 883, in exec_module + File "", line 241, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:49:22 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:49:22 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:49:24 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:49:24 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+2025-10-10 10:49:26 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:49:26 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:49:27 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:49:27 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:51:50 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:51:50 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 10:51:52 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:51:52 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+2025-10-10 10:51:54 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:52:10 ERROR 
[snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:52:13 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot 
register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 10:52:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:52:15 ERROR 
[snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:52:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 10:52:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 10:52:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:52:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 10:52:22 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 10:52:22 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:04:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:04:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:04:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:04:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:05:25 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in 
_find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:05:27 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 
1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:05:30 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:05:30 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 11:05:32 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:05:32 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:05:34 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:05:34 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:05:36 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:05:36 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:05:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:05:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:05:47 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return 
_bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:05:49 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:05:49 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... 
+ ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:21:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:21:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:21:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:21:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:21:29 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:21:29 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 11:21:31 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:21:31 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:21:58 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:21:58 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:21:58 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:21:58 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 11:22:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:22:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:22:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:22:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:22:19 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load + module = import_module(self.module) + File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + File "", line 1050, in _gcd_import + File "", line 1027, in _find_and_load + File "", line 1006, in _find_and_load_unlocked + File "", line 688, in _load_unlocked + File "", line 883, in exec_module + File "", line 241, in _call_with_frames_removed + File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:22:21 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load + module = import_module(self.module) + File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + File "", line 1050, in _gcd_import + File "", line 1027, in _find_and_load + File "", line 1006, in _find_and_load_unlocked + File "", line 688, in _load_unlocked + File "", line 883, in exec_module + File "", line 241, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:22:24 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:22:24 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:22:26 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:22:26 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:23:24 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:23:24 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+2025-10-10 11:23:26 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:23:26 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:23:28 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:23:28 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:23:30 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load + module = import_module(self.module) + File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + File "", line 1050, in _gcd_import + File "", line 1027, in _find_and_load + File "", line 1006, in _find_and_load_unlocked + File "", line 688, in _load_unlocked + File "", line 883, in exec_module + File "", line 241, in _call_with_frames_removed + File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:23:41 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load + module = import_module(self.module) + File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + File "", line 1050, in _gcd_import + File "", line 1027, in _find_and_load + File "", line 1006, in _find_and_load_unlocked + File "", line 688, in _load_unlocked + File "", line 883, in exec_module + File "", line 241, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:23:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:23:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:23:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:23:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:23:46 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:23:46 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+2025-10-10 11:28:29 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:28:29 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+2025-10-10 11:28:31 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:28:33 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:28:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:28:34 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:28:34 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:32:09 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:32:09 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:32:11 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:32:11 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:32:13 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:32:13 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:32:15 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:32:32 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:32:32 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:32:32 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:32:32 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. 
+2025-10-10 11:32:34 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:32:37 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:32:37 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:33:59 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:33:59 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:34:01 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:34:01 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:34:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:34:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:34:06 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in 
plugin +2025-10-10 11:34:11 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:34:11 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+2025-10-10 11:34:13 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:34:15 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:34:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:34:16 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:34:16 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:36:33 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:36:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:36:35 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:36:35 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:36:38 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:36:38 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:36:40 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:37:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:37:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:37:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:37:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:37:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:37:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:37:44 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in 
plugin +2025-10-10 11:37:53 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:37:53 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+2025-10-10 11:37:55 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in plugin +2025-10-10 11:37:58 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:37:58 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:38:00 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:38:00 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:38:57 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin +2025-10-10 11:38:57 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer + parent_group = self._find_typer_group_at_path( + current_level_group=self._main_typer_command_group, + remaining_parent_path_segments=command_spec.parent_command_path.path_segments, + command_spec=command_spec, + ) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path + raise RuntimeError( + ...<2 lines>... + ) +RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. +2025-10-10 11:39:00 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:39:00 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." + ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:39:02 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:39:02 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( + f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
+ ) +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:39:04 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins + self._plugin_manager.load_setuptools_entrypoints( + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ + SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name + ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + ) + ^ + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints + plugin = ep.load() + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load + module = import_module(match.group('module')) + File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module + return _bootstrap._gcd_import(name[level:], package, level) + ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + File "", line 1387, in _gcd_import + File "", line 1360, in _find_and_load + File "", line 1331, in _find_and_load_unlocked + File "", line 935, in _load_unlocked + File "", line 1026, in exec_module + File "", line 488, in _call_with_frames_removed + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in + from snowflakecli.test_plugins.failing_plugin import commands + File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in + raise RuntimeError("Some error in plugin") +RuntimeError: Some error in 
plugin +2025-10-10 11:40:05 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:40:05 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. +Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. +2025-10-10 11:40:07 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override +2025-10-10 11:40:07 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
+Traceback (most recent call last): + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands + self._add_plugin_to_typer(plugin.command_spec) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer + self._validate_command_spec(command_spec, parent_group) + File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec + raise RuntimeError( +RuntimeError: Cannot add command [snow connection list] because it already exists. diff --git a/tests_integration/plugin/test_broken_plugin.py b/tests_integration/plugin/test_broken_plugin.py index d60ff6d197..0cfa5e61a3 100644 --- a/tests_integration/plugin/test_broken_plugin.py +++ b/tests_integration/plugin/test_broken_plugin.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from textwrap import dedent +import json import pytest @@ -23,7 +23,9 @@ def test_broken_command_path_plugin(runner, test_root_path, _install_plugin, cap test_root_path / "config" / "plugin_tests" / "broken_plugin_config.toml" ) - result = runner.invoke(["--config-file", config_path, "connection", "list"]) + result = runner.invoke( + ["--config-file", config_path, "connection", "list", "--format", "JSON"] + ) assert result.exit_code == 0, result.output assert "Loaded external plugin: broken_plugin" in caplog.messages @@ -31,15 +33,17 @@ def test_broken_command_path_plugin(runner, test_root_path, _install_plugin, cap "Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist." 
in caplog.messages ) - assert result.output == dedent( - """\ - +----------------------------------------------------+ - | connection_name | parameters | is_default | - |-----------------+---------------------+------------| - | test | {'account': 'test'} | False | - +----------------------------------------------------+ - """ + + # Parse JSON output and check for test connection existence + connections = json.loads(result.output) + + # Find the 'test' connection + test_connection = next( + (conn for conn in connections if conn["connection_name"] == "test"), None ) + assert test_connection is not None, "Expected 'test' connection not found in output" + assert test_connection["parameters"] == {"account": "test"} + assert test_connection["is_default"] is False @pytest.fixture(scope="module") diff --git a/tests_integration/plugin/test_failing_plugin.py b/tests_integration/plugin/test_failing_plugin.py index a7e8af70e2..b782fa2823 100644 --- a/tests_integration/plugin/test_failing_plugin.py +++ b/tests_integration/plugin/test_failing_plugin.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from textwrap import dedent +import json import pytest @@ -23,21 +23,25 @@ def test_failing_plugin(runner, test_root_path, _install_plugin, caplog): test_root_path / "config" / "plugin_tests" / "failing_plugin_config.toml" ) - result = runner.invoke(["--config-file", config_path, "connection", "list"]) + result = runner.invoke( + ["--config-file", config_path, "connection", "list", "--format", "JSON"] + ) assert ( "Cannot register plugin [failing_plugin]: Some error in plugin" in caplog.messages ) - assert result.output == dedent( - """\ - +----------------------------------------------------+ - | connection_name | parameters | is_default | - |-----------------+---------------------+------------| - | test | {'account': 'test'} | False | - +----------------------------------------------------+ - """ + + # Parse JSON output and check for test connection existence + connections = json.loads(result.output) + + # Find the 'test' connection + test_connection = next( + (conn for conn in connections if conn["connection_name"] == "test"), None ) + assert test_connection is not None, "Expected 'test' connection not found in output" + assert test_connection["parameters"] == {"account": "test"} + assert test_connection["is_default"] is False @pytest.fixture(scope="module") diff --git a/tests_integration/plugin/test_override_by_external_plugins.py b/tests_integration/plugin/test_override_by_external_plugins.py index 2bf1043afa..387125f22d 100644 --- a/tests_integration/plugin/test_override_by_external_plugins.py +++ b/tests_integration/plugin/test_override_by_external_plugins.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from textwrap import dedent +import json import pytest @@ -23,22 +23,25 @@ def test_override_build_in_commands(runner, test_root_path, _install_plugin, cap test_root_path / "config" / "plugin_tests" / "override_plugin_config.toml" ) - result = runner.invoke(["--config-file", config_path, "connection", "list"]) + result = runner.invoke( + ["--config-file", config_path, "connection", "list", "--format", "JSON"] + ) assert ( "Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists." in caplog.messages ) - assert result.output == dedent( - """\ - Outside command code - +----------------------------------------------------+ - | connection_name | parameters | is_default | - |-----------------+---------------------+------------| - | test | {'account': 'test'} | False | - +----------------------------------------------------+ - """ + + # Parse JSON output and check for test connection existence + connections = json.loads(result.output) + + # Find the 'test' connection + test_connection = next( + (conn for conn in connections if conn["connection_name"] == "test"), None ) + assert test_connection is not None, "Expected 'test' connection not found in output" + assert test_connection["parameters"] == {"account": "test"} + assert test_connection["is_default"] is False @pytest.mark.integration @@ -52,17 +55,20 @@ def test_disabled_plugin_is_not_executed( / "disabled_override_plugin_config.toml" ) - result = runner.invoke(["--config-file", config_path, "connection", "list"]) + result = runner.invoke( + ["--config-file", config_path, "connection", "list", "--format", "JSON"] + ) + + # Parse JSON output and check for test connection existence + connections = json.loads(result.output) - assert result.output == dedent( - """\ - +----------------------------------------------------+ - | connection_name | parameters | is_default | - |-----------------+---------------------+------------| - | test | {'account': 'test'} | False | - 
+----------------------------------------------------+ - """ + # Find the 'test' connection + test_connection = next( + (conn for conn in connections if conn["connection_name"] == "test"), None ) + assert test_connection is not None, "Expected 'test' connection not found in output" + assert test_connection["parameters"] == {"account": "test"} + assert test_connection["is_default"] is False @pytest.fixture(scope="module") From 00daf6a38f9335d3dc91516c013c4d602ff5972c Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 14:16:31 +0200 Subject: [PATCH 28/78] SNOW-2306184: config refactor - e2e fix attempt --- src/snowflake/cli/api/config_ng/sources.py | 13 +++++++++++++ src/snowflake/cli/api/config_provider.py | 7 +++++++ tests/test_connection.py | 1 + tests_e2e/conftest.py | 13 +++++++++++++ 4 files changed, 34 insertions(+) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index cfd73726aa..8ad443e062 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -254,6 +254,7 @@ def _parse_toml_file( connections = data.get("connections", {}) for conn_name, conn_data in connections.items(): if isinstance(conn_data, dict): + # Process parameters if they exist for param_key, param_value in conn_data.items(): full_key = f"connections.{conn_name}.{param_key}" if key is None or full_key == key: @@ -264,6 +265,18 @@ def _parse_toml_file( raw_value=param_value, ) + # For empty connections, we need to ensure they are recognized + # even if they have no parameters. We add a special marker. 
+ if not conn_data: # Empty connection section + marker_key = f"connections.{conn_name}._empty_connection" + if key is None or marker_key == key: + result[marker_key] = ConfigValue( + key=marker_key, + value=True, + source_name=self.source_name, + raw_value=True, + ) + return result except Exception as e: diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index cba0086435..2e4900144a 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -499,6 +499,13 @@ def _get_all_connections_dict(self) -> Dict[str, Dict[str, Any]]: conn_name, param_name = parts if conn_name not in connections: connections[conn_name] = {} + + # Skip internal markers, but ensure connection exists + if param_name == "_empty_connection": + # This is just a marker for empty connections + # Connection dict already created above + continue + connections[conn_name][param_name] = value return connections diff --git a/tests/test_connection.py b/tests/test_connection.py index 81cfaebd10..d6612d25de 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -293,6 +293,7 @@ def test_fails_if_existing_connection(runner): @mock.patch("snowflake.cli._plugins.connection.commands.get_default_connection_name") +@mock.patch.dict(os.environ, {}, clear=True) def test_lists_connection_information(mock_get_default_conn_name, runner): mock_get_default_conn_name.return_value = "empty" result = runner.invoke(["connection", "list", "--format", "json"]) diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 8286f0276d..9dc6c4142d 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os import shutil import subprocess import sys @@ -111,6 +112,18 @@ def isolate_default_config_location(monkeypatch, temporary_directory): monkeypatch.setenv("SNOWFLAKE_HOME", temporary_directory) +@pytest.fixture(autouse=True) +def isolate_environment_variables(monkeypatch): + """ + Clear Snowflake-specific environment variables that could interfere with e2e tests. + This ensures tests run in a clean environment and only use the config files they specify. + """ + # Clear all SNOWFLAKE_CONNECTIONS_* environment variables + for env_var in list(os.environ.keys()): + if env_var.startswith(("SNOWFLAKE_CONNECTIONS_", "SNOWSQL_")): + monkeypatch.delenv(env_var, raising=False) + + def _create_venv(tmp_dir: Path) -> None: subprocess_check_output(["python", "-m", "venv", tmp_dir]) From 231f1b2f1417690ead07225db695ba9dfd858910 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 15:34:08 +0200 Subject: [PATCH 29/78] SNOW-2306184: config refactor - e2e fix attempt 3 --- tests_e2e/conftest.py | 7 +- tests_e2e/test_import_snowsql_connections.py | 210 ++++++++++--------- 2 files changed, 118 insertions(+), 99 deletions(-) diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 9dc6c4142d..996d906506 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -117,11 +117,14 @@ def isolate_environment_variables(monkeypatch): """ Clear Snowflake-specific environment variables that could interfere with e2e tests. This ensures tests run in a clean environment and only use the config files they specify. + Exception: Keep INTEGRATION connection vars for e2e testing. 
""" - # Clear all SNOWFLAKE_CONNECTIONS_* environment variables + # Clear all SNOWFLAKE_CONNECTIONS_* environment variables except INTEGRATION for env_var in list(os.environ.keys()): if env_var.startswith(("SNOWFLAKE_CONNECTIONS_", "SNOWSQL_")): - monkeypatch.delenv(env_var, raising=False) + # Preserve all INTEGRATION connection environment variables + if not env_var.startswith("SNOWFLAKE_CONNECTIONS_INTEGRATION_"): + monkeypatch.delenv(env_var, raising=False) def _create_venv(tmp_dir: Path) -> None: diff --git a/tests_e2e/test_import_snowsql_connections.py b/tests_e2e/test_import_snowsql_connections.py index 49ebc30554..572bfdfc0d 100644 --- a/tests_e2e/test_import_snowsql_connections.py +++ b/tests_e2e/test_import_snowsql_connections.py @@ -6,32 +6,37 @@ from tests_e2e.conftest import subprocess_check_output, subprocess_run -@pytest.fixture() -def _assert_json_output_matches_snapshot(snapshot): - def f(cmd, stdin: Optional[str] = None): - output = subprocess_check_output(cmd, stdin) - parsed_json = json.loads(output) - snapshot.assert_match(json.dumps(parsed_json)) - - return f - - -@pytest.mark.e2e -def test_import_of_snowsql_connections( - snowcli, test_root_path, empty_config_file, _assert_json_output_matches_snapshot -): - _assert_json_output_matches_snapshot( +def _get_connections_list(snowcli, config_file) -> list: + """Helper function to get connections list as parsed JSON.""" + output = subprocess_check_output( [ snowcli, "--config-file", - empty_config_file, + config_file, "connection", "list", "--format", "json", - ], + ] ) + return json.loads(output) + + +def _find_connection(connections: list, name: str) -> Optional[dict]: + """Helper function to find a connection by name.""" + for conn in connections: + if conn["connection_name"] == name: + return conn + return None + + +@pytest.mark.e2e +def test_import_of_snowsql_connections(snowcli, test_root_path, empty_config_file): + # Initially should have empty or minimal connections list + 
initial_connections = _get_connections_list(snowcli, empty_config_file) + initial_count = len(initial_connections) + # Import snowsql connections result = subprocess_run( [ snowcli, @@ -47,35 +52,49 @@ def test_import_of_snowsql_connections( ) assert result.returncode == 0 - _assert_json_output_matches_snapshot( - [ - snowcli, - "--config-file", - empty_config_file, - "connection", - "list", - "--format", - "json", - ] - ) + # After import, should have multiple connections + final_connections = _get_connections_list(snowcli, empty_config_file) + + # Should have more connections than initially + assert len(final_connections) > initial_count + + # Check that expected connections exist + connection_names = {conn["connection_name"] for conn in final_connections} + expected_names = {"snowsql1", "snowsql2", "snowsql3", "example", "default"} + assert expected_names.issubset(connection_names) + + # Check specific connection details + snowsql1 = _find_connection(final_connections, "snowsql1") + assert snowsql1 is not None + assert snowsql1["parameters"]["account"] == "a1" + assert snowsql1["parameters"]["user"] == "u1" + assert snowsql1["parameters"]["host"] == "h1_override" # From overriding config + assert snowsql1["is_default"] is False + + snowsql2 = _find_connection(final_connections, "snowsql2") + assert snowsql2 is not None + assert snowsql2["parameters"]["account"] == "a2" + assert snowsql2["parameters"]["port"] == 1234 + assert snowsql2["is_default"] is False + + default_conn = _find_connection(final_connections, "default") + assert default_conn is not None + assert default_conn["parameters"]["account"] == "default_connection_account" + assert ( + default_conn["parameters"]["database"] == "default_connection_database_override" + ) # From overriding config + assert default_conn["is_default"] is True @pytest.mark.e2e def test_import_prompt_for_different_default_connection_name_on_conflict( - snowcli, test_root_path, empty_config_file, 
_assert_json_output_matches_snapshot + snowcli, test_root_path, empty_config_file ): - _assert_json_output_matches_snapshot( - [ - snowcli, - "--config-file", - empty_config_file, - "connection", - "list", - "--format", - "json", - ], - ) + # Initially should have empty or minimal connections list + initial_connections = _get_connections_list(snowcli, empty_config_file) + initial_count = len(initial_connections) + # Import with different default connection name result = subprocess_run( [ snowcli, @@ -94,17 +113,19 @@ def test_import_prompt_for_different_default_connection_name_on_conflict( ) assert result.returncode == 0 - _assert_json_output_matches_snapshot( - [ - snowcli, - "--config-file", - empty_config_file, - "connection", - "list", - "--format", - "json", - ] - ) + # After import, snowsql2 should be the default + final_connections = _get_connections_list(snowcli, empty_config_file) + + # Should have more connections than initially + assert len(final_connections) > initial_count + + snowsql2 = _find_connection(final_connections, "snowsql2") + assert snowsql2 is not None + assert snowsql2["is_default"] is True + + default_conn = _find_connection(final_connections, "default") + assert default_conn is not None + assert default_conn["is_default"] is False @pytest.mark.e2e @@ -112,20 +133,19 @@ def test_import_confirm_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, - _assert_json_output_matches_snapshot, ): - _assert_json_output_matches_snapshot( - [ - snowcli, - "--config-file", - example_connection_config_file, - "connection", - "list", - "--format", - "json", - ], - ) + # Initially should have example and integration connections + initial_connections = _get_connections_list(snowcli, example_connection_config_file) + example_conn = _find_connection(initial_connections, "example") + assert example_conn is not None + assert example_conn["parameters"]["user"] == "u1" + assert 
example_conn["parameters"]["authenticator"] == "SNOWFLAKE_JWT" + + integration_conn = _find_connection(initial_connections, "integration") + assert integration_conn is not None + + # Import with confirmation (y) result = subprocess_run( [ snowcli, @@ -142,17 +162,15 @@ def test_import_confirm_on_conflict_with_existing_cli_connection( ) assert result.returncode == 0 - _assert_json_output_matches_snapshot( - [ - snowcli, - "--config-file", - example_connection_config_file, - "connection", - "list", - "--format", - "json", - ], - ) + # After import, example connection should be overwritten with snowsql data + final_connections = _get_connections_list(snowcli, example_connection_config_file) + + example_conn = _find_connection(final_connections, "example") + assert example_conn is not None + assert example_conn["parameters"]["account"] == "accountname" + assert example_conn["parameters"]["user"] == "username" + # Should not have the old JWT authenticator + assert "authenticator" not in example_conn["parameters"] @pytest.mark.e2e @@ -160,20 +178,16 @@ def test_import_reject_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, - _assert_json_output_matches_snapshot, ): - _assert_json_output_matches_snapshot( - [ - snowcli, - "--config-file", - example_connection_config_file, - "connection", - "list", - "--format", - "json", - ], - ) + # Initially should have example and integration connections + initial_connections = _get_connections_list(snowcli, example_connection_config_file) + + example_conn = _find_connection(initial_connections, "example") + assert example_conn is not None + original_user = example_conn["parameters"]["user"] + original_auth = example_conn["parameters"]["authenticator"] + # Import with rejection (n) result = subprocess_run( [ snowcli, @@ -190,17 +204,18 @@ def test_import_reject_on_conflict_with_existing_cli_connection( ) assert result.returncode == 0 - _assert_json_output_matches_snapshot( - [ - 
snowcli, - "--config-file", - example_connection_config_file, - "connection", - "list", - "--format", - "json", - ], - ) + # After import, example connection should remain unchanged + final_connections = _get_connections_list(snowcli, example_connection_config_file) + + example_conn = _find_connection(final_connections, "example") + assert example_conn is not None + assert example_conn["parameters"]["user"] == original_user + assert example_conn["parameters"]["authenticator"] == original_auth + + # But other connections should still be imported + snowsql1 = _find_connection(final_connections, "snowsql1") + assert snowsql1 is not None + assert snowsql1["parameters"]["account"] == "a1" @pytest.mark.e2e @@ -218,6 +233,7 @@ def test_connection_imported_from_snowsql(snowcli, test_root_path, empty_config_ ) assert result.returncode == 0 + # Test that the imported integration connection works result = subprocess_run( [ snowcli, From 16c69000a0db4c30a5ecc5d843b169efad0b90e6 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 21:39:29 +0200 Subject: [PATCH 30/78] SNOW-2306184: config refactor - e2e fix attempt 4 --- tests_e2e/conftest.py | 28 ++++ tests_e2e/test_import_snowsql_connections.py | 142 ++++++------------ tests_integration/conftest.py | 29 ++++ .../plugin/test_broken_plugin.py | 21 +-- .../plugin/test_failing_plugin.py | 21 +-- .../test_override_by_external_plugins.py | 38 ++--- 6 files changed, 131 insertions(+), 148 deletions(-) diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 996d906506..328463d7b8 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -210,3 +210,31 @@ def example_connection_config_file(test_root_path, prepare_test_config_file): yield prepare_test_config_file( SecurePath(test_root_path) / "config" / "example_connection.toml" ) + + +@pytest.fixture +def config_mode(request, monkeypatch): + """ + Fixture to switch between legacy and config_ng modes. 
+ + When parameterized with ["legacy", "config_ng"], this fixture sets the + appropriate environment variable to enable/disable the new config system. + Each parameter value creates a separate test instance with its own snapshot. + + Usage: + @pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) + def test_something(config_mode, snapshot): + # Test runs twice: once with legacy, once with config_ng + # Each gets its own snapshot: test_something[legacy] and test_something[config_ng] + ... + """ + mode = getattr(request, "param", "config_ng") # default to config_ng + + if mode == "config_ng": + # Enable new config system + monkeypatch.setenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", "true") + else: + # Ensure new config system is disabled (legacy mode) + monkeypatch.delenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", raising=False) + + return mode diff --git a/tests_e2e/test_import_snowsql_connections.py b/tests_e2e/test_import_snowsql_connections.py index 572bfdfc0d..2cbef91be2 100644 --- a/tests_e2e/test_import_snowsql_connections.py +++ b/tests_e2e/test_import_snowsql_connections.py @@ -1,14 +1,11 @@ -import json -from typing import Optional - import pytest from tests_e2e.conftest import subprocess_check_output, subprocess_run -def _get_connections_list(snowcli, config_file) -> list: - """Helper function to get connections list as parsed JSON.""" - output = subprocess_check_output( +def _get_connections_list_output(snowcli, config_file) -> str: + """Helper function to get connections list output as string.""" + return subprocess_check_output( [ snowcli, "--config-file", @@ -19,22 +16,17 @@ def _get_connections_list(snowcli, config_file) -> list: "json", ] ) - return json.loads(output) - - -def _find_connection(connections: list, name: str) -> Optional[dict]: - """Helper function to find a connection by name.""" - for conn in connections: - if conn["connection_name"] == name: - return conn - return None +@pytest.mark.parametrize("config_mode", ["legacy", 
"config_ng"], indirect=True) @pytest.mark.e2e -def test_import_of_snowsql_connections(snowcli, test_root_path, empty_config_file): +def test_import_of_snowsql_connections( + snowcli, test_root_path, empty_config_file, snapshot, config_mode +): + """Test connection import with both legacy and config_ng systems.""" # Initially should have empty or minimal connections list - initial_connections = _get_connections_list(snowcli, empty_config_file) - initial_count = len(initial_connections) + initial_output = _get_connections_list_output(snowcli, empty_config_file) + assert initial_output == snapshot # Import snowsql connections result = subprocess_run( @@ -53,46 +45,19 @@ def test_import_of_snowsql_connections(snowcli, test_root_path, empty_config_fil assert result.returncode == 0 # After import, should have multiple connections - final_connections = _get_connections_list(snowcli, empty_config_file) - - # Should have more connections than initially - assert len(final_connections) > initial_count - - # Check that expected connections exist - connection_names = {conn["connection_name"] for conn in final_connections} - expected_names = {"snowsql1", "snowsql2", "snowsql3", "example", "default"} - assert expected_names.issubset(connection_names) - - # Check specific connection details - snowsql1 = _find_connection(final_connections, "snowsql1") - assert snowsql1 is not None - assert snowsql1["parameters"]["account"] == "a1" - assert snowsql1["parameters"]["user"] == "u1" - assert snowsql1["parameters"]["host"] == "h1_override" # From overriding config - assert snowsql1["is_default"] is False - - snowsql2 = _find_connection(final_connections, "snowsql2") - assert snowsql2 is not None - assert snowsql2["parameters"]["account"] == "a2" - assert snowsql2["parameters"]["port"] == 1234 - assert snowsql2["is_default"] is False - - default_conn = _find_connection(final_connections, "default") - assert default_conn is not None - assert default_conn["parameters"]["account"] == 
"default_connection_account" - assert ( - default_conn["parameters"]["database"] == "default_connection_database_override" - ) # From overriding config - assert default_conn["is_default"] is True + final_output = _get_connections_list_output(snowcli, empty_config_file) + assert final_output == snapshot +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e def test_import_prompt_for_different_default_connection_name_on_conflict( - snowcli, test_root_path, empty_config_file + snowcli, test_root_path, empty_config_file, snapshot, config_mode ): + """Test importing with different default connection name.""" # Initially should have empty or minimal connections list - initial_connections = _get_connections_list(snowcli, empty_config_file) - initial_count = len(initial_connections) + initial_output = _get_connections_list_output(snowcli, empty_config_file) + assert initial_output == snapshot # Import with different default connection name result = subprocess_run( @@ -114,36 +79,25 @@ def test_import_prompt_for_different_default_connection_name_on_conflict( assert result.returncode == 0 # After import, snowsql2 should be the default - final_connections = _get_connections_list(snowcli, empty_config_file) - - # Should have more connections than initially - assert len(final_connections) > initial_count - - snowsql2 = _find_connection(final_connections, "snowsql2") - assert snowsql2 is not None - assert snowsql2["is_default"] is True - - default_conn = _find_connection(final_connections, "default") - assert default_conn is not None - assert default_conn["is_default"] is False + final_output = _get_connections_list_output(snowcli, empty_config_file) + assert final_output == snapshot +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e def test_import_confirm_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, + snapshot, + config_mode, ): + 
"""Test import with confirmation on conflict.""" # Initially should have example and integration connections - initial_connections = _get_connections_list(snowcli, example_connection_config_file) - - example_conn = _find_connection(initial_connections, "example") - assert example_conn is not None - assert example_conn["parameters"]["user"] == "u1" - assert example_conn["parameters"]["authenticator"] == "SNOWFLAKE_JWT" - - integration_conn = _find_connection(initial_connections, "integration") - assert integration_conn is not None + initial_output = _get_connections_list_output( + snowcli, example_connection_config_file + ) + assert initial_output == snapshot # Import with confirmation (y) result = subprocess_run( @@ -163,29 +117,25 @@ def test_import_confirm_on_conflict_with_existing_cli_connection( assert result.returncode == 0 # After import, example connection should be overwritten with snowsql data - final_connections = _get_connections_list(snowcli, example_connection_config_file) - - example_conn = _find_connection(final_connections, "example") - assert example_conn is not None - assert example_conn["parameters"]["account"] == "accountname" - assert example_conn["parameters"]["user"] == "username" - # Should not have the old JWT authenticator - assert "authenticator" not in example_conn["parameters"] + final_output = _get_connections_list_output(snowcli, example_connection_config_file) + assert final_output == snapshot +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e def test_import_reject_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, + snapshot, + config_mode, ): + """Test import with rejection on conflict.""" # Initially should have example and integration connections - initial_connections = _get_connections_list(snowcli, example_connection_config_file) - - example_conn = _find_connection(initial_connections, "example") - assert example_conn is not None - 
original_user = example_conn["parameters"]["user"] - original_auth = example_conn["parameters"]["authenticator"] + initial_output = _get_connections_list_output( + snowcli, example_connection_config_file + ) + assert initial_output == snapshot # Import with rejection (n) result = subprocess_run( @@ -205,21 +155,17 @@ def test_import_reject_on_conflict_with_existing_cli_connection( assert result.returncode == 0 # After import, example connection should remain unchanged - final_connections = _get_connections_list(snowcli, example_connection_config_file) - - example_conn = _find_connection(final_connections, "example") - assert example_conn is not None - assert example_conn["parameters"]["user"] == original_user - assert example_conn["parameters"]["authenticator"] == original_auth - # But other connections should still be imported - snowsql1 = _find_connection(final_connections, "snowsql1") - assert snowsql1 is not None - assert snowsql1["parameters"]["account"] == "a1" + final_output = _get_connections_list_output(snowcli, example_connection_config_file) + assert final_output == snapshot +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e -def test_connection_imported_from_snowsql(snowcli, test_root_path, empty_config_file): +def test_connection_imported_from_snowsql( + snowcli, test_root_path, empty_config_file, config_mode +): + """Test that imported connection works.""" result = subprocess_run( [ snowcli, diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index 965fed0990..8f827efd90 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -308,3 +308,32 @@ def enable_snowpark_glob_support_feature_flag(): def global_setup(monkeypatch): width = 81 if IS_WINDOWS else 80 monkeypatch.setenv("COLUMNS", str(width)) + + +@pytest.fixture +def config_mode(request, monkeypatch): + """ + Fixture to switch between legacy and config_ng modes. 
+ + When parameterized with ["legacy", "config_ng"], this fixture sets the + appropriate environment variable to enable/disable the new config system. + Each parameter value creates a separate test instance with its own snapshot. + + Usage: + @pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) + @pytest.mark.integration + def test_something(runner, config_mode, snapshot): + # Test runs twice: once with legacy, once with config_ng + # Each gets its own snapshot + ... + """ + mode = getattr(request, "param", "config_ng") # default to config_ng + + if mode == "config_ng": + # Enable new config system + monkeypatch.setenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", "true") + else: + # Ensure new config system is disabled (legacy mode) + monkeypatch.delenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", raising=False) + + return mode diff --git a/tests_integration/plugin/test_broken_plugin.py b/tests_integration/plugin/test_broken_plugin.py index 0cfa5e61a3..931de53f5e 100644 --- a/tests_integration/plugin/test_broken_plugin.py +++ b/tests_integration/plugin/test_broken_plugin.py @@ -12,13 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json - import pytest +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration -def test_broken_command_path_plugin(runner, test_root_path, _install_plugin, caplog): +def test_broken_command_path_plugin( + runner, test_root_path, _install_plugin, caplog, snapshot, config_mode +): + """Test broken plugin with both legacy and config_ng systems.""" config_path = ( test_root_path / "config" / "plugin_tests" / "broken_plugin_config.toml" ) @@ -34,16 +36,9 @@ def test_broken_command_path_plugin(runner, test_root_path, _install_plugin, cap in caplog.messages ) - # Parse JSON output and check for test connection existence - connections = json.loads(result.output) - - # Find the 'test' connection - test_connection = next( - (conn for conn in connections if conn["connection_name"] == "test"), None - ) - assert test_connection is not None, "Expected 'test' connection not found in output" - assert test_connection["parameters"] == {"account": "test"} - assert test_connection["is_default"] is False + # Use snapshot to capture the output + # Each config_mode gets its own snapshot automatically + assert result.output == snapshot @pytest.fixture(scope="module") diff --git a/tests_integration/plugin/test_failing_plugin.py b/tests_integration/plugin/test_failing_plugin.py index b782fa2823..d26a94ab3f 100644 --- a/tests_integration/plugin/test_failing_plugin.py +++ b/tests_integration/plugin/test_failing_plugin.py @@ -12,13 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json - import pytest +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration -def test_failing_plugin(runner, test_root_path, _install_plugin, caplog): +def test_failing_plugin( + runner, test_root_path, _install_plugin, caplog, snapshot, config_mode +): + """Test failing plugin with both legacy and config_ng systems.""" config_path = ( test_root_path / "config" / "plugin_tests" / "failing_plugin_config.toml" ) @@ -32,16 +34,9 @@ def test_failing_plugin(runner, test_root_path, _install_plugin, caplog): in caplog.messages ) - # Parse JSON output and check for test connection existence - connections = json.loads(result.output) - - # Find the 'test' connection - test_connection = next( - (conn for conn in connections if conn["connection_name"] == "test"), None - ) - assert test_connection is not None, "Expected 'test' connection not found in output" - assert test_connection["parameters"] == {"account": "test"} - assert test_connection["is_default"] is False + # Use snapshot to capture the output + # Each config_mode gets its own snapshot automatically + assert result.output == snapshot @pytest.fixture(scope="module") diff --git a/tests_integration/plugin/test_override_by_external_plugins.py b/tests_integration/plugin/test_override_by_external_plugins.py index 387125f22d..9e4ae487ac 100644 --- a/tests_integration/plugin/test_override_by_external_plugins.py +++ b/tests_integration/plugin/test_override_by_external_plugins.py @@ -12,13 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import json - import pytest +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration -def test_override_build_in_commands(runner, test_root_path, _install_plugin, caplog): +def test_override_build_in_commands( + runner, test_root_path, _install_plugin, caplog, snapshot, config_mode +): + """Test plugin override attempt with both legacy and config_ng systems.""" config_path = ( test_root_path / "config" / "plugin_tests" / "override_plugin_config.toml" ) @@ -32,22 +34,17 @@ def test_override_build_in_commands(runner, test_root_path, _install_plugin, cap in caplog.messages ) - # Parse JSON output and check for test connection existence - connections = json.loads(result.output) - - # Find the 'test' connection - test_connection = next( - (conn for conn in connections if conn["connection_name"] == "test"), None - ) - assert test_connection is not None, "Expected 'test' connection not found in output" - assert test_connection["parameters"] == {"account": "test"} - assert test_connection["is_default"] is False + # Use snapshot to capture the output + # Each config_mode gets its own snapshot automatically + assert result.output == snapshot +@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration def test_disabled_plugin_is_not_executed( - runner, test_root_path, _install_plugin, caplog + runner, test_root_path, _install_plugin, caplog, snapshot, config_mode ): + """Test disabled plugin with both legacy and config_ng systems.""" config_path = ( test_root_path / "config" @@ -59,16 +56,9 @@ def test_disabled_plugin_is_not_executed( ["--config-file", config_path, "connection", "list", "--format", "JSON"] ) - # Parse JSON output and check for test connection existence - connections = json.loads(result.output) - - # Find the 'test' connection - test_connection = next( - (conn for conn in connections if conn["connection_name"] == "test"), None - ) - assert test_connection is not 
None, "Expected 'test' connection not found in output" - assert test_connection["parameters"] == {"account": "test"} - assert test_connection["is_default"] is False + # Use snapshot to capture the output + # Each config_mode gets its own snapshot automatically + assert result.output == snapshot @pytest.fixture(scope="module") From 8a484f306900c921613e78a789e7cb755f158b01 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 22:23:26 +0200 Subject: [PATCH 31/78] SNOW-2306184: config refactor - e2e fix attempt 5 --- .github/workflows/test.yaml | 3 + .github/workflows/test_e2e.yaml | 4 + .github/workflows/test_integration.yaml | 4 + .github/workflows/test_trusted.yaml | 3 + pyproject.toml | 2 + .../test_import_snowsql_connections.ambr | 309 ++++++++++++++++++ .../__snapshots__/test_broken_plugin.ambr | 42 +++ .../__snapshots__/test_failing_plugin.ambr | 42 +++ .../test_override_by_external_plugins.ambr | 85 +++++ 9 files changed, 494 insertions(+) create mode 100644 tests_integration/plugin/__snapshots__/test_broken_plugin.ambr create mode 100644 tests_integration/plugin/__snapshots__/test_failing_plugin.ambr create mode 100644 tests_integration/plugin/__snapshots__/test_override_by_external_plugins.ambr diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 01d9b70f28..8fbc3779c4 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -28,6 +28,8 @@ jobs: os: ${{ fromJSON(needs.define-matrix.outputs.os) }} python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} runs-on: ${{ matrix.os }} + env: + PYTEST_ADDOPTS: -k legacy steps: - uses: actions/checkout@v4 with: @@ -69,3 +71,4 @@ jobs: - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 env: SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 + PYTEST_ADDOPTS: -k config_ng diff --git a/.github/workflows/test_e2e.yaml b/.github/workflows/test_e2e.yaml index 6842460719..f3a73c4340 100644 --- a/.github/workflows/test_e2e.yaml +++ 
b/.github/workflows/test_e2e.yaml @@ -44,6 +44,8 @@ jobs: python-env: e2e hatch-run: e2e:test secrets: inherit + env: + PYTEST_ADDOPTS: -k legacy # Repo owner has commented /ok-to-test on a (fork-based) pull request e2e-fork: @@ -71,3 +73,5 @@ jobs: hatch-run: e2e:test job-name: e2e-fork secrets: inherit + env: + PYTEST_ADDOPTS: -k legacy diff --git a/.github/workflows/test_integration.yaml b/.github/workflows/test_integration.yaml index 07ef4ace7e..9232dc687d 100644 --- a/.github/workflows/test_integration.yaml +++ b/.github/workflows/test_integration.yaml @@ -45,6 +45,8 @@ jobs: python-env: integration hatch-run: integration:test secrets: inherit + env: + PYTEST_ADDOPTS: -k legacy integration-fork: needs: define-matrix @@ -71,3 +73,5 @@ jobs: hatch-run: integration:test job-name: integration-fork secrets: inherit + env: + PYTEST_ADDOPTS: -k legacy diff --git a/.github/workflows/test_trusted.yaml b/.github/workflows/test_trusted.yaml index a101d03631..1ca2fbb228 100644 --- a/.github/workflows/test_trusted.yaml +++ b/.github/workflows/test_trusted.yaml @@ -23,6 +23,8 @@ permissions: jobs: tests-trusted: runs-on: ${{ inputs.runs-on }} + env: + PYTEST_ADDOPTS: -k legacy steps: - uses: actions/checkout@v4 with: @@ -51,6 +53,7 @@ jobs: runs-on: ${{ inputs.runs-on }} env: SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 + PYTEST_ADDOPTS: -k config_ng steps: - uses: actions/checkout@v4 with: diff --git a/pyproject.toml b/pyproject.toml index 6b0bc51d14..ae0560a9d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -198,6 +198,8 @@ markers = [ "integration_experimental: experimental integration test", "no_qa: mark test as not to be run in QA", "qa_only: mark test as to be run only in QA", + "config_ng: mark test as using new config system", + "legacy: mark test as using legacy config system", ] diff --git a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr b/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr index bf4d0ea7a7..a1129e8aee 100644 --- 
a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr +++ b/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr @@ -5,21 +5,330 @@ # name: test_import_confirm_on_conflict_with_existing_cli_connection.1 '[{"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' # --- +# name: test_import_confirm_on_conflict_with_existing_cli_connection[legacy] + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_confirm_on_conflict_with_existing_cli_connection[legacy].1 + ''' + [ + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": 
"snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + } + ] + ''' +# --- # name: test_import_of_snowsql_connections '[]' # --- # name: test_import_of_snowsql_connections.1 '[{"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' # --- +# name: test_import_of_snowsql_connections[legacy] + '[]' +# --- +# 
name: test_import_of_snowsql_connections[legacy].1 + ''' + [ + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + } + ] + ''' +# --- # name: test_import_prompt_for_different_default_connection_name_on_conflict '[]' # --- # name: test_import_prompt_for_different_default_connection_name_on_conflict.1 '[{"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": true}, {"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", 
"password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": false}]' # --- +# name: test_import_prompt_for_different_default_connection_name_on_conflict[legacy] + '[]' +# --- +# name: test_import_prompt_for_different_default_connection_name_on_conflict[legacy].1 + ''' + [ + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": true + }, + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": false + } + ] + ''' +# --- # name: test_import_reject_on_conflict_with_existing_cli_connection '[{"connection_name": "example", "parameters": {"user": "u1", "schema": "public", "authenticator": 
"SNOWFLAKE_JWT"}, "is_default": false}]' # --- # name: test_import_reject_on_conflict_with_existing_cli_connection.1 '[{"connection_name": "example", "parameters": {"user": "u1", "schema": "public", "authenticator": "SNOWFLAKE_JWT"}, "is_default": false}, {"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' # --- +# name: test_import_reject_on_conflict_with_existing_cli_connection[legacy] + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_reject_on_conflict_with_existing_cli_connection[legacy].1 + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + }, + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + 
"host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + } + ] + ''' +# --- diff --git a/tests_integration/plugin/__snapshots__/test_broken_plugin.ambr b/tests_integration/plugin/__snapshots__/test_broken_plugin.ambr new file mode 100644 index 0000000000..cc63440005 --- /dev/null +++ b/tests_integration/plugin/__snapshots__/test_broken_plugin.ambr @@ -0,0 +1,42 @@ +# serializer version: 1 +# name: test_broken_command_path_plugin[config_ng] + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + + ''' +# --- +# name: test_broken_command_path_plugin[legacy] + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- diff --git a/tests_integration/plugin/__snapshots__/test_failing_plugin.ambr b/tests_integration/plugin/__snapshots__/test_failing_plugin.ambr new file mode 100644 index 0000000000..abde5267c6 --- /dev/null +++ b/tests_integration/plugin/__snapshots__/test_failing_plugin.ambr @@ -0,0 +1,42 @@ 
+# serializer version: 1 +# name: test_failing_plugin[config_ng] + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + + ''' +# --- +# name: test_failing_plugin[legacy] + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- diff --git a/tests_integration/plugin/__snapshots__/test_override_by_external_plugins.ambr b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins.ambr new file mode 100644 index 0000000000..1531ece165 --- /dev/null +++ b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins.ambr @@ -0,0 +1,85 @@ +# serializer version: 1 +# name: test_disabled_plugin_is_not_executed[config_ng] + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + + ''' +# --- +# name: test_disabled_plugin_is_not_executed[legacy] + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- +# name: test_override_build_in_commands[config_ng] + ''' + Outside command code + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": 
"po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + + ''' +# --- +# name: test_override_build_in_commands[legacy] + ''' + Outside command code + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- From 93cb1e2e87a22043b8379671ebe5f67f5e632a86 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 22:57:18 +0200 Subject: [PATCH 32/78] SNOW-2306184: config refactor - e2e fix attempt 5a --- .../test_import_snowsql_connections.ambr | 419 ++++++++++++++++++ 1 file changed, 419 insertions(+) diff --git a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr b/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr index a1129e8aee..18dd1e3141 100644 --- a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr +++ b/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr @@ -5,6 +5,115 @@ # name: test_import_confirm_on_conflict_with_existing_cli_connection.1 '[{"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": 
"default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' # --- +# name: test_import_confirm_on_conflict_with_existing_cli_connection[config_ng] + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_confirm_on_conflict_with_existing_cli_connection[config_ng].1 + ''' + [ + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + }, + { + 
"connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- # name: test_import_confirm_on_conflict_with_existing_cli_connection[legacy] ''' [ @@ -94,6 +203,106 @@ # name: test_import_of_snowsql_connections.1 '[{"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' # --- +# name: test_import_of_snowsql_connections[config_ng] + ''' + [ + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_of_snowsql_connections[config_ng].1 + ''' + [ + { + "connection_name": "snowsql1", + "parameters": { + "account": 
"a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- # name: test_import_of_snowsql_connections[legacy] '[]' # --- @@ -171,6 +380,106 @@ # name: test_import_prompt_for_different_default_connection_name_on_conflict.1 '[{"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": true}, {"connection_name": "example", 
"parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": false}]' # --- +# name: test_import_prompt_for_different_default_connection_name_on_conflict[config_ng] + ''' + [ + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_prompt_for_different_default_connection_name_on_conflict[config_ng].1 + ''' + [ + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": true + }, + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { 
+ "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": false + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- # name: test_import_prompt_for_different_default_connection_name_on_conflict[legacy] '[]' # --- @@ -248,6 +557,116 @@ # name: test_import_reject_on_conflict_with_existing_cli_connection.1 '[{"connection_name": "example", "parameters": {"user": "u1", "schema": "public", "authenticator": "SNOWFLAKE_JWT"}, "is_default": false}, {"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' # --- +# name: test_import_reject_on_conflict_with_existing_cli_connection[config_ng] + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + 
"schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_reject_on_conflict_with_existing_cli_connection[config_ng].1 + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + }, + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + }, + { + "connection_name": "integration", + "parameters": { + "account": "po52878", + "user": "mraba", + "host": "po52878.snowflakecomputing.com", + "database": "SNOWCLI_DB", + "warehouse": "xsmall", + "role": "integration_tests", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- # 
name: test_import_reject_on_conflict_with_existing_cli_connection[legacy] ''' [ From 2db49fbd27b1c5c58373138faa9bcc8ca2db0f8b Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 10 Oct 2025 23:08:50 +0200 Subject: [PATCH 33/78] SNOW-2306184: config refactor - e2e fix attempt 5b --- .../test_import_snowsql_connections.ambr | 24 ------------------- tests_e2e/test_import_snowsql_connections.py | 5 ++++ 2 files changed, 5 insertions(+), 24 deletions(-) diff --git a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr b/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr index 18dd1e3141..62da1c75ed 100644 --- a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr +++ b/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr @@ -1,10 +1,4 @@ # serializer version: 1 -# name: test_import_confirm_on_conflict_with_existing_cli_connection - '[{"connection_name": "example", "parameters": {"user": "u1", "schema": "public", "authenticator": "SNOWFLAKE_JWT"}, "is_default": false}]' -# --- -# name: test_import_confirm_on_conflict_with_existing_cli_connection.1 - '[{"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": 
"default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' -# --- # name: test_import_confirm_on_conflict_with_existing_cli_connection[config_ng] ''' [ @@ -197,12 +191,6 @@ ] ''' # --- -# name: test_import_of_snowsql_connections - '[]' -# --- -# name: test_import_of_snowsql_connections.1 - '[{"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' -# --- # name: test_import_of_snowsql_connections[config_ng] ''' [ @@ -374,12 +362,6 @@ ] ''' # --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict - '[]' -# --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict.1 - '[{"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": 
"d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": true}, {"connection_name": "example", "parameters": {"account": "accountname", "user": "username"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", "host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": false}]' -# --- # name: test_import_prompt_for_different_default_connection_name_on_conflict[config_ng] ''' [ @@ -551,12 +533,6 @@ ] ''' # --- -# name: test_import_reject_on_conflict_with_existing_cli_connection - '[{"connection_name": "example", "parameters": {"user": "u1", "schema": "public", "authenticator": "SNOWFLAKE_JWT"}, "is_default": false}]' -# --- -# name: test_import_reject_on_conflict_with_existing_cli_connection.1 - '[{"connection_name": "example", "parameters": {"user": "u1", "schema": "public", "authenticator": "SNOWFLAKE_JWT"}, "is_default": false}, {"connection_name": "snowsql1", "parameters": {"account": "a1", "user": "u1", "host": "h1_override", "database": "d1", "schema": "public", "warehouse": "w1", "role": "r1"}, "is_default": false}, {"connection_name": "snowsql2", "parameters": {"account": "a2", "user": "u2", "host": "h2", "port": 1234, "database": "d2", "schema": "public", "warehouse": "w2", "role": "r2"}, "is_default": false}, {"connection_name": "snowsql3", "parameters": {"account": "a3", "user": "u3", "password": "****", "host": "h3", "database": "d3", "schema": "public", "warehouse": "w3", "role": "r3"}, "is_default": false}, {"connection_name": "default", "parameters": {"account": "default_connection_account", "user": "default_connection_user", 
"host": "localhost", "database": "default_connection_database_override", "schema": "public", "warehouse": "default_connection_warehouse", "role": "accountadmin"}, "is_default": true}]' -# --- # name: test_import_reject_on_conflict_with_existing_cli_connection[config_ng] ''' [ diff --git a/tests_e2e/test_import_snowsql_connections.py b/tests_e2e/test_import_snowsql_connections.py index 2cbef91be2..38ac21a923 100644 --- a/tests_e2e/test_import_snowsql_connections.py +++ b/tests_e2e/test_import_snowsql_connections.py @@ -166,6 +166,10 @@ def test_connection_imported_from_snowsql( snowcli, test_root_path, empty_config_file, config_mode ): """Test that imported connection works.""" + # In config_ng, an INTEGRATION connection may already exist via env vars. + # Confirm override explicitly to avoid interactive abort. + stdin = "y\n" if config_mode == "config_ng" else None + result = subprocess_run( [ snowcli, @@ -176,6 +180,7 @@ def test_connection_imported_from_snowsql( "--snowsql-config-file", test_root_path / "config" / "snowsql" / "integration_config", ], + stdin=stdin, ) assert result.returncode == 0 From 18a52ba479d6096009b442d8cf76397ffb77ca91 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 13 Oct 2025 10:05:20 +0200 Subject: [PATCH 34/78] SNOW-2306184: config refactor - config_snapshot to distingush between legacy and config_ng --- .github/workflows/test.yaml | 9 +- .github/workflows/test_e2e.yaml | 4 - .github/workflows/test_integration.yaml | 4 - .github/workflows/test_trusted.yaml | 3 - tests/conftest.py | 20 ++ ...import_snowsql_connections_config_ng.ambr} | 325 +----------------- ...est_import_snowsql_connections_legacy.ambr | 310 +++++++++++++++++ tests_e2e/conftest.py | 28 -- tests_e2e/test_import_snowsql_connections.py | 42 +-- tests_integration/conftest.py | 29 -- ...ambr => test_broken_plugin_config_ng.ambr} | 16 +- .../test_broken_plugin_legacy.ambr | 15 + ...mbr => test_failing_plugin_config_ng.ambr} | 16 +- .../test_failing_plugin_legacy.ambr | 
15 + ...erride_by_external_plugins_config_ng.ambr} | 33 +- ...t_override_by_external_plugins_legacy.ambr | 30 ++ .../plugin/test_broken_plugin.py | 8 +- .../plugin/test_failing_plugin.py | 8 +- .../test_override_by_external_plugins.py | 16 +- 19 files changed, 433 insertions(+), 498 deletions(-) rename tests_e2e/__snapshots__/{test_import_snowsql_connections.ambr => test_import_snowsql_connections_config_ng.ambr} (57%) create mode 100644 tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr rename tests_integration/plugin/__snapshots__/{test_failing_plugin.ambr => test_broken_plugin_config_ng.ambr} (70%) create mode 100644 tests_integration/plugin/__snapshots__/test_broken_plugin_legacy.ambr rename tests_integration/plugin/__snapshots__/{test_broken_plugin.ambr => test_failing_plugin_config_ng.ambr} (68%) create mode 100644 tests_integration/plugin/__snapshots__/test_failing_plugin_legacy.ambr rename tests_integration/plugin/__snapshots__/{test_override_by_external_plugins.ambr => test_override_by_external_plugins_config_ng.ambr} (67%) create mode 100644 tests_integration/plugin/__snapshots__/test_override_by_external_plugins_legacy.ambr diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 8fbc3779c4..181285a064 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -28,8 +28,6 @@ jobs: os: ${{ fromJSON(needs.define-matrix.outputs.os) }} python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} runs-on: ${{ matrix.os }} - env: - PYTEST_ADDOPTS: -k legacy steps: - uses: actions/checkout@v4 with: @@ -46,7 +44,7 @@ jobs: run: hatch run test-cov - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 - tests-ng: + tests-config-ng: needs: define-matrix strategy: fail-fast: true @@ -54,6 +52,8 @@ jobs: os: ${{ fromJSON(needs.define-matrix.outputs.os) }} python-version: ${{ fromJSON(needs.define-matrix.outputs.python) }} runs-on: ${{ matrix.os }} + env: + 
SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 steps: - uses: actions/checkout@v4 with: @@ -69,6 +69,3 @@ jobs: - name: Test with hatch run: hatch run test-cov - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 - env: - SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 - PYTEST_ADDOPTS: -k config_ng diff --git a/.github/workflows/test_e2e.yaml b/.github/workflows/test_e2e.yaml index f3a73c4340..6842460719 100644 --- a/.github/workflows/test_e2e.yaml +++ b/.github/workflows/test_e2e.yaml @@ -44,8 +44,6 @@ jobs: python-env: e2e hatch-run: e2e:test secrets: inherit - env: - PYTEST_ADDOPTS: -k legacy # Repo owner has commented /ok-to-test on a (fork-based) pull request e2e-fork: @@ -73,5 +71,3 @@ jobs: hatch-run: e2e:test job-name: e2e-fork secrets: inherit - env: - PYTEST_ADDOPTS: -k legacy diff --git a/.github/workflows/test_integration.yaml b/.github/workflows/test_integration.yaml index 9232dc687d..07ef4ace7e 100644 --- a/.github/workflows/test_integration.yaml +++ b/.github/workflows/test_integration.yaml @@ -45,8 +45,6 @@ jobs: python-env: integration hatch-run: integration:test secrets: inherit - env: - PYTEST_ADDOPTS: -k legacy integration-fork: needs: define-matrix @@ -73,5 +71,3 @@ jobs: hatch-run: integration:test job-name: integration-fork secrets: inherit - env: - PYTEST_ADDOPTS: -k legacy diff --git a/.github/workflows/test_trusted.yaml b/.github/workflows/test_trusted.yaml index 1ca2fbb228..a101d03631 100644 --- a/.github/workflows/test_trusted.yaml +++ b/.github/workflows/test_trusted.yaml @@ -23,8 +23,6 @@ permissions: jobs: tests-trusted: runs-on: ${{ inputs.runs-on }} - env: - PYTEST_ADDOPTS: -k legacy steps: - uses: actions/checkout@v4 with: @@ -53,7 +51,6 @@ jobs: runs-on: ${{ inputs.runs-on }} env: SNOWFLAKE_CLI_CONFIG_V2_ENABLED: 1 - PYTEST_ADDOPTS: -k config_ng steps: - uses: actions/checkout@v4 with: diff --git a/tests/conftest.py b/tests/conftest.py index 0390868308..4816ce9534 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -89,11 +89,31 
@@ def matches( ) +class ConfigModeSnapshotExtension(AmberSnapshotExtension): + """Snapshot extension that includes config mode in snapshot file name.""" + + @classmethod + def _get_file_basename(cls, *, test_location, index): + """Generate snapshot filename with config mode suffix.""" + config_mode = ( + "config_ng" if os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") else "legacy" + ) + basename = super()._get_file_basename(test_location=test_location, index=index) + # Insert config mode before .ambr extension + return f"{basename}_{config_mode}" + + @pytest.fixture() def os_agnostic_snapshot(snapshot): return snapshot.use_extension(CustomSnapshotExtension) +@pytest.fixture() +def config_snapshot(snapshot): + """Config-mode-aware snapshot fixture for tests that differ between legacy and config_ng.""" + return snapshot.use_extension(ConfigModeSnapshotExtension) + + @pytest.fixture(autouse=True) # Global context and logging levels reset is required. # Without it, state from previous tests is visible in following tests. 
diff --git a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr b/tests_e2e/__snapshots__/test_import_snowsql_connections_config_ng.ambr similarity index 57% rename from tests_e2e/__snapshots__/test_import_snowsql_connections.ambr rename to tests_e2e/__snapshots__/test_import_snowsql_connections_config_ng.ambr index 62da1c75ed..cd40a90ca5 100644 --- a/tests_e2e/__snapshots__/test_import_snowsql_connections.ambr +++ b/tests_e2e/__snapshots__/test_import_snowsql_connections_config_ng.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_import_confirm_on_conflict_with_existing_cli_connection[config_ng] +# name: test_import_confirm_on_conflict_with_existing_cli_connection ''' [ { @@ -27,7 +27,7 @@ ] ''' # --- -# name: test_import_confirm_on_conflict_with_existing_cli_connection[config_ng].1 +# name: test_import_confirm_on_conflict_with_existing_cli_connection.1 ''' [ { @@ -108,90 +108,7 @@ ] ''' # --- -# name: test_import_confirm_on_conflict_with_existing_cli_connection[legacy] - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_confirm_on_conflict_with_existing_cli_connection[legacy].1 - ''' - [ - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", 
- "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": true - } - ] - ''' -# --- -# name: test_import_of_snowsql_connections[config_ng] +# name: test_import_of_snowsql_connections ''' [ { @@ -210,7 +127,7 @@ ] ''' # --- -# name: test_import_of_snowsql_connections[config_ng].1 +# name: test_import_of_snowsql_connections.1 ''' [ { @@ -291,78 +208,7 @@ ] ''' # --- -# name: test_import_of_snowsql_connections[legacy] - '[]' -# --- -# name: test_import_of_snowsql_connections[legacy].1 - ''' - [ - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": 
"accountadmin" - }, - "is_default": true - } - ] - ''' -# --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict[config_ng] +# name: test_import_prompt_for_different_default_connection_name_on_conflict ''' [ { @@ -381,7 +227,7 @@ ] ''' # --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict[config_ng].1 +# name: test_import_prompt_for_different_default_connection_name_on_conflict.1 ''' [ { @@ -462,78 +308,7 @@ ] ''' # --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict[legacy] - '[]' -# --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict[legacy].1 - ''' - [ - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": true - }, - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_reject_on_conflict_with_existing_cli_connection[config_ng] +# name: 
test_import_reject_on_conflict_with_existing_cli_connection ''' [ { @@ -561,7 +336,7 @@ ] ''' # --- -# name: test_import_reject_on_conflict_with_existing_cli_connection[config_ng].1 +# name: test_import_reject_on_conflict_with_existing_cli_connection.1 ''' [ { @@ -643,87 +418,3 @@ ] ''' # --- -# name: test_import_reject_on_conflict_with_existing_cli_connection[legacy] - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_reject_on_conflict_with_existing_cli_connection[legacy].1 - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - }, - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": true - } - ] - ''' -# --- diff --git a/tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr 
b/tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr new file mode 100644 index 0000000000..e56700cfeb --- /dev/null +++ b/tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr @@ -0,0 +1,310 @@ +# serializer version: 1 +# name: test_import_confirm_on_conflict_with_existing_cli_connection + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_confirm_on_conflict_with_existing_cli_connection.1 + ''' + [ + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + } + ] + ''' +# --- +# name: test_import_of_snowsql_connections + '[]' +# --- +# name: test_import_of_snowsql_connections.1 + ''' + [ + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", 
+ "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + } + ] + ''' +# --- +# name: test_import_prompt_for_different_default_connection_name_on_conflict + '[]' +# --- +# name: test_import_prompt_for_different_default_connection_name_on_conflict.1 + ''' + [ + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": true + }, + { + "connection_name": "example", + "parameters": { + "account": "accountname", + "user": "username" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": 
"public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_reject_on_conflict_with_existing_cli_connection + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + } + ] + ''' +# --- +# name: test_import_reject_on_conflict_with_existing_cli_connection.1 + ''' + [ + { + "connection_name": "example", + "parameters": { + "user": "u1", + "schema": "public", + "authenticator": "SNOWFLAKE_JWT" + }, + "is_default": false + }, + { + "connection_name": "snowsql1", + "parameters": { + "account": "a1", + "user": "u1", + "host": "h1_override", + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1" + }, + "is_default": false + }, + { + "connection_name": "snowsql2", + "parameters": { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2" + }, + "is_default": false + }, + { + "connection_name": "snowsql3", + "parameters": { + "account": "a3", + "user": "u3", + "password": "****", + "host": "h3", + "database": "d3", + "schema": "public", + "warehouse": "w3", + "role": "r3" + }, + "is_default": false + }, + { + "connection_name": "default", + "parameters": { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin" + }, + "is_default": true + } + ] + ''' +# --- diff --git 
a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 328463d7b8..996d906506 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -210,31 +210,3 @@ def example_connection_config_file(test_root_path, prepare_test_config_file): yield prepare_test_config_file( SecurePath(test_root_path) / "config" / "example_connection.toml" ) - - -@pytest.fixture -def config_mode(request, monkeypatch): - """ - Fixture to switch between legacy and config_ng modes. - - When parameterized with ["legacy", "config_ng"], this fixture sets the - appropriate environment variable to enable/disable the new config system. - Each parameter value creates a separate test instance with its own snapshot. - - Usage: - @pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) - def test_something(config_mode, snapshot): - # Test runs twice: once with legacy, once with config_ng - # Each gets its own snapshot: test_something[legacy] and test_something[config_ng] - ... - """ - mode = getattr(request, "param", "config_ng") # default to config_ng - - if mode == "config_ng": - # Enable new config system - monkeypatch.setenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", "true") - else: - # Ensure new config system is disabled (legacy mode) - monkeypatch.delenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", raising=False) - - return mode diff --git a/tests_e2e/test_import_snowsql_connections.py b/tests_e2e/test_import_snowsql_connections.py index 38ac21a923..9de5e44df9 100644 --- a/tests_e2e/test_import_snowsql_connections.py +++ b/tests_e2e/test_import_snowsql_connections.py @@ -18,15 +18,14 @@ def _get_connections_list_output(snowcli, config_file) -> str: ) -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e def test_import_of_snowsql_connections( - snowcli, test_root_path, empty_config_file, snapshot, config_mode + snowcli, test_root_path, empty_config_file, config_snapshot ): - """Test connection import with both legacy and config_ng systems.""" + 
"""Test connection import.""" # Initially should have empty or minimal connections list initial_output = _get_connections_list_output(snowcli, empty_config_file) - assert initial_output == snapshot + assert initial_output == config_snapshot # Import snowsql connections result = subprocess_run( @@ -46,18 +45,17 @@ def test_import_of_snowsql_connections( # After import, should have multiple connections final_output = _get_connections_list_output(snowcli, empty_config_file) - assert final_output == snapshot + assert final_output == config_snapshot -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e def test_import_prompt_for_different_default_connection_name_on_conflict( - snowcli, test_root_path, empty_config_file, snapshot, config_mode + snowcli, test_root_path, empty_config_file, config_snapshot ): """Test importing with different default connection name.""" # Initially should have empty or minimal connections list initial_output = _get_connections_list_output(snowcli, empty_config_file) - assert initial_output == snapshot + assert initial_output == config_snapshot # Import with different default connection name result = subprocess_run( @@ -80,24 +78,22 @@ def test_import_prompt_for_different_default_connection_name_on_conflict( # After import, snowsql2 should be the default final_output = _get_connections_list_output(snowcli, empty_config_file) - assert final_output == snapshot + assert final_output == config_snapshot -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e def test_import_confirm_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, - snapshot, - config_mode, + config_snapshot, ): """Test import with confirmation on conflict.""" # Initially should have example and integration connections initial_output = _get_connections_list_output( snowcli, example_connection_config_file ) - assert initial_output == snapshot + 
assert initial_output == config_snapshot # Import with confirmation (y) result = subprocess_run( @@ -118,24 +114,22 @@ def test_import_confirm_on_conflict_with_existing_cli_connection( # After import, example connection should be overwritten with snowsql data final_output = _get_connections_list_output(snowcli, example_connection_config_file) - assert final_output == snapshot + assert final_output == config_snapshot -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e def test_import_reject_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, - snapshot, - config_mode, + config_snapshot, ): """Test import with rejection on conflict.""" # Initially should have example and integration connections initial_output = _get_connections_list_output( snowcli, example_connection_config_file ) - assert initial_output == snapshot + assert initial_output == config_snapshot # Import with rejection (n) result = subprocess_run( @@ -157,18 +151,14 @@ def test_import_reject_on_conflict_with_existing_cli_connection( # After import, example connection should remain unchanged # But other connections should still be imported final_output = _get_connections_list_output(snowcli, example_connection_config_file) - assert final_output == snapshot + assert final_output == config_snapshot -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.e2e -def test_connection_imported_from_snowsql( - snowcli, test_root_path, empty_config_file, config_mode -): +def test_connection_imported_from_snowsql(snowcli, test_root_path, empty_config_file): """Test that imported connection works.""" - # In config_ng, an INTEGRATION connection may already exist via env vars. - # Confirm override explicitly to avoid interactive abort. - stdin = "y\n" if config_mode == "config_ng" else None + # Always provide confirmation to avoid interactive abort. 
+ stdin = "y\n" result = subprocess_run( [ diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index 8f827efd90..965fed0990 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -308,32 +308,3 @@ def enable_snowpark_glob_support_feature_flag(): def global_setup(monkeypatch): width = 81 if IS_WINDOWS else 80 monkeypatch.setenv("COLUMNS", str(width)) - - -@pytest.fixture -def config_mode(request, monkeypatch): - """ - Fixture to switch between legacy and config_ng modes. - - When parameterized with ["legacy", "config_ng"], this fixture sets the - appropriate environment variable to enable/disable the new config system. - Each parameter value creates a separate test instance with its own snapshot. - - Usage: - @pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) - @pytest.mark.integration - def test_something(runner, config_mode, snapshot): - # Test runs twice: once with legacy, once with config_ng - # Each gets its own snapshot - ... 
- """ - mode = getattr(request, "param", "config_ng") # default to config_ng - - if mode == "config_ng": - # Enable new config system - monkeypatch.setenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", "true") - else: - # Ensure new config system is disabled (legacy mode) - monkeypatch.delenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED", raising=False) - - return mode diff --git a/tests_integration/plugin/__snapshots__/test_failing_plugin.ambr b/tests_integration/plugin/__snapshots__/test_broken_plugin_config_ng.ambr similarity index 70% rename from tests_integration/plugin/__snapshots__/test_failing_plugin.ambr rename to tests_integration/plugin/__snapshots__/test_broken_plugin_config_ng.ambr index abde5267c6..a030fe53c7 100644 --- a/tests_integration/plugin/__snapshots__/test_failing_plugin.ambr +++ b/tests_integration/plugin/__snapshots__/test_broken_plugin_config_ng.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_failing_plugin[config_ng] +# name: test_broken_command_path_plugin ''' [ { @@ -26,17 +26,3 @@ ''' # --- -# name: test_failing_plugin[legacy] - ''' - [ - { - "connection_name": "test", - "parameters": { - "account": "test" - }, - "is_default": false - } - ] - - ''' -# --- diff --git a/tests_integration/plugin/__snapshots__/test_broken_plugin_legacy.ambr b/tests_integration/plugin/__snapshots__/test_broken_plugin_legacy.ambr new file mode 100644 index 0000000000..87d2a1f15c --- /dev/null +++ b/tests_integration/plugin/__snapshots__/test_broken_plugin_legacy.ambr @@ -0,0 +1,15 @@ +# serializer version: 1 +# name: test_broken_command_path_plugin + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- diff --git a/tests_integration/plugin/__snapshots__/test_broken_plugin.ambr b/tests_integration/plugin/__snapshots__/test_failing_plugin_config_ng.ambr similarity index 68% rename from tests_integration/plugin/__snapshots__/test_broken_plugin.ambr rename to 
tests_integration/plugin/__snapshots__/test_failing_plugin_config_ng.ambr index cc63440005..2b51fdbfe1 100644 --- a/tests_integration/plugin/__snapshots__/test_broken_plugin.ambr +++ b/tests_integration/plugin/__snapshots__/test_failing_plugin_config_ng.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_broken_command_path_plugin[config_ng] +# name: test_failing_plugin ''' [ { @@ -26,17 +26,3 @@ ''' # --- -# name: test_broken_command_path_plugin[legacy] - ''' - [ - { - "connection_name": "test", - "parameters": { - "account": "test" - }, - "is_default": false - } - ] - - ''' -# --- diff --git a/tests_integration/plugin/__snapshots__/test_failing_plugin_legacy.ambr b/tests_integration/plugin/__snapshots__/test_failing_plugin_legacy.ambr new file mode 100644 index 0000000000..144e641e4c --- /dev/null +++ b/tests_integration/plugin/__snapshots__/test_failing_plugin_legacy.ambr @@ -0,0 +1,15 @@ +# serializer version: 1 +# name: test_failing_plugin + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- diff --git a/tests_integration/plugin/__snapshots__/test_override_by_external_plugins.ambr b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_config_ng.ambr similarity index 67% rename from tests_integration/plugin/__snapshots__/test_override_by_external_plugins.ambr rename to tests_integration/plugin/__snapshots__/test_override_by_external_plugins_config_ng.ambr index 1531ece165..3b8f510024 100644 --- a/tests_integration/plugin/__snapshots__/test_override_by_external_plugins.ambr +++ b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_config_ng.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_disabled_plugin_is_not_executed[config_ng] +# name: test_disabled_plugin_is_not_executed ''' [ { @@ -26,21 +26,7 @@ ''' # --- -# name: test_disabled_plugin_is_not_executed[legacy] - ''' - [ - { - "connection_name": "test", - "parameters": { - 
"account": "test" - }, - "is_default": false - } - ] - - ''' -# --- -# name: test_override_build_in_commands[config_ng] +# name: test_override_build_in_commands ''' Outside command code [ @@ -68,18 +54,3 @@ ''' # --- -# name: test_override_build_in_commands[legacy] - ''' - Outside command code - [ - { - "connection_name": "test", - "parameters": { - "account": "test" - }, - "is_default": false - } - ] - - ''' -# --- diff --git a/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_legacy.ambr b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_legacy.ambr new file mode 100644 index 0000000000..947fa54d21 --- /dev/null +++ b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_legacy.ambr @@ -0,0 +1,30 @@ +# serializer version: 1 +# name: test_disabled_plugin_is_not_executed + ''' + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- +# name: test_override_build_in_commands + ''' + Outside command code + [ + { + "connection_name": "test", + "parameters": { + "account": "test" + }, + "is_default": false + } + ] + + ''' +# --- diff --git a/tests_integration/plugin/test_broken_plugin.py b/tests_integration/plugin/test_broken_plugin.py index 931de53f5e..9d9c8dc02c 100644 --- a/tests_integration/plugin/test_broken_plugin.py +++ b/tests_integration/plugin/test_broken_plugin.py @@ -15,12 +15,11 @@ import pytest -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration def test_broken_command_path_plugin( - runner, test_root_path, _install_plugin, caplog, snapshot, config_mode + runner, test_root_path, _install_plugin, caplog, config_snapshot ): - """Test broken plugin with both legacy and config_ng systems.""" + """Test broken plugin.""" config_path = ( test_root_path / "config" / "plugin_tests" / "broken_plugin_config.toml" ) @@ -37,8 +36,7 @@ def test_broken_command_path_plugin( ) # Use 
snapshot to capture the output - # Each config_mode gets its own snapshot automatically - assert result.output == snapshot + assert result.output == config_snapshot @pytest.fixture(scope="module") diff --git a/tests_integration/plugin/test_failing_plugin.py b/tests_integration/plugin/test_failing_plugin.py index d26a94ab3f..32f0eebde1 100644 --- a/tests_integration/plugin/test_failing_plugin.py +++ b/tests_integration/plugin/test_failing_plugin.py @@ -15,12 +15,11 @@ import pytest -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration def test_failing_plugin( - runner, test_root_path, _install_plugin, caplog, snapshot, config_mode + runner, test_root_path, _install_plugin, caplog, config_snapshot ): - """Test failing plugin with both legacy and config_ng systems.""" + """Test failing plugin.""" config_path = ( test_root_path / "config" / "plugin_tests" / "failing_plugin_config.toml" ) @@ -35,8 +34,7 @@ def test_failing_plugin( ) # Use snapshot to capture the output - # Each config_mode gets its own snapshot automatically - assert result.output == snapshot + assert result.output == config_snapshot @pytest.fixture(scope="module") diff --git a/tests_integration/plugin/test_override_by_external_plugins.py b/tests_integration/plugin/test_override_by_external_plugins.py index 9e4ae487ac..951012fa29 100644 --- a/tests_integration/plugin/test_override_by_external_plugins.py +++ b/tests_integration/plugin/test_override_by_external_plugins.py @@ -15,12 +15,11 @@ import pytest -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration def test_override_build_in_commands( - runner, test_root_path, _install_plugin, caplog, snapshot, config_mode + runner, test_root_path, _install_plugin, caplog, config_snapshot ): - """Test plugin override attempt with both legacy and config_ng systems.""" + """Test plugin override attempt.""" config_path = ( test_root_path / "config" / 
"plugin_tests" / "override_plugin_config.toml" ) @@ -35,16 +34,14 @@ def test_override_build_in_commands( ) # Use snapshot to capture the output - # Each config_mode gets its own snapshot automatically - assert result.output == snapshot + assert result.output == config_snapshot -@pytest.mark.parametrize("config_mode", ["legacy", "config_ng"], indirect=True) @pytest.mark.integration def test_disabled_plugin_is_not_executed( - runner, test_root_path, _install_plugin, caplog, snapshot, config_mode + runner, test_root_path, _install_plugin, caplog, config_snapshot ): - """Test disabled plugin with both legacy and config_ng systems.""" + """Test disabled plugin.""" config_path = ( test_root_path / "config" @@ -57,8 +54,7 @@ def test_disabled_plugin_is_not_executed( ) # Use snapshot to capture the output - # Each config_mode gets its own snapshot automatically - assert result.output == snapshot + assert result.output == config_snapshot @pytest.fixture(scope="module") From 1947e9d3fab888116cd49248fbbbd1cb67a642df Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 13 Oct 2025 11:06:26 +0200 Subject: [PATCH 35/78] SNOW-2306184: config refactor - invalidate singleton between tests --- src/snowflake/cli/api/config.py | 10 +++++++++ src/snowflake/cli/api/config_provider.py | 28 ++++++++++++++++++++++++ tests/conftest.py | 12 +++++++++- 3 files changed, 49 insertions(+), 1 deletion(-) diff --git a/src/snowflake/cli/api/config.py b/src/snowflake/cli/api/config.py index af676c5dd3..6c260f97b1 100644 --- a/src/snowflake/cli/api/config.py +++ b/src/snowflake/cli/api/config.py @@ -221,6 +221,16 @@ def _config_file(): yield conf_file_cache _dump_config(conf_file_cache) + # Reset config provider cache after writing to ensure it re-reads on next access + try: + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + if hasattr(provider, "invalidate_cache"): + provider.invalidate_cache() + except Exception: + pass + 
def _read_config_file(): config_manager = get_config_manager() diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 2e4900144a..c4e812a641 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -16,6 +16,7 @@ import os from abc import ABC, abstractmethod +from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Optional if TYPE_CHECKING: @@ -192,9 +193,24 @@ def __init__(self) -> None: self._resolver: Optional[ConfigurationResolver] = None self._config_cache: Dict[str, Any] = {} self._initialized: bool = False + self._last_config_override: Optional[Path] = None def _ensure_initialized(self) -> None: """Lazily initialize the resolver on first use.""" + # Check if config_file_override has changed + try: + from snowflake.cli.api.cli_global_context import get_cli_context + + current_override = get_cli_context().config_file_override + + # If override changed, force re-initialization + if current_override != self._last_config_override: + self._initialized = False + self._config_cache.clear() + self._last_config_override = current_override + except Exception: + pass + if self._initialized: return @@ -247,6 +263,7 @@ def read_config(self) -> None: """ self._initialized = False self._config_cache.clear() + self._last_config_override = None # Reset cached override to force re-check self._ensure_initialized() # Resolve all configuration to populate cache @@ -525,6 +542,17 @@ def get_all_connections(self) -> dict: for name, config in connections_dict.items() } + def invalidate_cache(self) -> None: + """ + Invalidate the provider's cache, forcing it to re-read configuration on next access. + + This is useful when configuration files are modified externally. 
+ """ + self._initialized = False + self._config_cache.clear() + if hasattr(self, "_last_config_override"): + self._last_config_override = None + def _is_alternative_config_enabled() -> bool: """ diff --git a/tests/conftest.py b/tests/conftest.py index 4816ce9534..9e0a7cb2d6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -124,13 +124,23 @@ def config_snapshot(snapshot): # In addition to its own CliContextManager, each test gets its own OpenConnectionCache # which is cleared after the test completes. def reset_global_context_and_setup_config_and_logging_levels( - request, test_snowcli_config + request, test_snowcli_config, monkeypatch ): # Reset config provider singleton to prevent test interference from snowflake.cli.api.config_provider import reset_config_provider reset_config_provider() + # Clear SNOWFLAKE_CONNECTIONS_* env vars for test isolation with config_ng + # These may be set in CI/dev environments and interfere with tests + import os + + for key in list(os.environ.keys()): + if key.startswith("SNOWFLAKE_CONNECTIONS_") or key.startswith( + "SNOWSQL_CONNECTIONS_" + ): + monkeypatch.delenv(key, raising=False) + with fork_cli_context(): connection_cache = OpenConnectionCache() cli_context_manager = get_cli_context_manager() From f4083747a186107d78bcf4e0694421eb18cb8210 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 13 Oct 2025 16:27:15 +0200 Subject: [PATCH 36/78] SNOW-2306184: config refactor - list connections --all --- .../cli/_plugins/connection/commands.py | 25 +- src/snowflake/cli/api/config_provider.py | 71 ++- tests/__snapshots__/test_help_messages.ambr | 3 + tests/test_config_provider_integration.py | 136 +++++- tests/test_connection.py | 46 ++ ..._import_snowsql_connections_config_ng.ambr | 420 ------------------ ...est_import_snowsql_connections_legacy.ambr | 310 ------------- tests_e2e/conftest.py | 21 + tests_e2e/test_import_snowsql_connections.py | 218 ++++++++- tests_integration/conftest.py | 22 + 10 files changed, 507 
insertions(+), 765 deletions(-) delete mode 100644 tests_e2e/__snapshots__/test_import_snowsql_connections_config_ng.ambr delete mode 100644 tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr diff --git a/src/snowflake/cli/_plugins/connection/commands.py b/src/snowflake/cli/_plugins/connection/commands.py index f177a963be..f9bd8930b6 100644 --- a/src/snowflake/cli/_plugins/connection/commands.py +++ b/src/snowflake/cli/_plugins/connection/commands.py @@ -94,11 +94,32 @@ def _mask_sensitive_parameters(connection_params: dict): @app.command(name="list") -def list_connections(**options) -> CommandResult: +def list_connections( + all_sources: bool = typer.Option( + False, + "--all", + "-a", + help="Include connections from all sources (environment variables, SnowSQL config). " + "By default, only shows connections from configuration files.", + ), + **options, +) -> CommandResult: """ Lists configured connections. """ - connections = get_all_connections() + from snowflake.cli.api.config_provider import ( + _is_alternative_config_enabled, + get_config_provider_singleton, + ) + + # Use provider directly for config_ng to pass the flag + if _is_alternative_config_enabled(): + provider = get_config_provider_singleton() + connections = provider.get_all_connections(include_env_connections=all_sources) + else: + # Legacy provider ignores the flag + connections = get_all_connections() + default_connection = get_default_connection_name() result = ( { diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index c4e812a641..35dabf6cb7 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -67,8 +67,13 @@ def get_connection_dict(self, connection_name: str) -> dict: ... 
@abstractmethod - def get_all_connections(self) -> dict: - """Get all connection configurations.""" + def get_all_connections(self, include_env_connections: bool = False) -> dict: + """Get all connection configurations. + + Args: + include_env_connections: If True, include connections created from + environment variables. Default False. + """ ... def _transform_private_key_raw(self, connection_dict: dict) -> dict: @@ -171,9 +176,10 @@ def get_connection_dict(self, connection_name: str) -> dict: f"Connection {connection_name} is not configured" ) - def get_all_connections(self) -> dict: + def get_all_connections(self, include_env_connections: bool = False) -> dict: from snowflake.cli.api.config import get_all_connections + # Legacy provider ignores the flag since it never had env connections return get_all_connections() @@ -527,21 +533,78 @@ def _get_all_connections_dict(self) -> Dict[str, Dict[str, Any]]: return connections - def get_all_connections(self) -> dict: + def get_all_connections(self, include_env_connections: bool = False) -> dict: """ Get all connection configurations. + Args: + include_env_connections: If True, include connections created from + environment variables. Default False for + backward compatibility with legacy behavior. + Returns: Dictionary mapping connection names to ConnectionConfig objects """ from snowflake.cli.api.config import ConnectionConfig + if not include_env_connections: + # Only return connections from file sources (matching legacy behavior) + return self._get_file_based_connections() + + # Return all connections including environment-based ones connections_dict = self._get_all_connections_dict() return { name: ConnectionConfig.from_dict(config) for name, config in connections_dict.items() } + def _get_file_based_connections(self) -> dict: + """ + Get connections only from file sources. + + Excludes connections that exist solely due to environment variables + or CLI parameters. Matches legacy behavior. 
+ + Returns: + Dictionary mapping connection names to ConnectionConfig objects + """ + from snowflake.cli.api.config import ConnectionConfig + + self._ensure_initialized() + + # Only query file sources: SnowSQL config, CLI config.toml, connections.toml + file_source_names = {"snowsql_config", "cli_config_toml", "connections_toml"} + + connections: Dict[str, Dict[str, Any]] = {} + connections_prefix = "connections." + + assert self._resolver is not None + for source in self._resolver._sources: # noqa: SLF001 + if source.source_name not in file_source_names: + continue + + try: + source_values = source.discover() + for key, config_value in source_values.items(): + if key.startswith(connections_prefix): + parts = key[len(connections_prefix) :].split(".", 1) + if len(parts) == 2: + conn_name, param_name = parts + if conn_name not in connections: + connections[conn_name] = {} + + # Skip internal markers + if param_name != "_empty_connection": + connections[conn_name][param_name] = config_value.value + except Exception: + # Silently skip sources that fail to discover + pass + + return { + name: ConnectionConfig.from_dict(config) + for name, config in connections.items() + } + def invalidate_cache(self) -> None: """ Invalidate the provider's cache, forcing it to re-read configuration on next access. diff --git a/tests/__snapshots__/test_help_messages.ambr b/tests/__snapshots__/test_help_messages.ambr index 25e07a8453..2b8175001a 100644 --- a/tests/__snapshots__/test_help_messages.ambr +++ b/tests/__snapshots__/test_help_messages.ambr @@ -4264,6 +4264,9 @@ Lists configured connections. +- Options --------------------------------------------------------------------+ + | --all -a Include connections from all sources (environment | + | variables, SnowSQL config). By default, only shows | + | connections from configuration files. | | --help -h Show this message and exit. 
| +------------------------------------------------------------------------------+ +- Global configuration -------------------------------------------------------+ diff --git a/tests/test_config_provider_integration.py b/tests/test_config_provider_integration.py index 6653ebb66a..836e27cbc8 100644 --- a/tests/test_config_provider_integration.py +++ b/tests/test_config_provider_integration.py @@ -168,6 +168,13 @@ def test_get_value_simple(self): with mock.patch.object(provider, "_resolver") as mock_resolver: mock_resolver.resolve.return_value = {"account": "test_account"} provider._initialized = True + # Prevent re-initialization due to config_file_override check + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None value = provider.get_value(key="account") assert value == "test_account" @@ -181,6 +188,13 @@ def test_get_value_with_path(self): "connections.default.account": "test_account" } provider._initialized = True + # Prevent re-initialization due to config_file_override check + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None value = provider.get_value("connections", "default", key="account") assert value == "test_account" @@ -204,6 +218,13 @@ def test_get_section_root(self): config_data = {"key1": "value1", "key2": "value2"} mock_resolver.resolve.return_value = config_data provider._initialized = True + # Prevent re-initialization due to config_file_override check + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None section = provider.get_section() assert section == config_data @@ -219,6 +240,13 
@@ def test_get_section_connections(self): "connections.prod.account": "prod_account", } provider._initialized = True + # Prevent re-initialization due to config_file_override check + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None section = provider.get_section("connections") assert "default" in section @@ -236,6 +264,13 @@ def test_get_section_specific_connection(self): "connections.default.user": "test_user", } provider._initialized = True + # Prevent re-initialization due to config_file_override check + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None section = provider.get_section("connections", "default") assert section == {"account": "test_account", "user": "test_user"} @@ -255,6 +290,13 @@ def test_get_connection_dict(self): "connections.default.password": "secret", } provider._initialized = True + # Prevent re-initialization due to config_file_override check + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None conn_dict = provider.get_connection_dict("default") assert conn_dict == { @@ -286,6 +328,13 @@ def test_get_all_connections_dict(self): "connections.prod.user": "prod_user", } provider._initialized = True + # Prevent re-initialization due to config_file_override check + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None all_conns = provider._get_all_connections_dict() assert "default" in all_conns @@ -304,22 +353,21 @@ def 
test_get_all_connections(self, mock_connection_config): """Test get_all_connections returns ConnectionConfig objects.""" provider = AlternativeConfigProvider() - with mock.patch.object(provider, "_resolver") as mock_resolver: - mock_resolver.resolve.return_value = { - "connections.default.account": "test_account", - "connections.default.user": "test_user", - } - provider._initialized = True + # Mock ConnectionConfig.from_dict + mock_config_instance = mock.Mock() + mock_connection_config.from_dict.return_value = mock_config_instance - # Mock ConnectionConfig.from_dict - mock_config_instance = mock.Mock() - mock_connection_config.from_dict.return_value = mock_config_instance + # Mock _get_file_based_connections to avoid resolver._sources access + with mock.patch.object( + provider, "_get_file_based_connections" + ) as mock_get_file_based: + mock_get_file_based.return_value = {"default": mock_config_instance} all_conns = provider.get_all_connections() assert "default" in all_conns assert all_conns["default"] == mock_config_instance - mock_connection_config.from_dict.assert_called_once() + mock_get_file_based.assert_called_once() class TestAlternativeConfigProviderWriteOperations: @@ -387,3 +435,71 @@ def test_provider_switching_via_environment(self): with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): provider = get_config_provider_singleton() assert isinstance(provider, AlternativeConfigProvider) + + +class TestAlternativeConfigProviderConnections: + """Tests for AlternativeConfigProvider connection filtering.""" + + def test_get_all_connections_excludes_env_by_default(self, monkeypatch): + """Test that get_all_connections excludes env-only connections by default.""" + monkeypatch.setenv(ALTERNATIVE_CONFIG_ENV_VAR, "1") + + # Set up environment variable for connection + monkeypatch.setenv("SNOWFLAKE_CONNECTIONS_ENVONLY_ACCOUNT", "test_account") + monkeypatch.setenv("SNOWFLAKE_CONNECTIONS_ENVONLY_USER", "test_user") + + reset_config_provider() + 
provider = get_config_provider_singleton() + + # Default: should not include env-only connection + connections = provider.get_all_connections(include_env_connections=False) + assert "envonly" not in connections + + # With flag: should include env-only connection + reset_config_provider() + all_connections = provider.get_all_connections(include_env_connections=True) + assert "envonly" in all_connections + assert all_connections["envonly"].account == "test_account" + assert all_connections["envonly"].user == "test_user" + + def test_get_all_connections_with_mixed_sources(self, monkeypatch): + """Test that file-based connections are included but env-only excluded by default.""" + monkeypatch.setenv(ALTERNATIVE_CONFIG_ENV_VAR, "1") + + # Set env variable for env-only connection + monkeypatch.setenv("SNOWFLAKE_CONNECTIONS_ENVCONN_ACCOUNT", "env_account") + + reset_config_provider() + provider = get_config_provider_singleton() + + # Without flag: should have file connections but not env-only connection + connections = provider.get_all_connections(include_env_connections=False) + # Test fixture connections should be present (from test.toml) + assert len(connections) > 0 + assert "envconn" not in connections + + # With flag: should have both file and env connections + reset_config_provider() + all_connections = provider.get_all_connections(include_env_connections=True) + assert "envconn" in all_connections + # Should have more connections when including env + assert len(all_connections) >= len(connections) + + def test_legacy_provider_ignores_include_env_flag(self, monkeypatch): + """Test that LegacyConfigProvider ignores the include_env_connections flag.""" + # Ensure legacy provider is used + monkeypatch.delenv(ALTERNATIVE_CONFIG_ENV_VAR, raising=False) + + reset_config_provider() + provider = get_config_provider_singleton() + + assert isinstance(provider, LegacyConfigProvider) + + # Both calls should return the same result (flag is ignored) + connections_default = 
provider.get_all_connections( + include_env_connections=False + ) + connections_all = provider.get_all_connections(include_env_connections=True) + + # Should be same connections (legacy doesn't filter) + assert set(connections_default.keys()) == set(connections_all.keys()) diff --git a/tests/test_connection.py b/tests/test_connection.py index d6612d25de..5bbf32d928 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -451,6 +451,52 @@ def test_connection_list_does_not_print_too_many_env_variables( ] +@mock.patch.dict( + os.environ, + { + "SNOWFLAKE_CLI_CONFIG_V2_ENABLED": "1", + "SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT": "test_account", + "SNOWFLAKE_CONNECTIONS_INTEGRATION_USER": "test_user", + }, + clear=True, +) +@mock.patch("snowflake.cli._plugins.connection.commands.get_default_connection_name") +def test_connection_list_all_flag_includes_env_connections( + mock_get_default_conn_name, runner +): + """Test that --all flag shows environment-based connections in config_ng mode.""" + from snowflake.cli.api.config_provider import reset_config_provider + + mock_get_default_conn_name.return_value = "empty" + + # Reset config provider to pick up new environment + reset_config_provider() + + # Without --all: should not show env-only connections + result = runner.invoke(["connection", "list", "--format", "json"]) + assert result.exit_code == 0, result.output + connections = json.loads(result.output) + connection_names = {c["connection_name"] for c in connections} + assert "integration" not in connection_names + + # Reset for second call + reset_config_provider() + + # With --all: should show env-based connections + result_all = runner.invoke(["connection", "list", "--all", "--format", "json"]) + assert result_all.exit_code == 0, result_all.output + connections_all = json.loads(result_all.output) + connection_names_all = {c["connection_name"] for c in connections_all} + assert "integration" in connection_names_all + + # Verify integration connection 
has expected parameters + integration_conn = next( + c for c in connections_all if c["connection_name"] == "integration" + ) + assert integration_conn["parameters"]["account"] == "test_account" + assert integration_conn["parameters"]["user"] == "test_user" + + def test_second_connection_not_update_default_connection(runner, os_agnostic_snapshot): with NamedTemporaryFile("w+", suffix=".toml") as tmp_file: tmp_file.write( diff --git a/tests_e2e/__snapshots__/test_import_snowsql_connections_config_ng.ambr b/tests_e2e/__snapshots__/test_import_snowsql_connections_config_ng.ambr deleted file mode 100644 index cd40a90ca5..0000000000 --- a/tests_e2e/__snapshots__/test_import_snowsql_connections_config_ng.ambr +++ /dev/null @@ -1,420 +0,0 @@ -# serializer version: 1 -# name: test_import_confirm_on_conflict_with_existing_cli_connection - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_confirm_on_conflict_with_existing_cli_connection.1 - ''' - [ - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - 
"connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": true - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_of_snowsql_connections - ''' - [ - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_of_snowsql_connections.1 - ''' - [ - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": 
"****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": true - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict - ''' - [ - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict.1 - ''' - [ - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": true - }, - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - 
"database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": false - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_reject_on_conflict_with_existing_cli_connection - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_reject_on_conflict_with_existing_cli_connection.1 - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - }, - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false 
- }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": true - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- diff --git a/tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr b/tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr deleted file mode 100644 index e56700cfeb..0000000000 --- a/tests_e2e/__snapshots__/test_import_snowsql_connections_legacy.ambr +++ /dev/null @@ -1,310 +0,0 @@ -# serializer version: 1 -# name: test_import_confirm_on_conflict_with_existing_cli_connection - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_confirm_on_conflict_with_existing_cli_connection.1 - ''' - [ - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - 
"user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": true - } - ] - ''' -# --- -# name: test_import_of_snowsql_connections - '[]' -# --- -# name: test_import_of_snowsql_connections.1 - ''' - [ - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, 
- "is_default": true - } - ] - ''' -# --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict - '[]' -# --- -# name: test_import_prompt_for_different_default_connection_name_on_conflict.1 - ''' - [ - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": "h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": true - }, - { - "connection_name": "example", - "parameters": { - "account": "accountname", - "user": "username" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_reject_on_conflict_with_existing_cli_connection - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - } - ] - ''' -# --- -# name: test_import_reject_on_conflict_with_existing_cli_connection.1 - ''' - [ - { - "connection_name": "example", - "parameters": { - "user": "u1", - "schema": "public", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false - }, - { - "connection_name": "snowsql1", - "parameters": { - "account": "a1", - "user": "u1", - "host": 
"h1_override", - "database": "d1", - "schema": "public", - "warehouse": "w1", - "role": "r1" - }, - "is_default": false - }, - { - "connection_name": "snowsql2", - "parameters": { - "account": "a2", - "user": "u2", - "host": "h2", - "port": 1234, - "database": "d2", - "schema": "public", - "warehouse": "w2", - "role": "r2" - }, - "is_default": false - }, - { - "connection_name": "snowsql3", - "parameters": { - "account": "a3", - "user": "u3", - "password": "****", - "host": "h3", - "database": "d3", - "schema": "public", - "warehouse": "w3", - "role": "r3" - }, - "is_default": false - }, - { - "connection_name": "default", - "parameters": { - "account": "default_connection_account", - "user": "default_connection_user", - "host": "localhost", - "database": "default_connection_database_override", - "schema": "public", - "warehouse": "default_connection_warehouse", - "role": "accountadmin" - }, - "is_default": true - } - ] - ''' -# --- diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 996d906506..56373d4c66 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -25,6 +25,7 @@ from snowflake.cli import __about__ from snowflake.cli.api.constants import PYTHON_3_12 from snowflake.cli.api.secure_path import SecurePath +from syrupy.extensions.amber import AmberSnapshotExtension from tests_common import IS_WINDOWS @@ -35,6 +36,26 @@ ] +class ConfigModeSnapshotExtension(AmberSnapshotExtension): + """Snapshot extension that includes config mode in snapshot file name.""" + + @classmethod + def _get_file_basename(cls, *, test_location, index): + """Generate snapshot filename with config mode suffix.""" + config_mode = ( + "config_ng" if os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") else "legacy" + ) + basename = super()._get_file_basename(test_location=test_location, index=index) + # Insert config mode before .ambr extension + return f"{basename}_{config_mode}" + + +@pytest.fixture() +def config_snapshot(snapshot): + """Config-mode-aware snapshot 
fixture for tests that differ between legacy and config_ng.""" + return snapshot.use_extension(ConfigModeSnapshotExtension) + + def _clean_output(text: str): """ Replacing util to clean up console output. Typer is using rich.Panel to show the --help content. diff --git a/tests_e2e/test_import_snowsql_connections.py b/tests_e2e/test_import_snowsql_connections.py index 9de5e44df9..b254c41ec4 100644 --- a/tests_e2e/test_import_snowsql_connections.py +++ b/tests_e2e/test_import_snowsql_connections.py @@ -1,3 +1,5 @@ +import json + import pytest from tests_e2e.conftest import subprocess_check_output, subprocess_run @@ -18,14 +20,73 @@ def _get_connections_list_output(snowcli, config_file) -> str: ) +def _parse_connections(output: str) -> list: + """Parse connection list JSON output.""" + return json.loads(output) + + +def _assert_connection_structure(connection: dict) -> None: + """Assert that a connection has the expected structure.""" + assert "connection_name" in connection + assert "parameters" in connection + assert "is_default" in connection + assert isinstance(connection["parameters"], dict) + assert isinstance(connection["is_default"], bool) + + +def _assert_connections_present(connections: list, expected_names: set) -> None: + """Assert that specific connections are present in the list.""" + actual_names = {conn["connection_name"] for conn in connections} + assert expected_names.issubset( + actual_names + ), f"Expected connections {expected_names} not found. 
Got: {actual_names}" + + +def _assert_connection_parameters( + connections: list, connection_name: str, expected_params: dict +) -> None: + """Assert that a specific connection has expected parameters.""" + conn = next( + (c for c in connections if c["connection_name"] == connection_name), None + ) + assert conn is not None, f"Connection '{connection_name}' not found" + + # Check each expected parameter + for key, value in expected_params.items(): + assert ( + key in conn["parameters"] + ), f"Parameter '{key}' not found in connection '{connection_name}'" + assert conn["parameters"][key] == value, ( + f"Parameter '{key}' mismatch in connection '{connection_name}': " + f"expected {value}, got {conn['parameters'][key]}" + ) + + +def _assert_default_connection(connections: list, expected_name: str) -> None: + """Assert which connection is marked as default.""" + default_connections = [c for c in connections if c["is_default"]] + assert ( + len(default_connections) == 1 + ), f"Expected exactly one default connection, found {len(default_connections)}" + assert default_connections[0]["connection_name"] == expected_name + + @pytest.mark.e2e -def test_import_of_snowsql_connections( - snowcli, test_root_path, empty_config_file, config_snapshot -): - """Test connection import.""" +def test_import_of_snowsql_connections(snowcli, test_root_path, empty_config_file): + """Test connection import. + + Verifies that connections are imported from SnowSQL config files and + appear in the connection list. Environment-based connections are not + shown by default (matching legacy behavior). 
+ """ # Initially should have empty or minimal connections list initial_output = _get_connections_list_output(snowcli, empty_config_file) - assert initial_output == config_snapshot + initial_connections = _parse_connections(initial_output) + + # In isolated e2e tests, should start empty + assert ( + len(initial_connections) == 0 + ), f"Expected no file-based connections initially, found: {initial_connections}" # Import snowsql connections result = subprocess_run( @@ -45,17 +106,59 @@ def test_import_of_snowsql_connections( # After import, should have multiple connections final_output = _get_connections_list_output(snowcli, empty_config_file) - assert final_output == config_snapshot + final_connections = _parse_connections(final_output) + + # Validate all connections have proper structure + for conn in final_connections: + _assert_connection_structure(conn) + + # Assert expected connections are present + expected_connections = {"snowsql1", "snowsql2", "example", "snowsql3", "default"} + _assert_connections_present(final_connections, expected_connections) + + # Validate default connection + _assert_default_connection(final_connections, "default") + + # Validate specific connection parameters (from snowsql config files) + _assert_connection_parameters( + final_connections, + "snowsql1", + { + "account": "a1", + "user": "u1", + "host": "h1_override", # overridden in overriding_config + "database": "d1", + "schema": "public", + "warehouse": "w1", + "role": "r1", + }, + ) + + _assert_connection_parameters( + final_connections, + "default", + { + "account": "default_connection_account", + "user": "default_connection_user", + "host": "localhost", + "database": "default_connection_database_override", # overridden + "schema": "public", + "warehouse": "default_connection_warehouse", + "role": "accountadmin", + }, + ) @pytest.mark.e2e def test_import_prompt_for_different_default_connection_name_on_conflict( - snowcli, test_root_path, empty_config_file, config_snapshot + 
snowcli, test_root_path, empty_config_file ): """Test importing with different default connection name.""" # Initially should have empty or minimal connections list initial_output = _get_connections_list_output(snowcli, empty_config_file) - assert initial_output == config_snapshot + initial_connections = _parse_connections(initial_output) + + assert len(initial_connections) == 0 # Import with different default connection name result = subprocess_run( @@ -78,7 +181,34 @@ def test_import_prompt_for_different_default_connection_name_on_conflict( # After import, snowsql2 should be the default final_output = _get_connections_list_output(snowcli, empty_config_file) - assert final_output == config_snapshot + final_connections = _parse_connections(final_output) + + # Validate all connections have proper structure + for conn in final_connections: + _assert_connection_structure(conn) + + # Assert expected connections are present + expected_connections = {"snowsql1", "snowsql2", "example", "snowsql3", "default"} + _assert_connections_present(final_connections, expected_connections) + + # Validate that snowsql2 is the default (not "default") + _assert_default_connection(final_connections, "snowsql2") + + # Validate snowsql2 parameters + _assert_connection_parameters( + final_connections, + "snowsql2", + { + "account": "a2", + "user": "u2", + "host": "h2", + "port": 1234, + "database": "d2", + "schema": "public", + "warehouse": "w2", + "role": "r2", + }, + ) @pytest.mark.e2e @@ -86,16 +216,18 @@ def test_import_confirm_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, - config_snapshot, ): """Test import with confirmation on conflict.""" - # Initially should have example and integration connections + # Initially should have example connection initial_output = _get_connections_list_output( snowcli, example_connection_config_file ) - assert initial_output == config_snapshot + initial_connections = _parse_connections(initial_output) 
+ + # Should have the example connection + _assert_connections_present(initial_connections, {"example"}) - # Import with confirmation (y) + # Import with confirmation (y) - this will overwrite "example" connection result = subprocess_run( [ snowcli, @@ -114,7 +246,28 @@ def test_import_confirm_on_conflict_with_existing_cli_connection( # After import, example connection should be overwritten with snowsql data final_output = _get_connections_list_output(snowcli, example_connection_config_file) - assert final_output == config_snapshot + final_connections = _parse_connections(final_output) + + # Validate all connections have proper structure + for conn in final_connections: + _assert_connection_structure(conn) + + # Assert all expected connections are present (including overwritten example) + expected_connections = {"example", "snowsql1", "snowsql2", "snowsql3", "default"} + _assert_connections_present(final_connections, expected_connections) + + # Validate default connection + _assert_default_connection(final_connections, "default") + + # Validate that "example" was overwritten with snowsql config values + _assert_connection_parameters( + final_connections, + "example", + { + "account": "accountname", + "user": "username", + }, + ) @pytest.mark.e2e @@ -122,16 +275,24 @@ def test_import_reject_on_conflict_with_existing_cli_connection( snowcli, test_root_path, example_connection_config_file, - config_snapshot, ): """Test import with rejection on conflict.""" - # Initially should have example and integration connections + # Initially should have example connection initial_output = _get_connections_list_output( snowcli, example_connection_config_file ) - assert initial_output == config_snapshot + initial_connections = _parse_connections(initial_output) + + # Should have the example connection with original values + _assert_connections_present(initial_connections, {"example"}) + + # Get initial example connection parameters + initial_example = next( + c for c in 
initial_connections if c["connection_name"] == "example" + ) + initial_example_params = initial_example["parameters"].copy() - # Import with rejection (n) + # Import with rejection (n) - should NOT overwrite "example" connection result = subprocess_run( [ snowcli, @@ -151,7 +312,26 @@ def test_import_reject_on_conflict_with_existing_cli_connection( # After import, example connection should remain unchanged # But other connections should still be imported final_output = _get_connections_list_output(snowcli, example_connection_config_file) - assert final_output == config_snapshot + final_connections = _parse_connections(final_output) + + # Validate all connections have proper structure + for conn in final_connections: + _assert_connection_structure(conn) + + # Assert all expected connections are present + expected_connections = {"example", "snowsql1", "snowsql2", "snowsql3", "default"} + _assert_connections_present(final_connections, expected_connections) + + # Validate default connection + _assert_default_connection(final_connections, "default") + + # Validate that "example" connection was NOT overwritten (kept original values) + final_example = next( + c for c in final_connections if c["connection_name"] == "example" + ) + assert ( + final_example["parameters"] == initial_example_params + ), "Example connection should not have been overwritten after rejection" @pytest.mark.e2e diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index 965fed0990..92f24a8c8c 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -31,6 +31,7 @@ import pytest import yaml +from syrupy.extensions.amber import AmberSnapshotExtension from typer import Typer from typer.testing import CliRunner @@ -54,6 +55,27 @@ "tests_integration.snowflake_connector", ] + +class ConfigModeSnapshotExtension(AmberSnapshotExtension): + """Snapshot extension that includes config mode in snapshot file name.""" + + @classmethod + def _get_file_basename(cls, *, 
test_location, index): + """Generate snapshot filename with config mode suffix.""" + config_mode = ( + "config_ng" if os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") else "legacy" + ) + basename = super()._get_file_basename(test_location=test_location, index=index) + # Insert config mode before .ambr extension + return f"{basename}_{config_mode}" + + +@pytest.fixture() +def config_snapshot(snapshot): + """Config-mode-aware snapshot fixture for tests that differ between legacy and config_ng.""" + return snapshot.use_extension(ConfigModeSnapshotExtension) + + TEST_DIR = Path(__file__).parent DEFAULT_TEST_CONFIG = "connection_configs.toml" WORLD_READABLE_CONFIG = "world_readable.toml" From 79f5627abdd39f52ee1abbd982d57630e6f8e1c3 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 13 Oct 2025 16:59:18 +0200 Subject: [PATCH 37/78] SNOW-2306184: config refactor - snapshot COLUMNS=200 --- tests_e2e/__snapshots__/test_installation.ambr | 4 ++++ tests_e2e/conftest.py | 4 +++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/tests_e2e/__snapshots__/test_installation.ambr b/tests_e2e/__snapshots__/test_installation.ambr index 00d86dd971..f9e733c7f4 100644 --- a/tests_e2e/__snapshots__/test_installation.ambr +++ b/tests_e2e/__snapshots__/test_installation.ambr @@ -58,6 +58,7 @@ |--------------------+---------| | multilingual-hello | True | +------------------------------+ + ''' # --- # name: test_disabling_and_enabling_command.2 @@ -70,6 +71,7 @@ |--------------------+---------| | multilingual-hello | False | +------------------------------+ + ''' # --- # name: test_disabling_and_enabling_command.4 @@ -167,6 +169,7 @@ |--------------------+---------| | multilingual-hello | True | +------------------------------+ + ''' # --- # name: test_disabling_and_enabling_command.8 @@ -251,5 +254,6 @@ |------------------| | 2.3 | +------------------+ + ''' # --- diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 56373d4c66..617910e7f7 100644 --- 
a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -110,9 +110,11 @@ def test_root_path(): def disable_colors_and_styles_in_output(monkeypatch): """ Colors and styles in output cause mismatches in asserts, - this environment variable turn off styling + this environment variable turn off styling. + Also set consistent terminal width to avoid snapshot mismatches. """ monkeypatch.setenv("TERM", "unknown") + monkeypatch.setenv("COLUMNS", "200") @pytest.fixture(scope="session") From 58133fce5e47034b9b225fb48def80e9caa13202 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 13 Oct 2025 17:36:07 +0200 Subject: [PATCH 38/78] SNOW-2306184: config refactor - snapshot fix 2 --- tests_e2e/__snapshots__/test_installation.ambr | 4 ---- tests_e2e/conftest.py | 3 ++- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/tests_e2e/__snapshots__/test_installation.ambr b/tests_e2e/__snapshots__/test_installation.ambr index f9e733c7f4..00d86dd971 100644 --- a/tests_e2e/__snapshots__/test_installation.ambr +++ b/tests_e2e/__snapshots__/test_installation.ambr @@ -58,7 +58,6 @@ |--------------------+---------| | multilingual-hello | True | +------------------------------+ - ''' # --- # name: test_disabling_and_enabling_command.2 @@ -71,7 +70,6 @@ |--------------------+---------| | multilingual-hello | False | +------------------------------+ - ''' # --- # name: test_disabling_and_enabling_command.4 @@ -169,7 +167,6 @@ |--------------------+---------| | multilingual-hello | True | +------------------------------+ - ''' # --- # name: test_disabling_and_enabling_command.8 @@ -254,6 +251,5 @@ |------------------| | 2.3 | +------------------+ - ''' # --- diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 617910e7f7..6d10647b26 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -114,7 +114,8 @@ def disable_colors_and_styles_in_output(monkeypatch): Also set consistent terminal width to avoid snapshot mismatches. 
""" monkeypatch.setenv("TERM", "unknown") - monkeypatch.setenv("COLUMNS", "200") + width = 81 if IS_WINDOWS else 80 + monkeypatch.setenv("COLUMNS", str(width)) @pytest.fixture(scope="session") From e11a531d96d5860e9160c7bfcd87302ae546b1c4 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 13 Oct 2025 20:40:03 +0200 Subject: [PATCH 39/78] SNOW-2306184: config refactor - snapshot fix 3 --- .../test_broken_plugin_config_ng.ambr | 13 ---------- .../test_failing_plugin_config_ng.ambr | 13 ---------- ...verride_by_external_plugins_config_ng.ambr | 26 ------------------- 3 files changed, 52 deletions(-) diff --git a/tests_integration/plugin/__snapshots__/test_broken_plugin_config_ng.ambr b/tests_integration/plugin/__snapshots__/test_broken_plugin_config_ng.ambr index a030fe53c7..87d2a1f15c 100644 --- a/tests_integration/plugin/__snapshots__/test_broken_plugin_config_ng.ambr +++ b/tests_integration/plugin/__snapshots__/test_broken_plugin_config_ng.ambr @@ -8,19 +8,6 @@ "account": "test" }, "is_default": false - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false } ] diff --git a/tests_integration/plugin/__snapshots__/test_failing_plugin_config_ng.ambr b/tests_integration/plugin/__snapshots__/test_failing_plugin_config_ng.ambr index 2b51fdbfe1..144e641e4c 100644 --- a/tests_integration/plugin/__snapshots__/test_failing_plugin_config_ng.ambr +++ b/tests_integration/plugin/__snapshots__/test_failing_plugin_config_ng.ambr @@ -8,19 +8,6 @@ "account": "test" }, "is_default": false - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": 
"SNOWFLAKE_JWT" - }, - "is_default": false } ] diff --git a/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_config_ng.ambr b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_config_ng.ambr index 3b8f510024..947fa54d21 100644 --- a/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_config_ng.ambr +++ b/tests_integration/plugin/__snapshots__/test_override_by_external_plugins_config_ng.ambr @@ -8,19 +8,6 @@ "account": "test" }, "is_default": false - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false } ] @@ -36,19 +23,6 @@ "account": "test" }, "is_default": false - }, - { - "connection_name": "integration", - "parameters": { - "account": "po52878", - "user": "mraba", - "host": "po52878.snowflakecomputing.com", - "database": "SNOWCLI_DB", - "warehouse": "xsmall", - "role": "integration_tests", - "authenticator": "SNOWFLAKE_JWT" - }, - "is_default": false } ] From 9228ec56301282395735ee62b78f9d1e84cb2259 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 13 Oct 2025 20:56:24 +0200 Subject: [PATCH 40/78] SNOW-2306184: config refactor - show-config-sources tests --- tests/helpers/test_show_config_sources.py | 201 ++++++++++++++++++++++ 1 file changed, 201 insertions(+) create mode 100644 tests/helpers/test_show_config_sources.py diff --git a/tests/helpers/test_show_config_sources.py b/tests/helpers/test_show_config_sources.py new file mode 100644 index 0000000000..78dabfb0c6 --- /dev/null +++ b/tests/helpers/test_show_config_sources.py @@ -0,0 +1,201 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from unittest import mock + +from snowflake.cli.api.config_provider import ALTERNATIVE_CONFIG_ENV_VAR + +COMMAND = "show-config-sources" + + +class TestHiddenLogic: + """ + Test the logic that determines if the command should be hidden. + + Note: The 'hidden' parameter in Typer decorators is evaluated at module import time, + so we test the logic itself rather than the runtime visibility in help output. + """ + + def test_hidden_logic_with_truthy_values(self): + """Test that the hidden logic correctly identifies truthy values.""" + truthy_values = ["1", "true", "yes", "on", "TRUE", "Yes", "ON"] + for value in truthy_values: + # This is the logic used in the command decorator + is_hidden = value.lower() not in ("1", "true", "yes", "on") + assert ( + not is_hidden + ), f"Value '{value}' should make command visible (not hidden)" + + def test_hidden_logic_with_falsy_values(self): + """Test that the hidden logic correctly identifies falsy values.""" + falsy_values = ["", "0", "false", "no", "off", "random"] + for value in falsy_values: + # This is the logic used in the command decorator + is_hidden = value.lower() not in ("1", "true", "yes", "on") + assert is_hidden, f"Value '{value}' should make command hidden" + + +class TestCommandFunctionality: + """Test that the command functions correctly when called.""" + + @mock.patch.dict(os.environ, {}, clear=True) + def test_command_unavailable_without_env_var(self, runner): + """Command should indicate resolution logging is unavailable without env var.""" + result = runner.invoke(["helpers", COMMAND]) + 
assert result.exit_code == 0 + assert "Configuration resolution logging is not available" in result.output + assert ALTERNATIVE_CONFIG_ENV_VAR in result.output + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + def test_command_unavailable_message_when_logging_not_available( + self, mock_is_available, runner + ): + """Command should show unavailable message when resolution logging is not available.""" + mock_is_available.return_value = False + result = runner.invoke(["helpers", COMMAND]) + assert result.exit_code == 0 + assert "Configuration resolution logging is not available" in result.output + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + def test_command_shows_summary_without_arguments( + self, mock_explain, mock_is_available, runner + ): + """Command should show configuration summary when called without arguments.""" + mock_is_available.return_value = True + result = runner.invoke(["helpers", COMMAND]) + assert result.exit_code == 0 + mock_explain.assert_called_once_with(key=None, verbose=False) + assert "Configuration resolution summary displayed above" in result.output + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + def test_command_shows_specific_key(self, mock_explain, mock_is_available, runner): + """Command should show resolution for specific key when provided.""" + mock_is_available.return_value = True + result = runner.invoke(["helpers", COMMAND, "account"]) + assert result.exit_code == 0 + mock_explain.assert_called_once_with(key="account", verbose=False) + assert "Showing resolution for key: 
account" in result.output + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + def test_command_shows_details_with_flag( + self, mock_explain, mock_is_available, runner + ): + """Command should show detailed resolution when --show-details flag is used.""" + mock_is_available.return_value = True + result = runner.invoke(["helpers", COMMAND, "--show-details"]) + assert result.exit_code == 0 + mock_explain.assert_called_once_with(key=None, verbose=True) + assert "Configuration resolution summary displayed above" in result.output + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + def test_command_shows_details_with_short_flag( + self, mock_explain, mock_is_available, runner + ): + """Command should show detailed resolution when -d flag is used.""" + mock_is_available.return_value = True + result = runner.invoke(["helpers", COMMAND, "-d"]) + assert result.exit_code == 0 + mock_explain.assert_called_once_with(key=None, verbose=True) + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + def test_command_shows_key_with_details( + self, mock_explain, mock_is_available, runner + ): + """Command should show detailed resolution for specific key.""" + mock_is_available.return_value = True + result = runner.invoke(["helpers", COMMAND, "user", "--show-details"]) + assert result.exit_code == 0 + mock_explain.assert_called_once_with(key="user", verbose=True) + assert "Showing resolution for key: user" in result.output + + @mock.patch.dict(os.environ, 
{ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.export_resolution_history") + def test_command_exports_to_file_success( + self, mock_export, mock_is_available, runner, tmp_path + ): + """Command should export resolution history to file when --export is used.""" + mock_is_available.return_value = True + mock_export.return_value = True + export_file = tmp_path / "config_debug.json" + + result = runner.invoke(["helpers", COMMAND, "--export", str(export_file)]) + assert result.exit_code == 0 + mock_export.assert_called_once_with(export_file) + assert "Resolution history exported to:" in result.output + assert str(export_file) in result.output + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.export_resolution_history") + def test_command_exports_to_file_with_short_flag( + self, mock_export, mock_is_available, runner, tmp_path + ): + """Command should export resolution history to file when -e is used.""" + mock_is_available.return_value = True + mock_export.return_value = True + export_file = tmp_path / "debug.json" + + result = runner.invoke(["helpers", COMMAND, "-e", str(export_file)]) + assert result.exit_code == 0 + mock_export.assert_called_once_with(export_file) + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") + @mock.patch("snowflake.cli.api.config_ng.export_resolution_history") + def test_command_export_failure( + self, mock_export, mock_is_available, runner, tmp_path + ): + """Command should show error message when export fails.""" + mock_is_available.return_value = True + mock_export.return_value = False + export_file = tmp_path / "config_debug.json" + + result = runner.invoke(["helpers", 
COMMAND, "--export", str(export_file)]) + assert result.exit_code == 0 + assert "Failed to export resolution history" in result.output + + +class TestCommandHelp: + """Test the command help output.""" + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + def test_command_help_message(self, runner): + """Command help should display correctly.""" + result = runner.invoke(["helpers", COMMAND, "--help"]) + assert result.exit_code == 0 + assert "Show where configuration values come from" in result.output + assert "--show-details" in result.output + assert "--export" in result.output + assert "Examples:" in result.output + + @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) + def test_command_help_shows_key_argument(self, runner): + """Command help should show the optional key argument.""" + result = runner.invoke(["helpers", COMMAND, "--help"]) + assert result.exit_code == 0 + assert "KEY" in result.output or "key" in result.output.lower() + assert "account" in result.output or "user" in result.output From 1d54c4ec042f64171642dcedd553f0de258febad Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 14 Oct 2025 16:22:56 +0200 Subject: [PATCH 41/78] SNOW-2306184: config refactor - more merging tests --- src/snowflake/cli/api/config_ng/__init__.py | 2 + src/snowflake/cli/api/config_ng/sources.py | 198 ++-- src/snowflake/cli/api/config_provider.py | 126 +- tests/config_ng/README_TESTING.md | 221 ---- tests/config_ng/configs/cli.env | 1 - tests/config_ng/configs/config | 4 - tests/config_ng/configs/config.toml | 18 - tests/config_ng/configs/connections.toml | 18 - tests/config_ng/configs/snowsql.env | 1 - tests/config_ng/conftest.py | 550 ++------- tests/config_ng/test_configuration.py | 1182 ++++++++++++++++--- 11 files changed, 1279 insertions(+), 1042 deletions(-) delete mode 100644 tests/config_ng/README_TESTING.md delete mode 100644 tests/config_ng/configs/cli.env delete mode 100644 tests/config_ng/configs/config 
delete mode 100644 tests/config_ng/configs/config.toml delete mode 100644 tests/config_ng/configs/connections.toml delete mode 100644 tests/config_ng/configs/snowsql.env diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index 5d8c8442d3..d767e7341b 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -48,6 +48,7 @@ CliEnvironment, CliParameters, ConnectionsConfigFile, + ConnectionSpecificEnvironment, SnowSQLConfigFile, SnowSQLEnvironment, ) @@ -60,6 +61,7 @@ "ConfigurationResolver", "ConfigValue", "ConnectionsConfigFile", + "ConnectionSpecificEnvironment", "explain_configuration", "export_resolution_history", "format_summary_for_display", diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 8ad443e062..e9e48b3a5d 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -448,78 +448,69 @@ def supports_key(self, key: str) -> bool: return False -class CliEnvironment(ValueSource): +# Base configuration keys that can be set via environment +_ENV_CONFIG_KEYS = [ + "account", + "user", + "password", + "database", + "schema", + "role", + "warehouse", + "protocol", + "host", + "port", + "region", + "authenticator", + "workload_identity_provider", + "private_key_file", + "private_key_path", # Used by integration tests + "private_key_raw", # Used by integration tests + "private_key_passphrase", # Private key passphrase for encrypted keys + "token", # OAuth token + "session_token", # Session token for session-based authentication + "master_token", # Master token for advanced authentication + "token_file_path", + "oauth_client_id", + "oauth_client_secret", + "oauth_authorization_url", + "oauth_token_request_url", + "oauth_redirect_uri", + "oauth_scope", + "oauth_enable_pkce", # Fixed typo: was "oatuh_enable_pkce" + "oauth_enable_refresh_tokens", + 
"oauth_enable_single_use_refresh_tokens", + "client_store_temporary_credential", +] + + +class ConnectionSpecificEnvironment(ValueSource): """ - CLI environment variables source. - - Discovers SNOWFLAKE_* environment variables with two patterns: - 1. General: SNOWFLAKE_ACCOUNT (applies to all connections) - 2. Connection-specific: SNOWFLAKE_CONNECTIONS__ACCOUNT (overrides general) + Connection-specific environment variables source. - Connection-specific variables take precedence within this source. + Discovers SNOWFLAKE_CONNECTIONS__ environment variables. + Returns prefixed keys: connections.{name}.{key} Examples: - SNOWFLAKE_ACCOUNT -> account (general) - SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT -> account (for "integration" connection) - SNOWFLAKE_USER -> user - SNOWFLAKE_CONNECTIONS_DEV_USER -> user (for "dev" connection) + SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT=x -> connections.integration.account=x + SNOWFLAKE_CONNECTIONS_DEV_USER=y -> connections.dev.user=y """ - # Base configuration keys that can be set via environment - CONFIG_KEYS = [ - "account", - "user", - "password", - "database", - "schema", - "role", - "warehouse", - "protocol", - "host", - "port", - "region", - "authenticator", - "workload_identity_provider", - "private_key_file", - "private_key_path", # Used by integration tests - "private_key_raw", # Used by integration tests - "private_key_passphrase", # Private key passphrase for encrypted keys - "token", # OAuth token - "session_token", # Session token for session-based authentication - "master_token", # Master token for advanced authentication - "token_file_path", - "oauth_client_id", - "oauth_client_secret", - "oauth_authorization_url", - "oauth_token_request_url", - "oauth_redirect_uri", - "oauth_scope", - "oauth_enable_pkce", # Fixed typo: was "oatuh_enable_pkce" - "oauth_enable_refresh_tokens", - "oauth_enable_single_use_refresh_tokens", - "client_store_temporary_credential", - ] - @property def source_name(self) -> str: - return 
"cli_env" + return "connection_specific_env" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ - Discover SNOWFLAKE_* environment variables. - Returns both general (flat) and connection-specific (prefixed) keys. + Discover SNOWFLAKE_CONNECTIONS_* environment variables. + Returns connection-specific (prefixed) keys only. - Patterns: - 1. SNOWFLAKE_ACCOUNT=x -> account=x (flat key) - 2. SNOWFLAKE_CONNECTIONS_INTEGRATION_ACCOUNT=y -> connections.integration.account=y + Pattern: SNOWFLAKE_CONNECTIONS__=value -> connections.{name}.{key}=value """ values: Dict[str, ConfigValue] = {} # Scan all environment variables for env_name, env_value in os.environ.items(): - if not env_name.startswith("SNOWFLAKE_"): - continue - # Check for connection-specific pattern: SNOWFLAKE_CONNECTIONS__ if env_name.startswith("SNOWFLAKE_CONNECTIONS_"): # Extract connection name and config key @@ -530,7 +521,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: conn_name = conn_name_upper.lower() config_key = config_key_upper.lower() - if config_key in self.CONFIG_KEYS: + if config_key in _ENV_CONFIG_KEYS: full_key = f"connections.{conn_name}.{config_key}" if key is None or full_key == key: values[full_key] = ConfigValue( @@ -540,40 +531,79 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: raw_value=f"{env_name}={env_value}", ) + return values + + def supports_key(self, key: str) -> bool: + # Check if key matches pattern connections.{name}.{param} + if key.startswith("connections."): + parts = key.split(".", 2) + if len(parts) == 3: + _, conn_name, config_key = parts + env_var = ( + f"SNOWFLAKE_CONNECTIONS_{conn_name.upper()}_{config_key.upper()}" + ) + return os.getenv(env_var) is not None + return False + + +class CliEnvironment(ValueSource): + """ + CLI general environment variables source. + + Discovers general SNOWFLAKE_* environment variables (not connection-specific). 
+ Returns flat keys that apply to all connections. + + Examples: + SNOWFLAKE_ACCOUNT -> account (general, applies to all connections) + SNOWFLAKE_USER -> user + SNOWFLAKE_PASSWORD -> password + """ + + @property + def source_name(self) -> str: + return "cli_env" + + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + """ + Discover general SNOWFLAKE_* environment variables. + Returns general (flat) keys only. + + Pattern: SNOWFLAKE_<KEY>=value -> {key}=value + """ + values: Dict[str, ConfigValue] = {} + + # Scan all environment variables + for env_name, env_value in os.environ.items(): + if not env_name.startswith("SNOWFLAKE_"): + continue + + # Skip connection-specific variables + if env_name.startswith("SNOWFLAKE_CONNECTIONS_"): + continue + # Check for general pattern: SNOWFLAKE_<KEY> - elif not env_name.startswith("SNOWFLAKE_CONNECTIONS_"): - config_key_upper = env_name[len("SNOWFLAKE_") :] - config_key = config_key_upper.lower() - - if config_key in self.CONFIG_KEYS: - if key is None or config_key == key: - values[config_key] = ConfigValue( - key=config_key, - value=env_value, - source_name=self.source_name, - raw_value=f"{env_name}={env_value}", - ) + config_key_upper = env_name[len("SNOWFLAKE_") :] + config_key = config_key_upper.lower() + + if config_key in _ENV_CONFIG_KEYS: + if key is None or config_key == key: + values[config_key] = ConfigValue( + key=config_key, + value=env_value, + source_name=self.source_name, + raw_value=f"{env_name}={env_value}", + ) return values def supports_key(self, key: str) -> bool: - discovered = self.discover() - if key in discovered: - return True - - # Check general var - if os.getenv(f"SNOWFLAKE_{key.upper()}") is not None: - return True - - # Check connection-specific var - if hasattr(self, "_connection_name") and self._connection_name: - conn_var = ( - f"SNOWFLAKE_CONNECTIONS_{self._connection_name.upper()}_{key.upper()}" - ) - if os.getenv(conn_var) is not None: - return True + # Only support flat keys 
(not prefixed with connections.) + if "." in key: + return False - return False + # Check if the general env var exists + env_var = f"SNOWFLAKE_{key.upper()}" + return os.getenv(env_var) is not None class CliParameters(ValueSource): diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 35dabf6cb7..c2e2897a68 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -227,6 +227,7 @@ def _ensure_initialized(self) -> None: CliParameters, ConfigurationResolver, ConnectionsConfigFile, + ConnectionSpecificEnvironment, SnowSQLConfigFile, SnowSQLEnvironment, ) @@ -251,9 +252,11 @@ def _ensure_initialized(self) -> None: ConnectionsConfigFile(), # 4. SnowSQL environment variables (SNOWSQL_*) SnowSQLEnvironment(), - # 5. CLI environment variables (SNOWFLAKE_*) + # 5. Connection-specific environment variables (SNOWFLAKE_CONNECTIONS_*) + ConnectionSpecificEnvironment(), + # 6. General CLI environment variables (SNOWFLAKE_*) CliEnvironment(), - # 6. CLI command-line arguments (highest priority) + # 7. CLI command-line arguments (highest priority) CliParameters(cli_context=cli_context_dict), ] @@ -401,20 +404,30 @@ def section_exists(self, *path) -> bool: for key in self._config_cache.keys() ) + # Source priority levels (higher number = higher priority) + _SOURCE_PRIORITIES = { + "snowsql_config": 1, + "cli_config_toml": 2, + "connections_toml": 3, + "snowsql_env": 4, + "connection_specific_env": 5, + "cli_env": 6, + "cli_arguments": 7, + } + def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: """ Get connection configuration by name. 
- Behavior is controlled by SNOWFLAKE_CLI_CONNECTIONS_TOML_REPLACE environment variable: - - If set to "true" (default): connections.toml completely replaces connections - from config.toml (legacy behavior) - - If set to "false": connections.toml values are merged with config.toml values + Merges configuration from all sources (files and environment variables) + based on the 7-level precedence order. For each parameter, the value + from the highest-priority source wins. Args: connection_name: Name of the connection Returns: - Dictionary of connection parameters from file sources only + Dictionary of connection parameters with all sources merged """ self._ensure_initialized() @@ -425,56 +438,55 @@ def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: connection_dict: Dict[str, Any] = {} connection_prefix = f"connections.{connection_name}." - # Check if replacement behavior is enabled (default: true for backward compatibility) - import os - - replace_behavior = os.environ.get( - "SNOWFLAKE_CLI_CONNECTIONS_TOML_REPLACE", "true" - ).lower() in ("true", "1", "yes", "on") - - if replace_behavior: - # Legacy replacement behavior: if connections.toml has the connection, - # use ONLY values from connections.toml - has_connections_toml = False - if self._resolver is not None: - for key in self._config_cache.keys(): - if key.startswith(connection_prefix): - # Check resolution history to see if this came from connections.toml - history = self._resolver.get_resolution_history(key) - if history and history.selected_entry: - if ( - history.selected_entry.config_value.source_name - == "connections_toml" - ): - has_connections_toml = True - break - - if has_connections_toml: - # Use ONLY connections.toml values (replacement behavior) - for key, value in self._config_cache.items(): - if key.startswith(connection_prefix): - # Check if this specific value comes from connections.toml - if self._resolver is not None: - history = 
self._resolver.get_resolution_history(key) - if history and history.selected_entry: - if ( - history.selected_entry.config_value.source_name - == "connections_toml" - ): - param_name = key[len(connection_prefix) :] - connection_dict[param_name] = value - else: - # No connections.toml, use merged values from other sources - for key, value in self._config_cache.items(): - if key.startswith(connection_prefix): - param_name = key[len(connection_prefix) :] - connection_dict[param_name] = value - else: - # New merging behavior: merge all sources normally - for key, value in self._config_cache.items(): - if key.startswith(connection_prefix): - param_name = key[len(connection_prefix) :] - connection_dict[param_name] = value + # Collect all parameter names from both prefixed and flat keys + param_names = set() + + # Get param names from prefixed keys (file sources, connection-specific env) + for key in self._config_cache.keys(): + if key.startswith(connection_prefix): + param_name = key[len(connection_prefix) :] + param_names.add(param_name) + + # Get param names from flat keys (general env vars, SnowSQL env, CLI params) + # Skip internal CLI arguments that aren't connection parameters + for key in self._config_cache.keys(): + if "." 
not in key and key not in ("enable_diag", "temporary_connection"): + param_names.add(key) + + # For each parameter, determine the best value based on source priority + for param_name in param_names: + prefixed_key = f"{connection_prefix}{param_name}" + flat_key = param_name + + best_value = None + best_priority = -1 + + # Check prefixed key (from files and connection-specific env) + if prefixed_key in self._config_cache: + value = self._config_cache[prefixed_key] + if self._resolver is not None: + history = self._resolver.get_resolution_history(prefixed_key) + if history and history.selected_entry: + source = history.selected_entry.config_value.source_name + priority = self._SOURCE_PRIORITIES.get(source, 0) + if priority > best_priority: + best_value = value + best_priority = priority + + # Check flat key (from general env vars, SnowSQL env, CLI params) + if flat_key in self._config_cache: + value = self._config_cache[flat_key] + if self._resolver is not None: + history = self._resolver.get_resolution_history(flat_key) + if history and history.selected_entry: + source = history.selected_entry.config_value.source_name + priority = self._SOURCE_PRIORITIES.get(source, 0) + if priority > best_priority: + best_value = value + best_priority = priority + + if best_value is not None: + connection_dict[param_name] = best_value if not connection_dict: from snowflake.cli.api.exceptions import MissingConfigurationError diff --git a/tests/config_ng/README_TESTING.md b/tests/config_ng/README_TESTING.md deleted file mode 100644 index 2b79f59600..0000000000 --- a/tests/config_ng/README_TESTING.md +++ /dev/null @@ -1,221 +0,0 @@ - - -# Configuration Merging Test Framework - -## Overview - -This test framework provides an easy-to-use, readable way to test merged configuration from multiple sources in the Snowflake CLI. - -## Features - -### Configuration Sources - -The framework supports testing all configuration sources: - -1. 
**SnowSQLConfig**: SnowSQL INI-style config files (`.snowsql/config`) -2. **SnowSQLEnvs**: SnowSQL environment variables (`SNOWSQL_*`) -3. **CliConfig**: CLI TOML config files (`.snowflake/config.toml`) -4. **CliEnvs**: CLI environment variables (`SNOWFLAKE_*`) -5. **CliParams**: CLI command-line parameters (`--account`, `--user`, etc.) -6. **ConnectionsToml**: Connections TOML files (`.snowflake/connections.toml`) - -### Configuration Priority - -The framework correctly tests the precedence order: -1. CLI parameters (highest) -2. CLI environment variables (`SNOWFLAKE_*`) -3. SnowSQL environment variables (`SNOWSQL_*`) -4. CLI config files -5. Connections TOML -6. SnowSQL config files (lowest) - -## Usage - -### Basic Example - -```python -from tests.config_ng.conftest import ( - CliConfig, - CliEnvs, - CliParams, - SnowSQLConfig, - SnowSQLEnvs, - config_sources, -) - -def test_configuration_merging(): - sources = ( - SnowSQLConfig("config"), - SnowSQLEnvs("snowsql.env"), - CliConfig("config.toml"), - CliEnvs("cli.env"), - CliParams("--account", "test-account", "--user", "alice"), - ) - - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() - - # CLI params have highest priority - assert merged["account"] == "test-account" - assert merged["user"] == "alice" -``` - -### Testing Specific Connections - -```python -def test_specific_connection(): - sources = (ConnectionsToml("connections.toml"),) - - with config_sources(sources, connection="prod") as ctx: - merged = ctx.get_merged_config() - assert merged["account"] == "prod-account" -``` - -### Using FinalConfig for Readability - -```python -from textwrap import dedent - -from tests.config_ng.conftest import FinalConfig - -# From dictionary -expected = FinalConfig(config_dict={ - "account": "test-account", - "user": "alice", -}) - -# From TOML string (more readable for complex configs) -# Use dedent to avoid indentation issues -expected = FinalConfig(toml_string=dedent(""" - [connections.prod] - 
account = "prod-account" - user = "prod-user" - password = "secret" - """)) - -# Compare with merged config -assert merged == expected -``` - -### Accessing Resolution History - -```python -with config_sources(sources) as ctx: - resolver = ctx.get_resolver() - config = resolver.resolve() - - # Check which source won - history = resolver.get_resolution_history("account") - assert history.selected_entry.config_value.source_name == "cli_arguments" - - # Get resolution summary - summary = resolver.get_history_summary() - print(f"Total keys resolved: {summary['total_keys_resolved']}") - print(f"Keys with overrides: {summary['keys_with_overrides']}") -``` - -## Test File Structure - -### Required Directory Structure - -``` -tests/config_ng/ -├── conftest.py # Test framework implementation -├── test_configuration.py # Example tests -└── configs/ # Test configuration files - ├── config # SnowSQL config - ├── snowsql.env # SnowSQL environment variables - ├── config.toml # CLI config - ├── cli.env # CLI environment variables - └── connections.toml # Connections config -``` - -### Configuration Files - -Create test configuration files in `tests/config_ng/configs/`: - -**config** (SnowSQL format): -```ini -[connections.a] -accountname = account-a -user = user -password = password -``` - -**config.toml** (CLI format): -```toml -[connections.a] -account = "account-a" -username = "user" -password = "abc" -``` - -**cli.env**: -```bash -SNOWFLAKE_USER=Alice -``` - -**snowsql.env**: -```bash -SNOWSQL_USER=Bob -``` - -## Implementation Details - -### Context Manager - -The `config_sources` context manager: -- Creates temporary directories for config files -- Writes config files to proper locations -- Sets environment variables -- Cleans up after test completion - -### ConfigSourcesContext - -Provides methods: -- `get_merged_config()`: Returns the merged configuration dictionary -- `get_resolver()`: Returns the ConfigurationResolver for advanced testing - -## Running Tests - -```bash 
-# Run with timeout -timeout 30 hatch env run -- pytest tests/config_ng/test_configuration.py -v -p no:warnings - -# Run all config_ng tests -timeout 60 hatch env run -- pytest tests/config_ng/ -v -p no:warnings - -# Run with pre-commit checks -hatch env run -- pre-commit run --files tests/config_ng/conftest.py tests/config_ng/test_configuration.py -``` - -## Benefits - -1. **Readable**: Tests clearly express intent with descriptive source objects -2. **Isolated**: Each test runs in a clean temporary environment -3. **Comprehensive**: Tests all configuration sources and their interactions -4. **Type-safe**: Full mypy type checking support -5. **Maintainable**: Centralized logic in `conftest.py` -6. **Flexible**: Easy to add new test scenarios - -## Examples from Tests - -See `test_configuration.py` for complete examples: -- `test_all_sources_merged`: Tests complete precedence chain -- `test_cli_envs_override_snowsql_envs`: Tests environment variable precedence -- `test_config_files_precedence`: Tests file precedence -- `test_resolution_history_tracking`: Tests resolution debugging features diff --git a/tests/config_ng/configs/cli.env b/tests/config_ng/configs/cli.env deleted file mode 100644 index 23724a9a0c..0000000000 --- a/tests/config_ng/configs/cli.env +++ /dev/null @@ -1 +0,0 @@ -SNOWFLAKE_USER=Alice diff --git a/tests/config_ng/configs/config b/tests/config_ng/configs/config deleted file mode 100644 index 0880c2b8a4..0000000000 --- a/tests/config_ng/configs/config +++ /dev/null @@ -1,4 +0,0 @@ -[connections.a] -accountname = account-a -user = user -password = password diff --git a/tests/config_ng/configs/config.toml b/tests/config_ng/configs/config.toml deleted file mode 100644 index ae3d5a5203..0000000000 --- a/tests/config_ng/configs/config.toml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -[connections.a] -account = "account-a" -username = "user" -password = "abc" diff --git a/tests/config_ng/configs/connections.toml b/tests/config_ng/configs/connections.toml deleted file mode 100644 index 5183da2262..0000000000 --- a/tests/config_ng/configs/connections.toml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright (c) 2024 Snowflake Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -[connections.b] -account = "account-a" -username = "user" -password = "abc" diff --git a/tests/config_ng/configs/snowsql.env b/tests/config_ng/configs/snowsql.env deleted file mode 100644 index a900b8f68d..0000000000 --- a/tests/config_ng/configs/snowsql.env +++ /dev/null @@ -1 +0,0 @@ -SNOWSQL_USER=Bob diff --git a/tests/config_ng/conftest.py b/tests/config_ng/conftest.py index f17df1a9ef..adb09d3f96 100644 --- a/tests/config_ng/conftest.py +++ b/tests/config_ng/conftest.py @@ -13,467 +13,131 @@ # limitations under the License. """ -Configuration testing utilities for testing merged configuration from multiple sources. +Configuration testing utilities for config_ng tests. 
-This module provides fixtures and utilities for testing configuration resolution -from various sources (SnowSQL config, CLI config, environment variables, CLI params). +Provides fixtures for setting up temporary configuration environments. """ +import copy import os import tempfile from contextlib import contextmanager -from dataclasses import dataclass from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from textwrap import dedent +from typing import Dict, Optional import pytest -import tomlkit - -if TYPE_CHECKING: - from snowflake.cli.api.config_ng import ConfigurationResolver - - -@dataclass -class SnowSQLConfig: - """ - Represents SnowSQL INI-style config file content. - - Args: - filename: Name of the config file in the configs/ directory - """ - - filename: str - - -@dataclass -class SnowSQLEnvs: - """ - Represents SnowSQL environment variables from a file. - - Args: - filename: Name of the env file in the configs/ directory - """ - - filename: str - - -@dataclass -class CliConfig: - """ - Represents CLI TOML config file content. - - Args: - filename: Name of the config.toml file in the configs/ directory - """ - - filename: str - - -@dataclass -class CliEnvs: - """ - Represents CLI environment variables from a file. - - Args: - filename: Name of the env file in the configs/ directory - """ - - filename: str - - -@dataclass -class CliParams: - """ - Represents CLI command-line parameters. - - Args: - args: Variable length list of CLI arguments (e.g., "--account", "value", "--user", "alice") - """ - - args: Tuple[str, ...] - - def __init__(self, *args: str): - object.__setattr__(self, "args", args) - - def to_dict(self) -> Dict[str, Any]: - """ - Convert CLI arguments to a dictionary. 
- - Returns: - Dictionary with parsed CLI arguments - """ - result: Dict[str, Any] = {} - i = 0 - while i < len(self.args): - if self.args[i].startswith("--"): - key = self.args[i][2:].replace("-", "_") - if i + 1 < len(self.args) and not self.args[i + 1].startswith("--"): - result[key] = self.args[i + 1] - i += 2 - else: - result[key] = True - i += 1 - else: - i += 1 - return result - - -@dataclass -class ConnectionsToml: - """ - Represents connections.toml file content. - - Args: - filename: Name of the connections.toml file in the configs/ directory - """ - - filename: str - - -@dataclass -class FinalConfig: - """ - Represents the expected final merged configuration. - - Args: - config_dict: Dictionary of expected configuration values - connection: Optional connection name to test (default: None for all connections) - toml_string: Optional TOML string representation for easy reading - """ - - config_dict: Dict[str, Any] - connection: Optional[str] = None - toml_string: Optional[str] = None - - def __init__( - self, - config_dict: Optional[Dict[str, Any]] = None, - connection: Optional[str] = None, - toml_string: Optional[str] = None, - ): - """ - Initialize FinalConfig from either a dict or TOML string. 
- """ - if toml_string: - parsed = tomlkit.parse(toml_string) - object.__setattr__(self, "config_dict", dict(parsed)) - elif config_dict: - object.__setattr__(self, "config_dict", config_dict) - else: - object.__setattr__(self, "config_dict", {}) - - object.__setattr__(self, "connection", connection) - object.__setattr__(self, "toml_string", toml_string) - - def __eq__(self, other): - """Compare FinalConfig with another FinalConfig or dict.""" - if isinstance(other, FinalConfig): - return self.config_dict == other.config_dict - if isinstance(other, dict): - return self.config_dict == other - return False - - def __repr__(self): - """String representation for debugging.""" - if self.toml_string: - return f"FinalConfig(connection={self.connection}):\n{self.toml_string}" - return f"FinalConfig({self.config_dict})" - - -class ConfigSourcesContext: - """ - Context manager for setting up configuration sources in a temporary environment. - - This class: - - Creates temporary directories for config files - - Writes config files from source definitions - - Sets environment variables - - Manages cleanup - """ - - def __init__( - self, - sources: Tuple[Any, ...], - configs_dir: Path, - connection_name: Optional[str] = None, - ): - """ - Initialize the config sources context. - - Args: - sources: Tuple of source definitions (SnowSQLConfig, CliConfig, etc.) 
- configs_dir: Path to directory containing config file templates - connection_name: Optional connection name to resolve - """ - self.sources = sources - self.configs_dir = configs_dir - self.connection_name = connection_name or "a" - - self.temp_dir: Optional[Path] = None - self.snowsql_dir: Optional[Path] = None - self.snowflake_dir: Optional[Path] = None - self.original_env: Dict[str, Optional[str]] = {} - self.env_vars_to_set: Dict[str, str] = {} - self.cli_args_dict: Dict[str, Any] = {} - - self.snowsql_config_path: Optional[Path] = None - self.cli_config_path: Optional[Path] = None - self.connections_toml_path: Optional[Path] = None - - def __enter__(self): - """Set up the configuration environment.""" - self.temp_dir = Path(tempfile.mkdtemp()) - self.snowsql_dir = self.temp_dir / ".snowsql" - self.snowflake_dir = self.temp_dir / ".snowflake" - - self.snowsql_dir.mkdir(exist_ok=True) - self.snowflake_dir.mkdir(exist_ok=True) - - # Process sources - for source in self.sources: - if isinstance(source, SnowSQLConfig): - self._setup_snowsql_config(source) - elif isinstance(source, SnowSQLEnvs): - self._setup_snowsql_envs(source) - elif isinstance(source, CliConfig): - self._setup_cli_config(source) - elif isinstance(source, CliEnvs): - self._setup_cli_envs(source) - elif isinstance(source, CliParams): - self._setup_cli_params(source) - elif isinstance(source, ConnectionsToml): - self._setup_connections_toml(source) - - # Set environment variables - for key, value in self.env_vars_to_set.items(): - self.original_env[key] = os.environ.get(key) - os.environ[key] = value - - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - """Clean up the configuration environment.""" - # Restore original environment variables - for key, original_value in self.original_env.items(): - if original_value is None: - os.environ.pop(key, None) - else: - os.environ[key] = original_value - - # Clean up temp directory - if self.temp_dir: - import shutil - - 
shutil.rmtree(self.temp_dir, ignore_errors=True) - - def _setup_snowsql_config(self, source: SnowSQLConfig): - """Set up SnowSQL config file.""" - assert self.snowsql_dir is not None - config_content = (self.configs_dir / source.filename).read_text() - self.snowsql_config_path = self.snowsql_dir / "config" - self.snowsql_config_path.write_text(config_content) - - def _setup_snowsql_envs(self, source: SnowSQLEnvs): - """Set up SnowSQL environment variables from file.""" - env_file = self.configs_dir / source.filename - for line in env_file.read_text().splitlines(): - line = line.strip() - if line and not line.startswith("#") and "=" in line: - key, value = line.split("=", 1) - self.env_vars_to_set[key.strip()] = value.strip() - - def _setup_cli_config(self, source: CliConfig): - """Set up CLI config.toml file.""" - assert self.snowflake_dir is not None - config_content = (self.configs_dir / source.filename).read_text() - self.cli_config_path = self.snowflake_dir / "config.toml" - self.cli_config_path.write_text(config_content) - - def _setup_cli_envs(self, source: CliEnvs): - """Set up CLI environment variables from file.""" - env_file = self.configs_dir / source.filename - for line in env_file.read_text().splitlines(): - line = line.strip() - if line and not line.startswith("#") and "=" in line: - key, value = line.split("=", 1) - self.env_vars_to_set[key.strip()] = value.strip() - - def _setup_cli_params(self, source: CliParams): - """Set up CLI parameters.""" - self.cli_args_dict = source.to_dict() - - def _setup_connections_toml(self, source: ConnectionsToml): - """Set up connections.toml file.""" - assert self.snowflake_dir is not None - config_content = (self.configs_dir / source.filename).read_text() - self.connections_toml_path = self.snowflake_dir / "connections.toml" - self.connections_toml_path.write_text(config_content) - - def get_resolver(self) -> "ConfigurationResolver": - """ - Create a ConfigurationResolver with all configured sources. 
- - Returns: - ConfigurationResolver instance with all sources configured - """ - from snowflake.cli.api.config_ng import ( - CliConfigFile, - CliEnvironment, - CliParameters, - ConfigurationResolver, - ConnectionsConfigFile, - SnowSQLConfigFile, - SnowSQLEnvironment, - ) - - sources_list: List[Any] = [] - - # Create sources in precedence order (lowest to highest) - - # 1. SnowSQL config files (lowest priority) - if configured - if self.snowsql_config_path and self.snowsql_config_path.exists(): - # Create a custom SnowSQL source that reads from our test path - class TestSnowSQLConfig(SnowSQLConfigFile): - def __init__(self, config_path: Path): - super().__init__() - self._config_files = [config_path] - - sources_list.append(TestSnowSQLConfig(self.snowsql_config_path)) - - # 2. CLI config.toml - if configured - if self.cli_config_path and self.cli_config_path.exists(): - # Create a custom CLI config source that reads from our test path - class TestCliConfig(CliConfigFile): - def __init__(self, config_path: Path): - super().__init__() - self._search_paths = [config_path] - - sources_list.append(TestCliConfig(self.cli_config_path)) - - # 3. Connections.toml - if configured - if self.connections_toml_path and self.connections_toml_path.exists(): - # Create a custom connections source that reads from our test path - class TestConnectionsConfig(ConnectionsConfigFile): - def __init__(self, config_path: Path): - super().__init__() - self._file_path = config_path - - sources_list.append(TestConnectionsConfig(self.connections_toml_path)) - - # 4. SnowSQL environment variables - sources_list.append(SnowSQLEnvironment()) - - # 5. CLI environment variables - sources_list.append(CliEnvironment()) - - # 6. 
CLI arguments (highest priority) - if configured - if self.cli_args_dict: - sources_list.append(CliParameters(cli_context=self.cli_args_dict)) - - return ConfigurationResolver(sources=sources_list, track_history=True) - - def get_merged_config(self) -> Dict[str, Any]: - """ - Get the merged configuration from all sources. - - Extracts connection-specific values for the configured connection. - - Returns: - Dictionary with resolved configuration values (flat keys) - """ - resolver = self.get_resolver() - all_config = resolver.resolve() - - # Extract connection-specific values similar to _get_connection_dict_internal - connection_dict: Dict[str, Any] = {} - - # First, get connection-specific keys (from file sources) - connection_prefix = f"connections.{self.connection_name}." - for key, value in all_config.items(): - if key.startswith(connection_prefix): - # Extract parameter name - param_name = key[len(connection_prefix) :] - connection_dict[param_name] = value - - # Then, overlay flat keys (from env/CLI sources) - these have higher priority - for key, value in all_config.items(): - if "." not in key: # Flat key like "account", "user" - connection_dict[key] = value - - return connection_dict @contextmanager -def config_sources( - sources: Tuple[Any, ...], - configs_dir: Optional[Path] = None, - connection: Optional[str] = None, -): +def _temp_environment(env_vars: Dict[str, str]): """ - Context manager for testing merged configuration from multiple sources. + Context manager for temporarily setting environment variables. + + Saves the entire environment, applies new variables, then restores + the original environment completely on exit. Args: - sources: Tuple of source definitions (SnowSQLConfig, CliConfig, etc.) 
- configs_dir: Path to directory containing config file templates (defaults to ./configs/) - connection: Optional connection name to resolve (defaults to "a") + env_vars: Dictionary of environment variables to set Yields: - ConfigSourcesContext instance for accessing merged configuration - - Example: - sources = ( - SnowSQLConfig('config'), - SnowSQLEnvs('snowsql.env'), - CliConfig('config.toml'), - CliEnvs('cli.env'), - CliParams("--account", "test_account", "--user", "alice"), - ConnectionsToml('connections.toml'), - ) - - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() - assert merged["account"] == "test_account" - """ - if configs_dir is None: - configs_dir = Path(__file__).parent / "configs" - - context = ConfigSourcesContext(sources, configs_dir, connection) - with context as ctx: - yield ctx - - -@pytest.fixture -def merged_cli_config(): - """ - Fixture that provides a function to get the merged CLI configuration. - - This should be used inside a config_sources context manager. - - Returns: - Function that returns the merged configuration dictionary + None """ - - def _get_merged_config(ctx: ConfigSourcesContext) -> Dict[str, Any]: - """Get merged configuration from context.""" - return ctx.get_merged_config() - - return _get_merged_config + original_env = copy.deepcopy(dict(os.environ)) + try: + os.environ.update(env_vars) + yield + finally: + os.environ.clear() + os.environ.update(original_env) @pytest.fixture -def make_cli_instance(): - """ - Fixture that provides a function to create a CLI instance. - - Note: This is a placeholder for future implementation if needed. - For now, we work directly with the resolver. - - Returns: - Function that creates a CLI instance (placeholder) - """ - - def _make_cli(): - """Create CLI instance placeholder.""" - return None - - return _make_cli +def config_ng_setup(): + """ + Fixture that provides a context manager for setting up config_ng test environments. 
+ + Returns a context manager function that: + 1. Creates temp SNOWFLAKE_HOME + 2. Writes config files + 3. Sets env vars + 4. Enables config_ng + 5. Resets provider + 6. Yields (test can now call get_connection_dict()) + 7. Cleans up + + Usage: + def test_something(config_ng_setup): + with config_ng_setup( + cli_config="[connections.test]\\naccount = 'test'", + env_vars={"SNOWFLAKE_USER": "alice"} + ): + from snowflake.cli.api.config import get_connection_dict + conn = get_connection_dict("test") + assert conn["account"] == "test" + + Args (to returned context manager): + snowsql_config: SnowSQL INI config content (will be dedented) + cli_config: CLI TOML config content (will be dedented) + connections_toml: Connections TOML content (will be dedented) + env_vars: Environment variables to set + """ + + @contextmanager + def _setup( + snowsql_config: Optional[str] = None, + cli_config: Optional[str] = None, + connections_toml: Optional[str] = None, + env_vars: Optional[Dict[str, str]] = None, + ): + with tempfile.TemporaryDirectory() as tmpdir: + snowflake_home = Path(tmpdir) / ".snowflake" + snowflake_home.mkdir() + + # Write config files if provided + if snowsql_config: + (snowflake_home / "config").write_text(dedent(snowsql_config)) + if cli_config: + (snowflake_home / "config.toml").write_text(dedent(cli_config)) + if connections_toml: + (snowflake_home / "connections.toml").write_text( + dedent(connections_toml) + ) + + # Prepare environment variables + env_to_set = { + "SNOWFLAKE_HOME": str(snowflake_home), + "SNOWFLAKE_CLI_CONFIG_V2_ENABLED": "true", + } + if env_vars: + env_to_set.update(env_vars) + + # Set up environment and run test + with _temp_environment(env_to_set): + # Clear config_file_override to use SNOWFLAKE_HOME instead + from snowflake.cli.api.cli_global_context import ( + get_cli_context_manager, + ) + + cli_ctx_mgr = get_cli_context_manager() + original_config_override = cli_ctx_mgr.config_file_override + cli_ctx_mgr.config_file_override = 
None + + try: + # Reset config provider to use new config + from snowflake.cli.api.config_provider import reset_config_provider + + reset_config_provider() + + yield + + finally: + # Restore config_file_override + if original_config_override is not None: + cli_ctx_mgr = get_cli_context_manager() + cli_ctx_mgr.config_file_override = original_config_override + + # Reset config provider + reset_config_provider() + + return _setup diff --git a/tests/config_ng/test_configuration.py b/tests/config_ng/test_configuration.py index 63f0e6ac3c..7faad86c89 100644 --- a/tests/config_ng/test_configuration.py +++ b/tests/config_ng/test_configuration.py @@ -13,238 +13,1030 @@ # limitations under the License. """ -Tests for merged configuration from multiple sources. - -These tests verify that configuration values are properly merged from: -- SnowSQL config files (.snowsql/config) -- SnowSQL environment variables (SNOWSQL_*) -- CLI config files (.snowflake/config.toml) -- CLI environment variables (SNOWFLAKE_*) -- CLI command-line parameters -- Connections TOML files (.snowflake/connections.toml) +Simplified tests for config_ng using minimal mocking. + +This test file demonstrates a simpler approach to testing configuration +resolution by: +1. Setting up temporary SNOWFLAKE_HOME with config files +2. Setting environment variables directly +3. Calling get_connection_dict() to test the actual public API +4. Minimal mocking - only using the real config_ng system + +Uses the config_ng_setup fixture from conftest.py. """ -from textwrap import dedent - -from .conftest import ( - CliConfig, - CliEnvs, - CliParams, - ConnectionsToml, - FinalConfig, - SnowSQLConfig, - SnowSQLEnvs, - config_sources, -) - - -class TestConfigurationMerging: - """Test configuration merging from multiple sources.""" - - def test_all_sources_merged(self): - """ - Test that all configuration sources are properly merged. - - Priority order (highest to lowest): - 1. CLI parameters - 2. CLI environment variables - 3. 
SnowSQL environment variables - 4. CLI config.toml - 5. Connections.toml - 6. SnowSQL config - """ - sources = ( - SnowSQLConfig("config"), - SnowSQLEnvs("snowsql.env"), - CliConfig("config.toml"), - CliEnvs("cli.env"), - CliParams("--account", "cli-account", "--user", "cli-user"), - ConnectionsToml("connections.toml"), - ) - expected = FinalConfig( - config_dict={ - "account": "cli-account", - "user": "cli-user", - "password": "abc", - } - ) +# Tests for all 7 precedence levels - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() - # CLI params have highest priority - assert merged["account"] == "cli-account" - assert merged["user"] == "cli-user" +def test_level1_snowsql_config(config_ng_setup): + """Base level: SnowSQL config provides values""" + snowsql_config = """ + [connections.test] + accountname = from-snowsql + user = test-user + password = test-password + """ - # Password comes from config files - assert merged.get("password") == "abc" + with config_ng_setup(snowsql_config=snowsql_config): + from snowflake.cli.api.config import get_connection_dict - def test_cli_envs_override_snowsql_envs(self): - """Test that CLI environment variables override SnowSQL environment variables.""" - sources = ( - SnowSQLEnvs("snowsql.env"), - CliEnvs("cli.env"), - ) + conn = get_connection_dict("test") + assert conn["account"] == "from-snowsql" + assert conn["user"] == "test-user" + assert conn["password"] == "test-password" - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() - # CLI env (SNOWFLAKE_USER=Alice) overrides - # SnowSQL env (SNOWSQL_USER=Bob) - assert merged["user"] == "Alice" +def test_level2_cli_config_overrides_snowsql(config_ng_setup): + """CLI config.toml overrides SnowSQL config""" + snowsql_config = """ + [connections.test] + accountname = from-snowsql + user = snowsql-user + """ - def test_cli_params_override_all(self): - """Test that CLI parameters override all other sources.""" - sources = ( - 
SnowSQLConfig("config"), - CliConfig("config.toml"), - CliParams("--account", "override-account"), - ) + cli_config = """ + [connections.test] + account = "from-cli-config" + user = "cli-config-user" + """ - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() + with config_ng_setup(snowsql_config=snowsql_config, cli_config=cli_config): + from snowflake.cli.api.config import get_connection_dict - # CLI params override everything - assert merged["account"] == "override-account" + conn = get_connection_dict("test") + assert conn["account"] == "from-cli-config" + assert conn["user"] == "cli-config-user" - def test_config_files_precedence(self): - """Test precedence among configuration files.""" - sources = ( - SnowSQLConfig("config"), - CliConfig("config.toml"), - ) - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() +def test_level3_connections_toml_overrides_cli_config(config_ng_setup): + """connections.toml overrides cli config.toml""" + cli_config = """ + [connections.test] + account = "from-cli-config" + warehouse = "cli-warehouse" + """ - # CLI config.toml has higher priority than SnowSQL config - # Both have account-a, but config.toml should win - assert merged["account"] == "account-a" - assert merged["username"] == "user" + connections_toml = """ + [connections.test] + account = "from-connections-toml" + warehouse = "connections-warehouse" + """ - def test_connections_toml_separate_connection(self): - """Test that connections.toml can have separate connections.""" - sources = (ConnectionsToml("connections.toml"),) + with config_ng_setup(cli_config=cli_config, connections_toml=connections_toml): + from snowflake.cli.api.config import get_connection_dict - # Test connection 'b' which only exists in connections.toml - with config_sources(sources, connection="b") as ctx: - merged = ctx.get_merged_config() + conn = get_connection_dict("test") + assert conn["account"] == "from-connections-toml" + assert conn["warehouse"] == 
"connections-warehouse" - assert merged["account"] == "account-a" - assert merged["username"] == "user" - assert merged["password"] == "abc" - def test_empty_sources(self): - """Test that empty sources return minimal configuration.""" - sources = () +def test_level4_snowsql_env_overrides_connections_toml(config_ng_setup): + """SNOWSQL_* env vars override connections.toml""" + connections_toml = """ + [connections.test] + account = "from-connections-toml" + database = "connections-db" + """ - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() + env_vars = {"SNOWSQL_ACCOUNT": "from-snowsql-env", "SNOWSQL_DATABASE": "env-db"} - # May contain default keys like 'home', but no connection-specific keys - assert "account" not in merged - assert "user" not in merged - assert "password" not in merged + with config_ng_setup(connections_toml=connections_toml, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict - def test_only_cli_params(self): - """Test configuration with only CLI parameters.""" - sources = (CliParams("--account", "test-account", "--user", "test-user"),) + conn = get_connection_dict("test") + assert conn["account"] == "from-snowsql-env" + assert conn["database"] == "env-db" - with config_sources(sources) as ctx: - merged = ctx.get_merged_config() - assert merged["account"] == "test-account" - assert merged["user"] == "test-user" +def test_level5_connection_specific_env_overrides_snowsql_env(config_ng_setup): + """SNOWFLAKE_CONNECTIONS_* overrides SNOWSQL_*""" + env_vars = { + "SNOWSQL_ACCOUNT": "from-snowsql-env", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "from-conn-specific-env", + "SNOWFLAKE_CONNECTIONS_TEST_ROLE": "conn-specific-role", + } - def test_final_config_from_dict(self): - """Test FinalConfig creation from dictionary.""" - expected = FinalConfig(config_dict={"account": "test", "user": "alice"}) + with config_ng_setup(env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict - assert 
expected.config_dict == {"account": "test", "user": "alice"} - assert expected == {"account": "test", "user": "alice"} + conn = get_connection_dict("test") + assert conn["account"] == "from-conn-specific-env" + assert conn["role"] == "conn-specific-role" - def test_final_config_from_toml_string(self): - """Test FinalConfig creation from TOML string for readability.""" - toml_string = dedent( - """ - [connections.prod] - account = "prod-account" - user = "prod-user" - password = "secret" - """ - ) - expected = FinalConfig(toml_string=toml_string) +def test_level6_general_env_overrides_connection_specific(config_ng_setup): + """SNOWFLAKE_* overrides SNOWFLAKE_CONNECTIONS_*""" + env_vars = { + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "from-conn-specific", + "SNOWFLAKE_ACCOUNT": "from-general-env", + "SNOWFLAKE_SCHEMA": "general-schema", + } - assert "connections" in expected.config_dict - assert expected.config_dict["connections"]["prod"]["account"] == "prod-account" + with config_ng_setup(env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict - def test_final_config_equality(self): - """Test FinalConfig equality comparison.""" - config1 = FinalConfig(config_dict={"account": "test", "user": "alice"}) - config2 = FinalConfig(config_dict={"account": "test", "user": "alice"}) - config3 = FinalConfig(config_dict={"account": "test", "user": "bob"}) + conn = get_connection_dict("test") + assert conn["account"] == "from-general-env" + assert conn["schema"] == "general-schema" - assert config1 == config2 - assert config1 != config3 - assert config1 == {"account": "test", "user": "alice"} +def test_complete_7_level_chain(config_ng_setup): + """All 7 levels with different keys showing complete precedence""" + snowsql_config = """ + [connections.test] + accountname = level1 + user = level1-user + """ -class TestConfigurationResolution: - """Test configuration resolution details.""" + cli_config = """ + [connections.test] + account = "level2" + password = 
"level2-pass" + """ - def test_resolution_history_tracking(self): - """Test that resolution history is tracked correctly.""" - sources = ( - SnowSQLConfig("config"), - CliConfig("config.toml"), - CliParams("--account", "cli-account"), - ) + connections_toml = """ + [connections.test] + account = "level3" + warehouse = "level3-wh" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "level4", + "SNOWSQL_DATABASE": "level4-db", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "level5", + "SNOWFLAKE_CONNECTIONS_TEST_ROLE": "level5-role", + "SNOWFLAKE_ACCOUNT": "level6", + "SNOWFLAKE_SCHEMA": "level6-schema", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") - with config_sources(sources) as ctx: - resolver = ctx.get_resolver() - config = resolver.resolve() - - # Check that account was overridden (flat key from CLI) - assert config["account"] == "cli-account" - - # Also check that connection-specific key exists (from file sources) - assert config.get("connections.a.account") == "account-a" - - # Check resolution history for flat key (from CLI params) - cli_history = resolver.get_resolution_history("account") - assert cli_history is not None - assert ( - len(cli_history.entries) == 1 - ) # Only CLI param provides flat "account" - assert cli_history.selected_entry - assert ( - cli_history.selected_entry.config_value.source_name == "cli_arguments" - ) - - # Check resolution history for prefixed key (from file sources) - file_history = resolver.get_resolution_history("connections.a.account") - assert file_history is not None - assert len(file_history.entries) >= 1 # Config files provide prefixed key - - def test_resolution_summary(self): - """Test that resolution summary provides useful statistics.""" - sources = ( - SnowSQLConfig("config"), - CliConfig("config.toml"), - CliParams("--account", 
"cli-account"), + # Level 6 should win for account (general env) + assert conn["account"] == "level6" + + # Level 6 provides schema (only level with it) + assert conn["schema"] == "level6-schema" + + # Level 5 provides role (highest level with it) + assert conn["role"] == "level5-role" + + # Level 4 provides database + assert conn["database"] == "level4-db" + + # Level 3 provides warehouse + assert conn["warehouse"] == "level3-wh" + + # Level 2 provides password + assert conn["password"] == "level2-pass" + + # Level 1 provides user + assert conn["user"] == "level1-user" + + +def test_get_connection_dict_uses_config_ng_when_enabled(config_ng_setup): + """Validate that get_connection_dict delegates to config_ng when flag is set""" + + cli_config = """ + [connections.test] + account = "test-account" + user = "test-user" + """ + + with config_ng_setup(cli_config=cli_config): + from snowflake.cli.api.config import get_connection_dict + from snowflake.cli.api.config_provider import ( + AlternativeConfigProvider, + get_config_provider_singleton, ) - with config_sources(sources) as ctx: - resolver = ctx.get_resolver() - resolver.resolve() + # Verify we're using AlternativeConfigProvider + provider = get_config_provider_singleton() + assert isinstance(provider, AlternativeConfigProvider) + + # Verify resolution works + conn = get_connection_dict("test") + + assert conn["account"] == "test-account" + assert conn["user"] == "test-user" + + +def test_precedence_with_multiple_connections(config_ng_setup): + """Test that precedence works correctly for multiple connections""" + cli_config = """ + [connections.conn1] + account = "conn1-account" + user = "conn1-user" + + [connections.conn2] + account = "conn2-account" + user = "conn2-user" + """ + + env_vars = { + "SNOWFLAKE_CONNECTIONS_CONN1_ACCOUNT": "conn1-env", + "SNOWFLAKE_SCHEMA": "common-schema", + } + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict 
+ + # conn1 should have env override + conn1 = get_connection_dict("conn1") + assert conn1["account"] == "conn1-env" # From connection-specific env + assert conn1["user"] == "conn1-user" # From config file + assert conn1["schema"] == "common-schema" # From general env + + # conn2 should use config values + conn2 = get_connection_dict("conn2") + assert conn2["account"] == "conn2-account" # From config file + assert conn2["user"] == "conn2-user" # From config file + assert conn2["schema"] == "common-schema" # From general env + + +def test_snowsql_key_mapping(config_ng_setup): + """Test that SnowSQL key names are properly mapped to CLI names""" + snowsql_config = """ + [connections.test] + accountname = test-account + username = test-user + dbname = test-db + schemaname = test-schema + rolename = test-role + warehousename = test-warehouse + pwd = test-password + """ + + with config_ng_setup(snowsql_config=snowsql_config): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + # All SnowSQL names should be mapped to CLI names + assert conn["account"] == "test-account" + assert conn["user"] == "test-user" + assert conn["database"] == "test-db" + assert conn["schema"] == "test-schema" + assert conn["role"] == "test-role" + assert conn["warehouse"] == "test-warehouse" + assert conn["password"] == "test-password" + + +def test_empty_config_files(config_ng_setup): + """Test behavior with empty/missing config files""" + # Only set env vars, no config files + env_vars = { + "SNOWFLAKE_ACCOUNT": "env-only-account", + "SNOWFLAKE_USER": "env-only-user", + } + + with config_ng_setup(env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("default") + assert conn["account"] == "env-only-account" + assert conn["user"] == "env-only-user" + + +# Group 1: Non-Adjacent 2-Source Tests + + +def test_snowsql_config_with_snowsql_env_direct(config_ng_setup): + """Test SnowSQL env overrides 
SnowSQL config when intermediate sources absent""" + snowsql_config = """ + [connections.test] + accountname = from-config + user = config-user + database = config-db + """ + + env_vars = {"SNOWSQL_ACCOUNT": "from-env", "SNOWSQL_DATABASE": "env-db"} + + with config_ng_setup(snowsql_config=snowsql_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + # Level 4 (SnowSQL env) wins for account and database + # Level 1 (SnowSQL config) wins for user + expected = { + "account": "from-env", + "user": "config-user", + "database": "env-db", + } + assert conn == expected + + +def test_snowsql_config_with_general_env_direct(config_ng_setup): + """Test general env overrides SnowSQL config across all intermediate levels""" + snowsql_config = """ + [connections.test] + accountname = from-config + user = config-user + warehouse = config-warehouse + """ + + env_vars = {"SNOWFLAKE_ACCOUNT": "from-env", "SNOWFLAKE_WAREHOUSE": "env-warehouse"} + + with config_ng_setup(snowsql_config=snowsql_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "from-env", + "user": "config-user", + "warehouse": "env-warehouse", + } + assert conn == expected + + +def test_cli_config_with_general_env_direct(config_ng_setup): + """Test general env overrides CLI config when intermediate sources absent""" + cli_config = """ + [connections.test] + account = "from-cli" + user = "cli-user" + role = "cli-role" + """ + + env_vars = {"SNOWFLAKE_ACCOUNT": "from-env", "SNOWFLAKE_ROLE": "env-role"} + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "from-env", + "user": "cli-user", + "role": "env-role", + } + assert conn == expected + + +def 
test_connections_toml_with_general_env_direct(config_ng_setup): + """Test general env overrides Connections TOML directly""" + connections_toml = """ + [connections.test] + account = "from-toml" + user = "toml-user" + schema = "toml-schema" + """ + + env_vars = {"SNOWFLAKE_ACCOUNT": "from-env", "SNOWFLAKE_SCHEMA": "env-schema"} + + with config_ng_setup(connections_toml=connections_toml, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "from-env", + "user": "toml-user", + "schema": "env-schema", + } + assert conn == expected + + +# Group 2: Strategic 3-Source Tests + + +def test_all_file_sources_precedence(config_ng_setup): + """Test precedence among all three file-based sources""" + snowsql_config = """ + [connections.test] + accountname = from-snowsql + user = snowsql-user + warehouse = snowsql-warehouse + password = snowsql-pass + """ + + cli_config = """ + [connections.test] + account = "from-cli" + user = "cli-user" + password = "cli-pass" + """ + + connections_toml = """ + [connections.test] + account = "from-connections" + password = "connections-pass" + """ + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "from-connections", # Level 3 wins + "user": "cli-user", # Level 2 wins + "warehouse": "snowsql-warehouse", # Level 1 only source + "password": "connections-pass", # Level 3 wins + } + assert conn == expected + + +def test_all_env_sources_precedence(config_ng_setup): + """Test precedence among all three environment variable types""" + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql-env", + "SNOWSQL_DATABASE": "snowsql-db", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "conn-specific", + "SNOWFLAKE_CONNECTIONS_TEST_ROLE": "conn-role", + "SNOWFLAKE_ACCOUNT": 
"general-env", + "SNOWFLAKE_SCHEMA": "general-schema", + } + + with config_ng_setup(env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "general-env", # Level 6 wins + "schema": "general-schema", # Level 6 only source + "role": "conn-role", # Level 5 only source + "database": "snowsql-db", # Level 4 only source + } + assert conn == expected + + +def test_file_and_env_mix_with_gaps(config_ng_setup): + """Test precedence with gaps in source chain""" + snowsql_config = """ + [connections.test] + accountname = snowsql-account + user = snowsql-user + """ + + connections_toml = """ + [connections.test] + account = "toml-account" + warehouse = "toml-warehouse" + """ + + env_vars = {"SNOWFLAKE_ACCOUNT": "env-account"} + + with config_ng_setup( + snowsql_config=snowsql_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "env-account", # Level 6 wins + "user": "snowsql-user", # Level 1 only source + "warehouse": "toml-warehouse", # Level 3 only source + } + assert conn == expected + + +def test_cli_config_with_two_env_types(config_ng_setup): + """Test CLI config as base with two env override types""" + cli_config = """ + [connections.test] + account = "cli-account" + user = "cli-user" + database = "cli-db" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql-env", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "conn-specific", + "SNOWFLAKE_CONNECTIONS_TEST_DATABASE": "conn-db", + } + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "conn-specific", # Level 5 wins + "user": "cli-user", # Level 2 only source + "database": "conn-db", # Level 5 wins + } + assert conn == expected + + +# Group 3: 4-Source 
Combinations + + +def test_all_files_plus_snowsql_env(config_ng_setup): + """Test all file sources with SnowSQL environment override""" + snowsql_config = """ + [connections.test] + accountname = snowsql-config + user = snowsql-user + """ + + cli_config = """ + [connections.test] + account = "cli-config" + warehouse = "cli-warehouse" + """ + + connections_toml = """ + [connections.test] + account = "toml-account" + database = "toml-db" + """ + + env_vars = {"SNOWSQL_ACCOUNT": "env-account"} + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "env-account", # Level 4 wins + "user": "snowsql-user", # Level 1 only + "warehouse": "cli-warehouse", # Level 2 only + "database": "toml-db", # Level 3 only + } + assert conn == expected + + +def test_all_files_plus_general_env(config_ng_setup): + """Test all file sources with general environment override""" + snowsql_config = """ + [connections.test] + accountname = snowsql-config + user = snowsql-user + """ + + cli_config = """ + [connections.test] + account = "cli-config" + role = "cli-role" + """ + + connections_toml = """ + [connections.test] + account = "toml-account" + warehouse = "toml-warehouse" + """ + + env_vars = { + "SNOWFLAKE_ACCOUNT": "env-account", + "SNOWFLAKE_WAREHOUSE": "env-warehouse", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "env-account", # Level 6 wins + "user": "snowsql-user", # Level 1 only + "role": "cli-role", # Level 2 only + "warehouse": "env-warehouse", # Level 6 wins + } + assert conn == expected + + +def 
test_cli_config_with_all_env_types(config_ng_setup): + """Test single file source with all three environment types""" + cli_config = """ + [connections.test] + account = "cli-account" + user = "cli-user" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql-env", + "SNOWSQL_DATABASE": "snowsql-db", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "conn-specific", + "SNOWFLAKE_CONNECTIONS_TEST_ROLE": "conn-role", + "SNOWFLAKE_ACCOUNT": "general-env", + "SNOWFLAKE_SCHEMA": "general-schema", + } + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "general-env", # Level 6 wins + "user": "cli-user", # Level 2 only + "database": "snowsql-db", # Level 4 only + "role": "conn-role", # Level 5 only + "schema": "general-schema", # Level 6 only + } + assert conn == expected + + +def test_two_files_two_envs_with_gap(config_ng_setup): + """Test non-adjacent file sources with non-adjacent env sources""" + snowsql_config = """ + [connections.test] + accountname = snowsql-config + user = snowsql-user + """ + + connections_toml = """ + [connections.test] + account = "toml-account" + warehouse = "toml-warehouse" + """ + + env_vars = { + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "conn-specific", + "SNOWFLAKE_CONNECTIONS_TEST_DATABASE": "conn-db", + "SNOWFLAKE_ACCOUNT": "general-env", + "SNOWFLAKE_SCHEMA": "general-schema", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "general-env", # Level 6 wins + "user": "snowsql-user", # Level 1 only + "warehouse": "toml-warehouse", # Level 3 only + "database": "conn-db", # Level 5 only + "schema": "general-schema", # Level 6 only + } + assert conn == expected + + +# Group 4: 5-Source Combinations + + +def 
test_all_files_plus_two_env_types(config_ng_setup): + """Test all file sources with two environment override types""" + snowsql_config = """ + [connections.test] + accountname = snowsql-config + user = snowsql-user + """ + + cli_config = """ + [connections.test] + account = "cli-config" + password = "cli-password" + """ + + connections_toml = """ + [connections.test] + account = "toml-account" + warehouse = "toml-warehouse" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql-env", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "conn-specific", + "SNOWFLAKE_CONNECTIONS_TEST_WAREHOUSE": "conn-warehouse", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "conn-specific", # Level 5 wins + "user": "snowsql-user", # Level 1 only + "password": "cli-password", # Level 2 only + "warehouse": "conn-warehouse", # Level 5 wins + } + assert conn == expected + + +def test_two_files_all_envs(config_ng_setup): + """Test two file sources with all three environment types""" + snowsql_config = """ + [connections.test] + accountname = snowsql-config + user = snowsql-user + """ + + cli_config = """ + [connections.test] + account = "cli-config" + password = "cli-password" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql-env", + "SNOWSQL_DATABASE": "snowsql-db", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "conn-specific", + "SNOWFLAKE_CONNECTIONS_TEST_ROLE": "conn-role", + "SNOWFLAKE_ACCOUNT": "general-env", + "SNOWFLAKE_WAREHOUSE": "general-warehouse", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "general-env", # Level 6 wins + "user": "snowsql-user", # Level 1 only + 
"password": "cli-password", # Level 2 only + "database": "snowsql-db", # Level 4 only + "role": "conn-role", # Level 5 only + "warehouse": "general-warehouse", # Level 6 only + } + assert conn == expected + + +def test_connections_toml_with_all_env_types(config_ng_setup): + """Test Connections TOML with all environment override types""" + connections_toml = """ + [connections.test] + account = "toml-account" + user = "toml-user" + warehouse = "toml-warehouse" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql-env", + "SNOWSQL_DATABASE": "snowsql-db", + "SNOWFLAKE_CONNECTIONS_TEST_WAREHOUSE": "conn-warehouse", + "SNOWFLAKE_CONNECTIONS_TEST_ROLE": "conn-role", + "SNOWFLAKE_ACCOUNT": "general-env", + "SNOWFLAKE_SCHEMA": "general-schema", + } + + with config_ng_setup(connections_toml=connections_toml, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "general-env", # Level 6 wins + "user": "toml-user", # Level 3 only + "warehouse": "conn-warehouse", # Level 5 wins + "database": "snowsql-db", # Level 4 only + "role": "conn-role", # Level 5 only + "schema": "general-schema", # Level 6 only + } + assert conn == expected + + +def test_snowsql_and_connections_with_all_envs(config_ng_setup): + """Test two non-adjacent file sources with all environment types""" + snowsql_config = """ + [connections.test] + accountname = snowsql-config + user = snowsql-user + password = snowsql-password + """ + + connections_toml = """ + [connections.test] + account = "toml-account" + warehouse = "toml-warehouse" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "snowsql-env", + "SNOWSQL_WAREHOUSE": "snowsql-warehouse", + "SNOWFLAKE_CONNECTIONS_TEST_PASSWORD": "conn-password", + "SNOWFLAKE_CONNECTIONS_TEST_ROLE": "conn-role", + "SNOWFLAKE_ACCOUNT": "general-env", + "SNOWFLAKE_DATABASE": "general-db", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + connections_toml=connections_toml, + 
env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "general-env", # Level 6 wins + "user": "snowsql-user", # Level 1 only + "password": "conn-password", # Level 5 wins + "warehouse": "snowsql-warehouse", # Level 4 wins (overrides level 3) + "role": "conn-role", # Level 5 only + "database": "general-db", # Level 6 only + } + assert conn == expected + + +# Group 5: Edge Cases + + +def test_multiple_connections_different_source_patterns(config_ng_setup): + """Test that different connections can have different active sources""" + cli_config = """ + [connections.conn1] + account = "conn1-cli" + user = "conn1-user" + + [connections.conn2] + account = "conn2-cli" + user = "conn2-user" + """ + + connections_toml = """ + [connections.conn1] + warehouse = "conn1-warehouse" + + [connections.conn3] + account = "conn3-toml" + user = "conn3-user" + """ + + env_vars = { + "SNOWFLAKE_CONNECTIONS_CONN1_ACCOUNT": "conn1-env", + "SNOWFLAKE_CONNECTIONS_CONN2_DATABASE": "conn2-db", + "SNOWFLAKE_SCHEMA": "common-schema", + } + + with config_ng_setup( + cli_config=cli_config, connections_toml=connections_toml, env_vars=env_vars + ): + from snowflake.cli.api.config import get_connection_dict + + conn1 = get_connection_dict("conn1") + expected1 = { + "account": "conn1-env", # Connection-specific env wins + "user": "conn1-user", # CLI config + "warehouse": "conn1-warehouse", # Connections TOML + "schema": "common-schema", # General env + } + assert conn1 == expected1 + + conn2 = get_connection_dict("conn2") + expected2 = { + "account": "conn2-cli", # CLI config + "user": "conn2-user", # CLI config + "database": "conn2-db", # Connection-specific env + "schema": "common-schema", # General env + } + assert conn2 == expected2 + + conn3 = get_connection_dict("conn3") + expected3 = { + "account": "conn3-toml", # Connections TOML + "user": "conn3-user", # Connections TOML + "schema": 
"common-schema", # General env + } + assert conn3 == expected3 + + +def test_snowsql_key_mapping_with_precedence(config_ng_setup): + """Test SnowSQL legacy key names work correctly across precedence levels""" + snowsql_config = """ + [connections.test] + accountname = snowsql-account + username = snowsql-user + dbname = snowsql-db + schemaname = snowsql-schema + rolename = snowsql-role + warehousename = snowsql-warehouse + """ + + cli_config = """ + [connections.test] + account = "cli-account" + database = "cli-db" + """ + + env_vars = { + "SNOWFLAKE_ACCOUNT": "env-account", + "SNOWFLAKE_SCHEMA": "env-schema", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "env-account", # Level 6 wins + "user": "snowsql-user", # Level 1 only (mapped from username) + "database": "cli-db", # Level 2 wins + "schema": "env-schema", # Level 6 wins + "role": "snowsql-role", # Level 1 only (mapped from rolename) + "warehouse": "snowsql-warehouse", # Level 1 only (mapped from warehousename) + } + assert conn == expected + + +def test_empty_intermediate_sources_dont_break_chain(config_ng_setup): + """Test that empty config files don't prevent higher sources from working""" + snowsql_config = """ + [connections.test] + accountname = snowsql-account + user = snowsql-user + """ + + # Empty CLI config and connections.toml + cli_config = "" + connections_toml = "" + + env_vars = {"SNOWFLAKE_ACCOUNT": "env-account"} + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + expected = { + "account": "env-account", # Level 6 wins + "user": "snowsql-user", # Level 1 only + } + assert conn == expected + + +def 
test_account_parameter_across_all_sources(config_ng_setup): + """Test account parameter defined in all sources follows precedence""" + snowsql_config = """ + [connections.test] + accountname = level1-account + """ + + cli_config = """ + [connections.test] + account = "level2-account" + """ + + connections_toml = """ + [connections.test] + account = "level3-account" + """ + + env_vars = { + "SNOWSQL_ACCOUNT": "level4-account", + "SNOWFLAKE_CONNECTIONS_TEST_ACCOUNT": "level5-account", + "SNOWFLAKE_ACCOUNT": "level6-account", + } - summary = resolver.get_history_summary() + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict - assert summary["total_keys_resolved"] > 0 - assert "source_usage" in summary - assert "source_wins" in summary + conn = get_connection_dict("test") - # CLI should have won for account - assert summary["source_wins"].get("cli_arguments", 0) >= 1 + # Only account should be present since all sources only provide account + expected = { + "account": "level6-account", # Level 6 (general env) wins + } + assert conn == expected From 3a6a2989d63f1e3928475bb264d26de8158bcb68 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 10:37:13 +0200 Subject: [PATCH 42/78] SNOW-2306184: config refactor - connections toml merging split --- src/snowflake/cli/api/config_provider.py | 12 +++++- tests/test_config.py | 49 +++++++++++++++++++++++- 2 files changed, 58 insertions(+), 3 deletions(-) diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index c2e2897a68..ff662b0fe7 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -448,9 +448,17 @@ def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: param_names.add(param_name) # Get param names from flat keys (general env vars, SnowSQL env, 
CLI params) - # Skip internal CLI arguments that aren't connection parameters + # Skip internal CLI arguments and global settings that aren't connection parameters for key in self._config_cache.keys(): - if "." not in key and key not in ("enable_diag", "temporary_connection"): + if "." not in key and key not in ( + "enable_diag", + "temporary_connection", + "default_connection_name", + "connection_name", + "diag_log_path", + "diag_allowlist_path", + "mfa_passcode", + ): param_names.add(key) # For each parameter, determine the best value based on source priority diff --git a/tests/test_config.py b/tests/test_config.py index b8747cfbb3..5742676c8a 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -34,6 +34,11 @@ from tests_common import IS_WINDOWS +def is_config_ng_enabled(): + """Check if config_ng is enabled via environment variable""" + return os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") == "true" + + def test_empty_config_file_is_created_if_not_present(): from snowflake.cli.api.utils.path_utils import path_resolver @@ -383,7 +388,12 @@ def assert_correct_connections_loaded(): assert_correct_connections_loaded() -def test_connections_toml_override_config_toml( +# Legacy version - skip when config_ng is enabled +@pytest.mark.skipif( + is_config_ng_enabled(), + reason="Legacy behavior: connections.toml replaces all connections from config.toml", +) +def test_connections_toml_override_config_toml_legacy( test_snowcli_config, snowflake_home, config_manager ): connections_toml = snowflake_home / "connections.toml" @@ -394,12 +404,49 @@ def test_connections_toml_override_config_toml( ) config_init(test_snowcli_config) + # Legacy: Only connections from connections.toml are present assert get_default_connection_dict() == {"database": "overridden_database"} assert config_manager["connections"] == { "default": {"database": "overridden_database"} } +# Config_ng version - skip when config_ng is NOT enabled +@pytest.mark.skipif( + not is_config_ng_enabled(), + 
reason="Config_ng behavior: connections.toml merges with config.toml per-key", +) +def test_connections_toml_override_config_toml_config_ng( + test_snowcli_config, snowflake_home, config_manager +): + """Test config_ng behavior: connections.toml merges with config.toml per-key""" + connections_toml = snowflake_home / "connections.toml" + connections_toml.write_text( + """[default] + database = "overridden_database" + """ + ) + config_init(test_snowcli_config) + + # Config_ng: Merged - database from connections.toml, other keys from config.toml + # The key difference from legacy: keys from config.toml are preserved + default_conn = get_default_connection_dict() + + # Key from connections.toml (level 3) overrides + assert default_conn["database"] == "overridden_database" + + # Keys from config.toml (level 2) are preserved + assert default_conn["schema"] == "test_public" + assert default_conn["role"] == "test_role" + assert default_conn["warehouse"] == "xs" + assert default_conn["password"] == "dummy_password" + + # Verify other connections from config.toml are also accessible + full_conn = get_connection_dict("full") + assert full_conn["account"] == "dev_account" + assert full_conn["user"] == "dev_user" + + parametrize_chmod = pytest.mark.parametrize( "chmod", [ From ad0a8bb00554bda407c682efa37109a05955f361 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 11:02:40 +0200 Subject: [PATCH 43/78] SNOW-2306184: config refactor - cleanup 1 --- .../plugin_tests/logs/snowflake-cli.log | 2760 ----------------- 1 file changed, 2760 deletions(-) delete mode 100644 tests_integration/config/plugin_tests/logs/snowflake-cli.log diff --git a/tests_integration/config/plugin_tests/logs/snowflake-cli.log b/tests_integration/config/plugin_tests/logs/snowflake-cli.log deleted file mode 100644 index 0c36c105d0..0000000000 --- a/tests_integration/config/plugin_tests/logs/snowflake-cli.log +++ /dev/null @@ -1,2760 +0,0 @@ -2025-10-10 09:44:38 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 09:44:38 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 09:44:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 09:44:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 09:50:55 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 09:50:55 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 09:50:57 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 09:50:57 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:06:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:06:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:06:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:06:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:06:09 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in 
plugin -2025-10-10 10:06:11 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:06:13 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:06:13 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:06:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:06:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:06:50 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:06:50 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:06:52 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:06:52 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:06:54 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:06:54 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:06:56 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:06:56 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:06:58 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:07:01 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:07:09 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:07:09 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:07:10 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:07:10 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:07:14 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:07:14 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:07:16 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:07:16 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:07:18 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:07:20 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:08:25 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:08:25 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:08:27 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:08:27 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:08:29 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:08:32 ERROR 
[snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:08:34 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external 
plugin: broken_plugin -2025-10-10 10:08:34 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:08:36 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:08:36 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:09:09 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:09:09 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:09:11 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:09:11 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:09:13 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:09:15 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:09:18 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:09:18 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:09:21 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:09:21 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:09:37 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:09:40 ERROR 
[snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:09:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external 
plugin: broken_plugin -2025-10-10 10:09:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:09:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:09:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:09:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:09:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:09:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:09:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:10:24 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in 
plugin -2025-10-10 10:10:26 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:10:28 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:10:28 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:10:30 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:10:30 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:10:33 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:10:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:10:35 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:10:35 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:18:18 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in 
plugin -2025-10-10 10:18:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:18:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:18:22 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:18:22 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:18:33 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:18:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:18:35 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:18:35 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:18:37 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:18:37 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:18:39 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:18:39 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:18:41 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:18:43 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:19:35 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in 
_gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:19:37 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in 
_find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:19:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:19:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:19:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:19:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:19:45 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:19:45 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:19:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:19:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:41:06 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:41:06 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:41:08 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:41:08 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:45:18 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:45:18 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:45:21 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:45:21 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:45:23 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return 
_bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:48:13 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:48:13 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:48:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:48:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:48:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:48:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:48:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:48:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:48:22 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:48:24 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - 
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:48:47 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:48:47 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:48:49 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:48:49 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:49:01 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:49:01 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:49:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:49:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:49:17 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load - module = import_module(self.module) - File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - File "", line 1050, in _gcd_import - File "", line 1027, in _find_and_load - File "", line 1006, in _find_and_load_unlocked - File "", line 688, in _load_unlocked - File "", line 883, in exec_module - File "", line 241, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:49:19 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - 
self._plugin_manager.load_setuptools_entrypoints( - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load - module = import_module(self.module) - File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - File "", line 1050, in _gcd_import - File "", line 1027, in _find_and_load - File "", line 1006, in _find_and_load_unlocked - File "", line 688, in _load_unlocked - File "", line 883, in exec_module - File "", line 241, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:49:22 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:49:22 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:49:24 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:49:24 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-2025-10-10 10:49:26 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:49:26 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:49:27 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:49:27 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:51:50 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:51:50 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 10:51:52 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:51:52 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-2025-10-10 10:51:54 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:52:10 ERROR 
[snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:52:13 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot 
register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 10:52:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:52:15 ERROR 
[snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:52:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 10:52:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 10:52:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:52:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 10:52:22 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 10:52:22 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:04:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:04:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:04:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:04:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:05:25 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in 
_find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:05:27 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 
1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:05:30 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:05:30 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 11:05:32 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:05:32 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:05:34 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:05:34 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:05:36 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:05:36 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:05:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:05:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:05:47 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return 
_bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:05:49 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:05:49 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... 
- ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:21:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:21:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:21:20 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:21:20 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:21:29 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:21:29 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 11:21:31 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:21:31 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:21:58 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:21:58 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:21:58 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:21:58 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 11:22:15 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:22:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:22:17 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:22:17 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:22:19 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load - module = import_module(self.module) - File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - File "", line 1050, in _gcd_import - File "", line 1027, in _find_and_load - File "", line 1006, in _find_and_load_unlocked - File "", line 688, in _load_unlocked - File "", line 883, in exec_module - File "", line 241, in _call_with_frames_removed - File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:22:21 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load - module = import_module(self.module) - File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - File "", line 1050, in _gcd_import - File "", line 1027, in _find_and_load - File "", line 1006, in _find_and_load_unlocked - File "", line 688, in _load_unlocked - File "", line 883, in exec_module - File "", line 241, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:22:24 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:22:24 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:22:26 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:22:26 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:23:24 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:23:24 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-2025-10-10 11:23:26 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:23:26 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:23:28 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:23:28 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:23:30 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load - module = import_module(self.module) - File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - File "", line 1050, in _gcd_import - File "", line 1027, in _find_and_load - File "", line 1006, in _find_and_load_unlocked - File "", line 688, in _load_unlocked - File "", line 883, in exec_module - File "", line 241, in _call_with_frames_removed - File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:23:41 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/importlib_metadata/__init__.py", line 221, in load - module = import_module(self.module) - File "/Users/mraba/.pyenv/versions/3.10.18/lib/python3.10/importlib/__init__.py", line 126, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - File "", line 1050, in _gcd_import - File "", line 1027, in _find_and_load - File "", line 1006, in _find_and_load_unlocked - File "", line 688, in _load_unlocked - File "", line 883, in exec_module - File "", line 241, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File 
"/Users/mraba/sources/snowflake-cli/.hatch/snowflake-cli/lib/python3.10/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:23:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:23:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:23:44 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:23:44 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:23:46 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:23:46 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-2025-10-10 11:28:29 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:28:29 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-2025-10-10 11:28:31 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:28:33 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:28:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:28:34 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:28:34 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:32:09 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:32:09 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:32:11 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:32:11 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:32:13 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:32:13 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:32:15 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:32:32 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:32:32 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:32:32 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:32:32 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. 
-2025-10-10 11:32:34 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:32:37 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:32:37 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:33:59 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:33:59 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:34:01 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:34:01 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:34:04 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:34:04 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:34:06 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in 
plugin -2025-10-10 11:34:11 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:34:11 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-2025-10-10 11:34:13 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:34:15 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:34:15 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:34:16 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:34:16 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:36:33 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:36:33 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:36:35 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:36:35 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:36:38 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:36:38 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:36:40 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in 
import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:37:40 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:37:40 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:37:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:37:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:37:42 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:37:42 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:37:44 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in 
plugin -2025-10-10 11:37:53 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:37:53 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-2025-10-10 11:37:55 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in plugin -2025-10-10 11:37:58 INFO 
[snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:37:58 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:38:00 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:38:00 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:38:57 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: broken_plugin -2025-10-10 11:38:57 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [broken_plugin]: Invalid command path [snow broken run]. Command group [broken] does not exist. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 61, in _add_plugin_to_typer - parent_group = self._find_typer_group_at_path( - current_level_group=self._main_typer_command_group, - remaining_parent_path_segments=command_spec.parent_command_path.path_segments, - command_spec=command_spec, - ) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 144, in _find_typer_group_at_path - raise RuntimeError( - ...<2 lines>... - ) -RuntimeError: Invalid command path [snow broken run]. Command group [broken] does not exist. -2025-10-10 11:39:00 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:39:00 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." - ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:39:02 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:39:02 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - ~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - ~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( - f"Cannot add command [{command_spec.full_command_path}] because it already exists." 
- ) -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:39:04 ERROR [snowflake.cli._app.commands_registration.command_plugins_loader] Cannot register plugin [failing_plugin]: Some error in plugin -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/command_plugins_loader.py", line 63, in register_external_plugins - self._plugin_manager.load_setuptools_entrypoints( - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^ - SNOWCLI_COMMAND_PLUGIN_NAMESPACE, plugin_name - ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - ) - ^ - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/pluggy/_manager.py", line 416, in load_setuptools_entrypoints - plugin = ep.load() - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/metadata/__init__.py", line 179, in load - module = import_module(match.group('module')) - File "/opt/homebrew/Cellar/python@3.13/3.13.7/Frameworks/Python.framework/Versions/3.13/lib/python3.13/importlib/__init__.py", line 88, in import_module - return _bootstrap._gcd_import(name[level:], package, level) - ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - File "", line 1387, in _gcd_import - File "", line 1360, in _find_and_load - File "", line 1331, in _find_and_load_unlocked - File "", line 935, in _load_unlocked - File "", line 1026, in exec_module - File "", line 488, in _call_with_frames_removed - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/plugin_spec.py", line 21, in - from snowflakecli.test_plugins.failing_plugin import commands - File "/Users/mraba/sources/snowflake-cli/.hatch/integration/lib/python3.13/site-packages/snowflakecli/test_plugins/failing_plugin/commands.py", line 19, in - raise RuntimeError("Some error in plugin") -RuntimeError: Some error in 
plugin -2025-10-10 11:40:05 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:40:05 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. -Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. -2025-10-10 11:40:07 INFO [snowflake.cli._app.commands_registration.command_plugins_loader] Loaded external plugin: override -2025-10-10 11:40:07 ERROR [snowflake.cli._app.commands_registration.typer_registration] Cannot register plugin [override]: Cannot add command [snow connection list] because it already exists. 
-Traceback (most recent call last): - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 40, in register_commands - self._add_plugin_to_typer(plugin.command_spec) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 66, in _add_plugin_to_typer - self._validate_command_spec(command_spec, parent_group) - File "/Users/mraba/sources/snowflake-cli/src/snowflake/cli/_app/commands_registration/typer_registration.py", line 106, in _validate_command_spec - raise RuntimeError( -RuntimeError: Cannot add command [snow connection list] because it already exists. From bef03fafc91818f044f9f945a865a2d401a0efa9 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 11:05:04 +0200 Subject: [PATCH 44/78] SNOW-2306184: config refactor - cleanup 2 --- .github/workflows/test_integration.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test_integration.yaml b/.github/workflows/test_integration.yaml index 07ef4ace7e..0ef3b78e84 100644 --- a/.github/workflows/test_integration.yaml +++ b/.github/workflows/test_integration.yaml @@ -46,6 +46,7 @@ jobs: hatch-run: integration:test secrets: inherit + # Repo owner has commented /ok-to-test on a (fork-based) pull request integration-fork: needs: define-matrix strategy: From 5adb52a59fd1e9592f57a865d2836bb76fff9b64 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 11:11:23 +0200 Subject: [PATCH 45/78] SNOW-2306184: config refactor - cleanup 3 --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ae0560a9d5..6b0bc51d14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -198,8 +198,6 @@ markers = [ "integration_experimental: experimental integration test", "no_qa: mark test as not to be run in QA", "qa_only: mark test as to be run only in QA", - "config_ng: mark test as using new config system", - "legacy: mark test as 
using legacy config system", ] From 7edbdc75adb26cd846a54108d84d6a9026ee0e03 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 11:34:36 +0200 Subject: [PATCH 46/78] SNOW-2306184: config refactor - cleanup 4 --- src/snowflake/cli/api/config_provider.py | 4 ++-- tests/test_config.py | 10 +++------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index ff662b0fe7..8bc515b154 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -637,7 +637,7 @@ def invalidate_cache(self) -> None: self._last_config_override = None -def _is_alternative_config_enabled() -> bool: +def is_alternative_config_enabled() -> bool: """ Check if alternative configuration handling is enabled via environment variable. Does not use the built-in feature flags mechanism. @@ -655,7 +655,7 @@ def get_config_provider() -> ConfigProvider: Factory function to get the appropriate configuration provider based on environment variable. 
""" - if _is_alternative_config_enabled(): + if is_alternative_config_enabled(): return AlternativeConfigProvider() return LegacyConfigProvider() diff --git a/tests/test_config.py b/tests/test_config.py index 5742676c8a..bae804c7d9 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -28,17 +28,13 @@ get_env_variable_name, set_config_value, ) +from snowflake.cli.api.config_provider import is_alternative_config_enabled from snowflake.cli.api.exceptions import MissingConfigurationError from tests.testing_utils.files_and_dirs import assert_file_permissions_are_strict from tests_common import IS_WINDOWS -def is_config_ng_enabled(): - """Check if config_ng is enabled via environment variable""" - return os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") == "true" - - def test_empty_config_file_is_created_if_not_present(): from snowflake.cli.api.utils.path_utils import path_resolver @@ -390,7 +386,7 @@ def assert_correct_connections_loaded(): # Legacy version - skip when config_ng is enabled @pytest.mark.skipif( - is_config_ng_enabled(), + is_alternative_config_enabled(), reason="Legacy behavior: connections.toml replaces all connections from config.toml", ) def test_connections_toml_override_config_toml_legacy( @@ -413,7 +409,7 @@ def test_connections_toml_override_config_toml_legacy( # Config_ng version - skip when config_ng is NOT enabled @pytest.mark.skipif( - not is_config_ng_enabled(), + not is_alternative_config_enabled(), reason="Config_ng behavior: connections.toml merges with config.toml per-key", ) def test_connections_toml_override_config_toml_config_ng( From 6c7173532cc6117be5c9181582938ca1606aeec3 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 11:44:26 +0200 Subject: [PATCH 47/78] SNOW-2306184: config refactor - cleanup 5 --- tests/conftest.py | 20 -------------------- tests_common/__init__.py | 2 ++ tests_common/conftest.py | 21 +++++++++++++++++++++ tests_e2e/conftest.py | 21 --------------------- tests_integration/conftest.py | 21 
--------------------- 5 files changed, 23 insertions(+), 62 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9e0a7cb2d6..65073bf5af 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -89,31 +89,11 @@ def matches( ) -class ConfigModeSnapshotExtension(AmberSnapshotExtension): - """Snapshot extension that includes config mode in snapshot file name.""" - - @classmethod - def _get_file_basename(cls, *, test_location, index): - """Generate snapshot filename with config mode suffix.""" - config_mode = ( - "config_ng" if os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") else "legacy" - ) - basename = super()._get_file_basename(test_location=test_location, index=index) - # Insert config mode before .ambr extension - return f"{basename}_{config_mode}" - - @pytest.fixture() def os_agnostic_snapshot(snapshot): return snapshot.use_extension(CustomSnapshotExtension) -@pytest.fixture() -def config_snapshot(snapshot): - """Config-mode-aware snapshot fixture for tests that differ between legacy and config_ng.""" - return snapshot.use_extension(ConfigModeSnapshotExtension) - - @pytest.fixture(autouse=True) # Global context and logging levels reset is required. # Without it, state from previous tests is visible in following tests. 
diff --git a/tests_common/__init__.py b/tests_common/__init__.py index b975752952..eb42887326 100644 --- a/tests_common/__init__.py +++ b/tests_common/__init__.py @@ -17,3 +17,5 @@ from tests_common.path_utils import * IS_WINDOWS = platform.system() == "Windows" + +__all__ = ["IS_WINDOWS", "ConfigModeSnapshotExtension", "config_snapshot"] diff --git a/tests_common/conftest.py b/tests_common/conftest.py index 1b889f02ed..734506a470 100644 --- a/tests_common/conftest.py +++ b/tests_common/conftest.py @@ -23,6 +23,7 @@ import pytest import yaml +from syrupy.extensions.amber import AmberSnapshotExtension from snowflake.cli._plugins.streamlit.streamlit_entity import StreamlitEntity from snowflake.cli._plugins.streamlit.streamlit_entity_model import StreamlitEntityModel @@ -144,3 +145,23 @@ def _update(snowflake_yml_path: Path, parameter_path: str, value=None): sys.version_info >= PYTHON_3_12, reason="requires python3.11 or lower", ) + + +class ConfigModeSnapshotExtension(AmberSnapshotExtension): + """Snapshot extension that includes config mode in snapshot file name.""" + + @classmethod + def _get_file_basename(cls, *, test_location, index): + """Generate snapshot filename with config mode suffix.""" + config_mode = ( + "config_ng" if os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") else "legacy" + ) + basename = super()._get_file_basename(test_location=test_location, index=index) + # Insert config mode before .ambr extension + return f"{basename}_{config_mode}" + + +@pytest.fixture() +def config_snapshot(snapshot): + """Config-mode-aware snapshot fixture for tests that differ between legacy and config_ng.""" + return snapshot.use_extension(ConfigModeSnapshotExtension) diff --git a/tests_e2e/conftest.py b/tests_e2e/conftest.py index 6d10647b26..d0190e7fb9 100644 --- a/tests_e2e/conftest.py +++ b/tests_e2e/conftest.py @@ -25,7 +25,6 @@ from snowflake.cli import __about__ from snowflake.cli.api.constants import PYTHON_3_12 from snowflake.cli.api.secure_path import SecurePath 
-from syrupy.extensions.amber import AmberSnapshotExtension from tests_common import IS_WINDOWS @@ -36,26 +35,6 @@ ] -class ConfigModeSnapshotExtension(AmberSnapshotExtension): - """Snapshot extension that includes config mode in snapshot file name.""" - - @classmethod - def _get_file_basename(cls, *, test_location, index): - """Generate snapshot filename with config mode suffix.""" - config_mode = ( - "config_ng" if os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") else "legacy" - ) - basename = super()._get_file_basename(test_location=test_location, index=index) - # Insert config mode before .ambr extension - return f"{basename}_{config_mode}" - - -@pytest.fixture() -def config_snapshot(snapshot): - """Config-mode-aware snapshot fixture for tests that differ between legacy and config_ng.""" - return snapshot.use_extension(ConfigModeSnapshotExtension) - - def _clean_output(text: str): """ Replacing util to clean up console output. Typer is using rich.Panel to show the --help content. diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index 92f24a8c8c..896dcec7c0 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -31,7 +31,6 @@ import pytest import yaml -from syrupy.extensions.amber import AmberSnapshotExtension from typer import Typer from typer.testing import CliRunner @@ -56,26 +55,6 @@ ] -class ConfigModeSnapshotExtension(AmberSnapshotExtension): - """Snapshot extension that includes config mode in snapshot file name.""" - - @classmethod - def _get_file_basename(cls, *, test_location, index): - """Generate snapshot filename with config mode suffix.""" - config_mode = ( - "config_ng" if os.getenv("SNOWFLAKE_CLI_CONFIG_V2_ENABLED") else "legacy" - ) - basename = super()._get_file_basename(test_location=test_location, index=index) - # Insert config mode before .ambr extension - return f"{basename}_{config_mode}" - - -@pytest.fixture() -def config_snapshot(snapshot): - """Config-mode-aware snapshot fixture for tests 
that differ between legacy and config_ng.""" - return snapshot.use_extension(ConfigModeSnapshotExtension) - - TEST_DIR = Path(__file__).parent DEFAULT_TEST_CONFIG = "connection_configs.toml" WORLD_READABLE_CONFIG = "world_readable.toml" From fd3d744de547be76f53a7673a0ec952ef8850852 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 14:43:54 +0200 Subject: [PATCH 48/78] SNOW-2306184: config refactor - connections.toml legacy behaviour --- .../cli/_plugins/connection/commands.py | 4 +- src/snowflake/cli/api/config.py | 30 +++--------- src/snowflake/cli/api/config_ng/resolver.py | 32 ++++++++++++- src/snowflake/cli/api/config_ng/sources.py | 36 ++++++++++++++ src/snowflake/cli/api/config_provider.py | 8 +++- tests/config_ng/test_configuration.py | 14 +++--- tests/test_config.py | 47 ++----------------- 7 files changed, 91 insertions(+), 80 deletions(-) diff --git a/src/snowflake/cli/_plugins/connection/commands.py b/src/snowflake/cli/_plugins/connection/commands.py index f9bd8930b6..7d39c12e8f 100644 --- a/src/snowflake/cli/_plugins/connection/commands.py +++ b/src/snowflake/cli/_plugins/connection/commands.py @@ -108,12 +108,12 @@ def list_connections( Lists configured connections. 
""" from snowflake.cli.api.config_provider import ( - _is_alternative_config_enabled, get_config_provider_singleton, + is_alternative_config_enabled, ) # Use provider directly for config_ng to pass the flag - if _is_alternative_config_enabled(): + if is_alternative_config_enabled(): provider = get_config_provider_singleton() connections = provider.get_all_connections(include_env_connections=all_sources) else: diff --git a/src/snowflake/cli/api/config.py b/src/snowflake/cli/api/config.py index 6c260f97b1..77c9aac461 100644 --- a/src/snowflake/cli/api/config.py +++ b/src/snowflake/cli/api/config.py @@ -322,35 +322,17 @@ def config_section_exists(*path) -> bool: def get_all_connections() -> dict[str, ConnectionConfig]: - # Use config provider if available - try: - from snowflake.cli.api.config_provider import get_config_provider_singleton + from snowflake.cli.api.config_provider import get_config_provider_singleton - provider = get_config_provider_singleton() - return provider.get_all_connections() - except Exception: - # Fall back to legacy implementation - return { - k: ConnectionConfig.from_dict(connection_dict) - for k, connection_dict in get_config_section("connections").items() - } + provider = get_config_provider_singleton() + return provider.get_all_connections() def get_connection_dict(connection_name: str) -> dict: - # Use config provider if available - try: - from snowflake.cli.api.config_provider import get_config_provider_singleton + from snowflake.cli.api.config_provider import get_config_provider_singleton - provider = get_config_provider_singleton() - return provider.get_connection_dict(connection_name) - except Exception: - # Fall back to legacy implementation - try: - return get_config_section(CONNECTIONS_SECTION, connection_name) - except KeyError: - raise MissingConfigurationError( - f"Connection {connection_name} is not configured" - ) + provider = get_config_provider_singleton() + return provider.get_connection_dict(connection_name) def 
get_default_connection_name() -> str: diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index 09ffbc3f63..d44811de77 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -349,7 +349,8 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A 1. Iterate sources in order (lowest to highest priority) 2. Record all discovered values in history 3. For connection keys (connections.{name}.{param}): - - Merge connection-by-connection: later sources extend/overwrite individual params + - If connections.toml defines a connection, it REPLACES cli_config_toml only + - SnowSQL config, environment vars, and CLI parameters still override 4. For flat keys: later sources overwrite earlier sources 5. Mark which value was selected 6. Return final resolved values @@ -365,6 +366,19 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A # Track connection values separately for intelligent merging connections: Dict[str, Dict[str, ConfigValue]] = defaultdict(dict) + # Identify sources that connections.toml replaces + # connections.toml only replaces cli_config_toml, not SnowSQL config + cli_config_source = "cli_config_toml" + connections_file_source = None + connections_to_replace: set[str] = set() + + # First pass: find connections.toml and identify connections to replace + for source in self._sources: + if hasattr(source, "is_connections_file") and source.is_connections_file: + connections_file_source = source + connections_to_replace = source.get_defined_connections() + break + # Process sources in order (first = lowest priority, last = highest) for source in self._sources: try: @@ -384,6 +398,22 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A param = parts[2] param_key = f"connections.{conn_name}.{param}" + # Replacement logic: Skip cli_config_toml if connection is in connections.toml + 
# SnowSQL config is NOT replaced by connections.toml + is_cli_config = source.source_name == cli_config_source + connection_in_connections_toml = ( + conn_name in connections_to_replace + ) + + if is_cli_config and connection_in_connections_toml: + # Skip this value - connections.toml replaces cli_config_toml + log.debug( + "Skipping %s from %s (replaced by connections.toml)", + param_key, + source.source_name, + ) + continue + # Merge at parameter level: later source overwrites/extends connections[conn_name][param_key] = config_value else: diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index e9e48b3a5d..0c9cf021e6 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -312,6 +312,42 @@ def __init__(self): def source_name(self) -> str: return "connections_toml" + @property + def is_connections_file(self) -> bool: + """Mark this as the dedicated connections file source.""" + return True + + def get_defined_connections(self) -> set[str]: + """ + Return set of connection names that are defined in connections.toml. + This is used by the resolver to implement replacement behavior. 
+ """ + if not self._file_path.exists(): + return set() + + try: + with open(self._file_path, "rb") as f: + data = tomllib.load(f) + + connection_names = set() + + # Check for direct connection sections (legacy format) + for section_name, section_data in data.items(): + if isinstance(section_data, dict) and section_name != "connections": + connection_names.add(section_name) + + # Check for nested [connections] section format + connections_section = data.get("connections", {}) + if isinstance(connections_section, dict): + for conn_name in connections_section.keys(): + connection_names.add(conn_name) + + return connection_names + + except Exception as e: + log.debug("Failed to read connections.toml: %s", e) + return set() + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Read connections.toml if it exists. diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 8bc515b154..df7081403f 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -177,10 +177,14 @@ def get_connection_dict(self, connection_name: str) -> dict: ) def get_all_connections(self, include_env_connections: bool = False) -> dict: - from snowflake.cli.api.config import get_all_connections + from snowflake.cli.api.config import ConnectionConfig, get_config_section # Legacy provider ignores the flag since it never had env connections - return get_all_connections() + connections = get_config_section("connections") + return { + name: ConnectionConfig.from_dict(self._transform_private_key_raw(config)) + for name, config in connections.items() + } class AlternativeConfigProvider(ConfigProvider): diff --git a/tests/config_ng/test_configuration.py b/tests/config_ng/test_configuration.py index 7faad86c89..976a2fd341 100644 --- a/tests/config_ng/test_configuration.py +++ b/tests/config_ng/test_configuration.py @@ -195,8 +195,8 @@ def test_complete_7_level_chain(config_ng_setup): # Level 3 
provides warehouse assert conn["warehouse"] == "level3-wh" - # Level 2 provides password - assert conn["password"] == "level2-pass" + # Level 2 (cli_config) is skipped because connections.toml defines this connection + # password from cli_config is NOT present # Level 1 provides user assert conn["user"] == "level1-user" @@ -444,7 +444,7 @@ def test_all_file_sources_precedence(config_ng_setup): expected = { "account": "from-connections", # Level 3 wins - "user": "cli-user", # Level 2 wins + "user": "snowsql-user", # Level 1 only (cli_config skipped) "warehouse": "snowsql-warehouse", # Level 1 only source "password": "connections-pass", # Level 3 wins } @@ -575,7 +575,7 @@ def test_all_files_plus_snowsql_env(config_ng_setup): expected = { "account": "env-account", # Level 4 wins "user": "snowsql-user", # Level 1 only - "warehouse": "cli-warehouse", # Level 2 only + # warehouse from cli_config is skipped (connections.toml replaces cli_config) "database": "toml-db", # Level 3 only } assert conn == expected @@ -619,7 +619,7 @@ def test_all_files_plus_general_env(config_ng_setup): expected = { "account": "env-account", # Level 6 wins "user": "snowsql-user", # Level 1 only - "role": "cli-role", # Level 2 only + # role from cli_config is skipped (connections.toml replaces cli_config) "warehouse": "env-warehouse", # Level 6 wins } assert conn == expected @@ -739,7 +739,7 @@ def test_all_files_plus_two_env_types(config_ng_setup): expected = { "account": "conn-specific", # Level 5 wins "user": "snowsql-user", # Level 1 only - "password": "cli-password", # Level 2 only + # password from cli_config is skipped (connections.toml replaces cli_config) "warehouse": "conn-warehouse", # Level 5 wins } assert conn == expected @@ -904,7 +904,7 @@ def test_multiple_connections_different_source_patterns(config_ng_setup): conn1 = get_connection_dict("conn1") expected1 = { "account": "conn1-env", # Connection-specific env wins - "user": "conn1-user", # CLI config + # user from cli_config is 
skipped (connections.toml replaces cli_config) "warehouse": "conn1-warehouse", # Connections TOML "schema": "common-schema", # General env } diff --git a/tests/test_config.py b/tests/test_config.py index bae804c7d9..9e3c2ba165 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -28,7 +28,6 @@ get_env_variable_name, set_config_value, ) -from snowflake.cli.api.config_provider import is_alternative_config_enabled from snowflake.cli.api.exceptions import MissingConfigurationError from tests.testing_utils.files_and_dirs import assert_file_permissions_are_strict @@ -384,12 +383,7 @@ def assert_correct_connections_loaded(): assert_correct_connections_loaded() -# Legacy version - skip when config_ng is enabled -@pytest.mark.skipif( - is_alternative_config_enabled(), - reason="Legacy behavior: connections.toml replaces all connections from config.toml", -) -def test_connections_toml_override_config_toml_legacy( +def test_connections_toml_override_config_toml( test_snowcli_config, snowflake_home, config_manager ): connections_toml = snowflake_home / "connections.toml" @@ -400,49 +394,14 @@ def test_connections_toml_override_config_toml_legacy( ) config_init(test_snowcli_config) - # Legacy: Only connections from connections.toml are present + # Both legacy and config_ng: Only connections from connections.toml are present + # connections.toml REPLACES config.toml connections (not merge) assert get_default_connection_dict() == {"database": "overridden_database"} assert config_manager["connections"] == { "default": {"database": "overridden_database"} } -# Config_ng version - skip when config_ng is NOT enabled -@pytest.mark.skipif( - not is_alternative_config_enabled(), - reason="Config_ng behavior: connections.toml merges with config.toml per-key", -) -def test_connections_toml_override_config_toml_config_ng( - test_snowcli_config, snowflake_home, config_manager -): - """Test config_ng behavior: connections.toml merges with config.toml per-key""" - connections_toml = 
snowflake_home / "connections.toml" - connections_toml.write_text( - """[default] - database = "overridden_database" - """ - ) - config_init(test_snowcli_config) - - # Config_ng: Merged - database from connections.toml, other keys from config.toml - # The key difference from legacy: keys from config.toml are preserved - default_conn = get_default_connection_dict() - - # Key from connections.toml (level 3) overrides - assert default_conn["database"] == "overridden_database" - - # Keys from config.toml (level 2) are preserved - assert default_conn["schema"] == "test_public" - assert default_conn["role"] == "test_role" - assert default_conn["warehouse"] == "xs" - assert default_conn["password"] == "dummy_password" - - # Verify other connections from config.toml are also accessible - full_conn = get_connection_dict("full") - assert full_conn["account"] == "dev_account" - assert full_conn["user"] == "dev_user" - - parametrize_chmod = pytest.mark.parametrize( "chmod", [ From c081f9475a5a1515c3dfa601c663d5e6fa1f55ed Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 16:31:09 +0200 Subject: [PATCH 49/78] SNOW-2306184: config refactor - improve resolution report --- .../cli/_plugins/helpers/commands.py | 20 +-- .../cli/api/config_ng/resolution_logger.py | 38 +++++ src/snowflake/cli/api/config_ng/resolver.py | 135 +++++++++++++++++- tests/helpers/test_show_config_sources.py | 79 +++++++--- 4 files changed, 236 insertions(+), 36 deletions(-) diff --git a/src/snowflake/cli/_plugins/helpers/commands.py b/src/snowflake/cli/_plugins/helpers/commands.py index 0e35ae7abd..e1363fc55d 100644 --- a/src/snowflake/cli/_plugins/helpers/commands.py +++ b/src/snowflake/cli/_plugins/helpers/commands.py @@ -375,10 +375,12 @@ def show_config_sources( Set SNOWFLAKE_CLI_CONFIG_V2_ENABLED=true to enable it.
""" from snowflake.cli.api.config_ng import ( - explain_configuration, export_resolution_history, is_resolution_logging_available, ) + from snowflake.cli.api.config_ng.resolution_logger import ( + get_configuration_explanation_results, + ) if not is_resolution_logging_available(): return MessageResult( @@ -401,18 +403,4 @@ def show_config_sources( f"and can be attached to support tickets." ) - # Show resolution information - explain_configuration(key=key, verbose=show_details) - - if key: - return MessageResult( - f"\n✅ Showing resolution for key: {key}\n" - f"Use --show-details to see the complete resolution chain." - ) - else: - return MessageResult( - "\n✅ Configuration resolution summary displayed above.\n" - "Use a specific key (e.g., 'snow helpers show-config-sources account') " - "to see detailed resolution for that key.\n" - "Use --show-details to see complete resolution chains for all keys." - ) + return get_configuration_explanation_results(key=key, verbose=show_details) diff --git a/src/snowflake/cli/api/config_ng/resolution_logger.py b/src/snowflake/cli/api/config_ng/resolution_logger.py index df88003a56..ed05ee40dc 100644 --- a/src/snowflake/cli/api/config_ng/resolution_logger.py +++ b/src/snowflake/cli/api/config_ng/resolution_logger.py @@ -31,6 +31,12 @@ get_config_provider_singleton, ) from snowflake.cli.api.console import cli_console +from snowflake.cli.api.output.types import ( + CollectionResult, + CommandResult, + MessageResult, + MultipleResults, +) if TYPE_CHECKING: from snowflake.cli.api.config_ng.resolver import ConfigurationResolver @@ -304,3 +310,35 @@ def explain_configuration(key: Optional[str] = None, verbose: bool = False) -> N if verbose: resolver.print_all_chains() + + +def get_configuration_explanation_results( + key: Optional[str] = None, verbose: bool = False +) -> CommandResult: + """ + Build CommandResult(s) representing a fixed-column sources table and optional + masked history message, suitable for Snow's output formats. 
+ + Returns: + - CollectionResult for the table (always) + - If verbose is True, MultipleResults with the table and a MessageResult + containing the masked resolution history (for the key or all keys) + """ + from snowflake.cli.api.config_provider import get_config_provider_singleton + + provider = get_config_provider_singleton() + provider.read_config() + + resolver = get_resolver() + if resolver is None: + return MessageResult( + "Configuration resolution logging is not available. " + f"Set {ALTERNATIVE_CONFIG_ENV_VAR}=true to enable it." + ) + + table_result: CollectionResult = resolver.build_sources_table(key) + if not verbose: + return table_result + + history_message: MessageResult = resolver.format_history_message(key) + return MultipleResults([table_result, history_message]) diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index d44811de77..6ec6df7293 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -27,7 +27,7 @@ from collections import defaultdict from datetime import datetime from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Tuple from snowflake.cli.api.config_ng.core import ( ConfigValue, @@ -35,6 +35,7 @@ ResolutionHistory, ) from snowflake.cli.api.console import cli_console +from snowflake.cli.api.output.types import CollectionResult, MessageResult if TYPE_CHECKING: from snowflake.cli.api.config_ng.core import ValueSource @@ -62,6 +63,40 @@ "token_file_path", } +# Fixed table columns ordered from most important (left) to least (right) +SourceColumn = Literal[ + "params", + "global_envs", + "connections_env", + "snowsql_env", + "connections.toml", + "config.toml", + "snowsql", +] + +TABLE_COLUMNS: Tuple[str, ...] 
= ( + "key", + "value", + "params", + "global_envs", + "connections_env", + "snowsql_env", + "connections.toml", + "config.toml", + "snowsql", +) + +# Mapping of internal source names to fixed table columns +SOURCE_TO_COLUMN: Dict[str, SourceColumn] = { + "cli_arguments": "params", + "cli_env": "global_envs", + "connection_specific_env": "connections_env", + "snowsql_env": "snowsql_env", + "connections_toml": "connections.toml", + "cli_config_toml": "config.toml", + "snowsql_config": "snowsql", +} + def _should_mask_value(key: str) -> bool: """ @@ -508,6 +543,104 @@ def get_history_summary(self) -> dict: """ return self._history_tracker.get_summary() + def build_sources_table(self, key: Optional[str] = None) -> CollectionResult: + """ + Build a tabular view of configuration sources per key. + + Columns (left to right): key, value, params, env, connections.toml, cli_config.toml, snowsql. + - value: masked final selected value for the key + - presence columns: "+" if a given source provided a value for the key, empty otherwise + """ + # Ensure history is populated + if key is None and not self._history_tracker.get_all_histories(): + # Resolve all keys to populate history + self.resolve() + elif key is not None and self._history_tracker.get_history(key) is None: + # Resolve only the specific key + self.resolve(key=key) + + histories = ( + {key: self._history_tracker.get_history(key)} + if key is not None + else self._history_tracker.get_all_histories() + ) + + def _row_items(): + for k, history in histories.items(): + if history is None: + continue + # Initialize row with fixed columns + row: Dict[str, Any] = {c: "" for c in TABLE_COLUMNS} + row["key"] = k + + # Final value (masked) + masked_final = _mask_sensitive_value(k, history.final_value) + row["value"] = masked_final + + # Mark presence per source + for entry in history.entries: + source_column = SOURCE_TO_COLUMN.get(entry.config_value.source_name) + if source_column is not None: + row[source_column] = "+" + + 
# Ensure result preserves the column order + ordered_row = {column: row[column] for column in TABLE_COLUMNS} + yield ordered_row + + return CollectionResult(_row_items()) + + def format_history_message(self, key: Optional[str] = None) -> MessageResult: + """ + Build a masked, human-readable history of merging as a single message. + If key is None, returns concatenated histories for all keys. + """ + histories = ( + {key: self.get_resolution_history(key)} + if key is not None + else self.get_all_histories() + ) + + if not histories: + return MessageResult("No resolution history available") + + lines: List[str] = [] + for k in sorted(histories.keys()): + history = histories[k] + if history is None: + continue + lines.append(f"{k} resolution chain ({len(history.entries)} sources):") + for i, entry in enumerate(history.entries, 1): + cv = entry.config_value + status_text = ( + "(SELECTED)" + if entry.was_used + else ( + f"(overridden by {entry.overridden_by})" + if entry.overridden_by + else "(not used)" + ) + ) + + masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_raw = ( + _mask_sensitive_value(cv.key, cv.raw_value) + if cv.raw_value is not None + else None + ) + value_display = f'"{masked_value}"' + if masked_raw is not None and cv.raw_value != cv.value: + value_display = f'"{masked_raw}" → {masked_value}' + + lines.append(f" {i}. {cv.source_name}: {value_display} {status_text}") + + if history.default_used: + masked_default = _mask_sensitive_value(k, history.final_value) + lines.append(f" Default value used: {masked_default}") + + lines.append("") + + return MessageResult("\n".join(lines).rstrip()) + def format_resolution_chain(self, key: str) -> str: """ Format the resolution chain for a key (debugging helper). 
diff --git a/tests/helpers/test_show_config_sources.py b/tests/helpers/test_show_config_sources.py index 78dabfb0c6..a2e5484531 100644 --- a/tests/helpers/test_show_config_sources.py +++ b/tests/helpers/test_show_config_sources.py @@ -71,65 +71,106 @@ def test_command_unavailable_message_when_logging_not_available( @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") - @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + @mock.patch( + "snowflake.cli.api.config_ng.resolution_logger.get_configuration_explanation_results" + ) def test_command_shows_summary_without_arguments( - self, mock_explain, mock_is_available, runner + self, mock_get_results, mock_is_available, runner ): """Command should show configuration summary when called without arguments.""" + from snowflake.cli.api.output.types import CollectionResult + mock_is_available.return_value = True + mock_get_results.return_value = CollectionResult([]) result = runner.invoke(["helpers", COMMAND]) assert result.exit_code == 0 - mock_explain.assert_called_once_with(key=None, verbose=False) - assert "Configuration resolution summary displayed above" in result.output + mock_get_results.assert_called_once_with(key=None, verbose=False) @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") - @mock.patch("snowflake.cli.api.config_ng.explain_configuration") - def test_command_shows_specific_key(self, mock_explain, mock_is_available, runner): + @mock.patch( + "snowflake.cli.api.config_ng.resolution_logger.get_configuration_explanation_results" + ) + def test_command_shows_specific_key( + self, mock_get_results, mock_is_available, runner + ): """Command should show resolution for specific key when provided.""" + from snowflake.cli.api.output.types import CollectionResult + mock_is_available.return_value = True + 
mock_get_results.return_value = CollectionResult([]) result = runner.invoke(["helpers", COMMAND, "account"]) assert result.exit_code == 0 - mock_explain.assert_called_once_with(key="account", verbose=False) - assert "Showing resolution for key: account" in result.output + mock_get_results.assert_called_once_with(key="account", verbose=False) @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") - @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + @mock.patch( + "snowflake.cli.api.config_ng.resolution_logger.get_configuration_explanation_results" + ) def test_command_shows_details_with_flag( - self, mock_explain, mock_is_available, runner + self, mock_get_results, mock_is_available, runner ): """Command should show detailed resolution when --show-details flag is used.""" + from snowflake.cli.api.output.types import ( + CollectionResult, + MessageResult, + MultipleResults, + ) + mock_is_available.return_value = True + mock_get_results.return_value = MultipleResults( + [CollectionResult([]), MessageResult("test history")] + ) result = runner.invoke(["helpers", COMMAND, "--show-details"]) assert result.exit_code == 0 - mock_explain.assert_called_once_with(key=None, verbose=True) - assert "Configuration resolution summary displayed above" in result.output + mock_get_results.assert_called_once_with(key=None, verbose=True) @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") - @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + @mock.patch( + "snowflake.cli.api.config_ng.resolution_logger.get_configuration_explanation_results" + ) def test_command_shows_details_with_short_flag( - self, mock_explain, mock_is_available, runner + self, mock_get_results, mock_is_available, runner ): """Command should show detailed resolution when -d flag is used.""" + 
from snowflake.cli.api.output.types import ( + CollectionResult, + MessageResult, + MultipleResults, + ) + mock_is_available.return_value = True + mock_get_results.return_value = MultipleResults( + [CollectionResult([]), MessageResult("test history")] + ) result = runner.invoke(["helpers", COMMAND, "-d"]) assert result.exit_code == 0 - mock_explain.assert_called_once_with(key=None, verbose=True) + mock_get_results.assert_called_once_with(key=None, verbose=True) @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") - @mock.patch("snowflake.cli.api.config_ng.explain_configuration") + @mock.patch( + "snowflake.cli.api.config_ng.resolution_logger.get_configuration_explanation_results" + ) def test_command_shows_key_with_details( - self, mock_explain, mock_is_available, runner + self, mock_get_results, mock_is_available, runner ): """Command should show detailed resolution for specific key.""" + from snowflake.cli.api.output.types import ( + CollectionResult, + MessageResult, + MultipleResults, + ) + mock_is_available.return_value = True + mock_get_results.return_value = MultipleResults( + [CollectionResult([]), MessageResult("test history")] + ) result = runner.invoke(["helpers", COMMAND, "user", "--show-details"]) assert result.exit_code == 0 - mock_explain.assert_called_once_with(key="user", verbose=True) - assert "Showing resolution for key: user" in result.output + mock_get_results.assert_called_once_with(key="user", verbose=True) @mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}, clear=True) @mock.patch("snowflake.cli.api.config_ng.is_resolution_logging_available") From 66af182fc9bc29d3ef73344635bc99455e0a2c3b Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 16:47:28 +0200 Subject: [PATCH 50/78] SNOW-2306184: config refactor - improve connections env parsing --- src/snowflake/cli/api/config_ng/sources.py | 44 ++++++++++++++-------- 
src/snowflake/cli/api/config_provider.py | 2 +- tests/config_ng/test_env_parsing.py | 28 ++++++++++++++ 3 files changed, 57 insertions(+), 17 deletions(-) create mode 100644 tests/config_ng/test_env_parsing.py diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 0c9cf021e6..25320d7671 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -549,23 +549,35 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: for env_name, env_value in os.environ.items(): # Check for connection-specific pattern: SNOWFLAKE_CONNECTIONS__ if env_name.startswith("SNOWFLAKE_CONNECTIONS_"): - # Extract connection name and config key + # Extract remainder after the prefix remainder = env_name[len("SNOWFLAKE_CONNECTIONS_") :] - parts = remainder.split("_", 1) - if len(parts) == 2: - conn_name_upper, config_key_upper = parts - conn_name = conn_name_upper.lower() - config_key = config_key_upper.lower() - - if config_key in _ENV_CONFIG_KEYS: - full_key = f"connections.{conn_name}.{config_key}" - if key is None or full_key == key: - values[full_key] = ConfigValue( - key=full_key, - value=env_value, - source_name=self.source_name, - raw_value=f"{env_name}={env_value}", - ) + + # Find the longest matching key suffix from known config keys to + # correctly handle underscores both in connection names and keys + match: tuple[str, str] | None = None + for candidate in sorted(_ENV_CONFIG_KEYS, key=len, reverse=True): + key_suffix = "_" + candidate.upper() + if remainder.endswith(key_suffix): + conn_name_upper = remainder[: -len(key_suffix)] + if conn_name_upper: # ensure non-empty connection name + match = (conn_name_upper, candidate) + break + + if not match: + # Unknown/unsupported key suffix; ignore + continue + + conn_name_upper, config_key = match + conn_name = conn_name_upper.lower() + + full_key = f"connections.{conn_name}.{config_key}" + if key is None or full_key == 
key: + values[full_key] = ConfigValue( + key=full_key, + value=env_value, + source_name=self.source_name, + raw_value=f"{env_name}={env_value}", + ) return values diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index df7081403f..52b37ba4e7 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -603,7 +603,7 @@ def _get_file_based_connections(self) -> dict: connections_prefix = "connections." assert self._resolver is not None - for source in self._resolver._sources: # noqa: SLF001 + for source in self._resolver.get_sources(): if source.source_name not in file_source_names: continue diff --git a/tests/config_ng/test_env_parsing.py b/tests/config_ng/test_env_parsing.py new file mode 100644 index 0000000000..f5bb085e53 --- /dev/null +++ b/tests/config_ng/test_env_parsing.py @@ -0,0 +1,28 @@ +# Copyright (c) 2024 Snowflake Inc. + +"""Focused tests for environment variable parsing in config_ng.""" + + +def test_connection_specific_env_with_underscores(config_ng_setup): + """Connection names containing underscores should parse correctly. + + Also validate keys that themselves contain underscores (e.g., PRIVATE_KEY_PATH). 
+ """ + + env_vars = { + # Connection-specific variables for connection name with underscores + "SNOWFLAKE_CONNECTIONS_DEV_US_EAST_ACCOUNT": "from-specific", + "SNOWFLAKE_CONNECTIONS_DEV_US_EAST_PRIVATE_KEY_PATH": "/tmp/example_key.pem", + # General env remains available for other flat keys + "SNOWFLAKE_SCHEMA": "general-schema", + } + + with config_ng_setup(env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("dev_us_east") + + assert conn["account"] == "from-specific" + assert conn["private_key_path"] == "/tmp/example_key.pem" + # Ensure general env still contributes flat keys + assert conn["schema"] == "general-schema" From d4ce1e90788a7b9bcaaae740ba6f73bf53f8ef99 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 15 Oct 2025 17:02:24 +0200 Subject: [PATCH 51/78] SNOW-2306184: config refactor - clean tmp files --- src/snowflake/cli/api/config_ng/core.py | 15 ++++++- src/snowflake/cli/api/config_ng/resolver.py | 2 - src/snowflake/cli/api/config_ng/sources.py | 18 ++++---- src/snowflake/cli/api/config_provider.py | 50 +++++++++++++++++++-- tests/config_ng/test_private_key_cleanup.py | 39 ++++++++++++++++ 5 files changed, 107 insertions(+), 17 deletions(-) create mode 100644 tests/config_ng/test_private_key_cleanup.py diff --git a/src/snowflake/cli/api/config_ng/core.py b/src/snowflake/cli/api/config_ng/core.py index e7969fd14a..80b764d59f 100644 --- a/src/snowflake/cli/api/config_ng/core.py +++ b/src/snowflake/cli/api/config_ng/core.py @@ -26,7 +26,7 @@ from abc import ABC, abstractmethod from dataclasses import dataclass, field from datetime import datetime -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Callable, Dict, List, Literal, Optional @dataclass(frozen=True) @@ -84,9 +84,20 @@ class ValueSource(ABC): Precedence is determined by the order sources are provided to the resolver. 
""" + # Allowed source names for config resolution + SourceName = Literal[ + "snowsql_config", + "cli_config_toml", + "connections_toml", + "snowsql_env", + "connection_specific_env", + "cli_env", + "cli_arguments", + ] + @property @abstractmethod - def source_name(self) -> str: + def source_name(self) -> SourceName: """ Unique identifier for this source. Examples: "cli_arguments", "snowsql_config", "cli_env" diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index 6ec6df7293..5e83c3d887 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -404,13 +404,11 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A # Identify sources that connections.toml replaces # connections.toml only replaces cli_config_toml, not SnowSQL config cli_config_source = "cli_config_toml" - connections_file_source = None connections_to_replace: set[str] = set() # First pass: find connections.toml and identify connections to replace for source in self._sources: if hasattr(source, "is_connections_file") and source.is_connections_file: - connections_file_source = source connections_to_replace = source.get_defined_connections() break diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 25320d7671..95b47bc3d0 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -32,7 +32,7 @@ import logging import os from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Dict, Final, Optional from snowflake.cli.api.config_ng.core import ConfigValue, ValueSource @@ -117,7 +117,7 @@ def __init__(self): ] @property - def source_name(self) -> str: + def source_name(self) -> "ValueSource.SourceName": return "snowsql_config" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: @@ -225,7 +225,7 @@ def __init__(self): ] 
@property - def source_name(self) -> str: + def source_name(self) -> "ValueSource.SourceName": return "cli_config_toml" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: @@ -309,7 +309,7 @@ def __init__(self): self._file_path = Path.home() / ".snowflake" / "connections.toml" @property - def source_name(self) -> str: + def source_name(self) -> "ValueSource.SourceName": return "connections_toml" @property @@ -448,7 +448,7 @@ class SnowSQLEnvironment(ValueSource): } @property - def source_name(self) -> str: + def source_name(self) -> "ValueSource.SourceName": return "snowsql_env" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: @@ -485,7 +485,7 @@ def supports_key(self, key: str) -> bool: # Base configuration keys that can be set via environment -_ENV_CONFIG_KEYS = [ +_ENV_CONFIG_KEYS: Final[list[str]] = [ "account", "user", "password", @@ -533,7 +533,7 @@ class ConnectionSpecificEnvironment(ValueSource): """ @property - def source_name(self) -> str: + def source_name(self) -> "ValueSource.SourceName": return "connection_specific_env" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: @@ -608,7 +608,7 @@ class CliEnvironment(ValueSource): """ @property - def source_name(self) -> str: + def source_name(self) -> "ValueSource.SourceName": return "cli_env" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: @@ -677,7 +677,7 @@ def __init__(self, cli_context: Optional[Dict[str, Any]] = None): self._cli_context = cli_context or {} @property - def source_name(self) -> str: + def source_name(self) -> "ValueSource.SourceName": return "cli_arguments" def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 52b37ba4e7..321630222c 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -14,15 +14,17 @@ from __future__ import 
annotations +import atexit import os from abc import ABC, abstractmethod from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Dict, Final, Optional if TYPE_CHECKING: + from snowflake.cli.api.config_ng.core import ValueSource from snowflake.cli.api.config_ng.resolver import ConfigurationResolver -ALTERNATIVE_CONFIG_ENV_VAR = "SNOWFLAKE_CLI_CONFIG_V2_ENABLED" +ALTERNATIVE_CONFIG_ENV_VAR: Final[str] = "SNOWFLAKE_CLI_CONFIG_V2_ENABLED" class ConfigProvider(ABC): @@ -97,7 +99,6 @@ def _transform_private_key_raw(self, connection_dict: dict) -> dict: if "private_key_file" in connection_dict: return connection_dict - import os import tempfile try: @@ -116,6 +117,14 @@ def _transform_private_key_raw(self, connection_dict: dict) -> dict: result["private_key_file"] = temp_file_path del result["private_key_raw"] + # Track created temp file on the provider instance for cleanup + temp_files_attr = "_temp_private_key_files" + existing = getattr(self, temp_files_attr, None) + if existing is None: + setattr(self, temp_files_attr, {temp_file_path}) + else: + existing.add(temp_file_path) + return result except Exception: @@ -123,6 +132,20 @@ def _transform_private_key_raw(self, connection_dict: dict) -> dict: # The error will be handled downstream return connection_dict + def cleanup_temp_files(self) -> None: + """Delete any temporary files created from private_key_raw transformation.""" + temp_files = getattr(self, "_temp_private_key_files", None) + if not temp_files: + return + to_remove = list(temp_files) + for path in to_remove: + try: + Path(path).unlink(missing_ok=True) + except Exception: + # Best-effort cleanup; ignore failures + pass + temp_files.clear() + class LegacyConfigProvider(ConfigProvider): """ @@ -409,7 +432,7 @@ def section_exists(self, *path) -> bool: ) # Source priority levels (higher number = higher priority) - _SOURCE_PRIORITIES = { + _SOURCE_PRIORITIES: Final[dict["ValueSource.SourceName", 
int]] = { "snowsql_config": 1, "cli_config_toml": 2, "connections_toml": 3, @@ -683,4 +706,23 @@ def reset_config_provider(): Useful for testing and when config source changes. """ global _config_provider_instance + # Cleanup any temp files created by the current provider instance + if _config_provider_instance is not None: + try: + _config_provider_instance.cleanup_temp_files() + except Exception: + pass _config_provider_instance = None + + +def _cleanup_provider_at_exit() -> None: + """Process-exit cleanup for provider-managed temporary files.""" + global _config_provider_instance + if _config_provider_instance is not None: + try: + _config_provider_instance.cleanup_temp_files() + except Exception: + pass + + +atexit.register(_cleanup_provider_at_exit) diff --git a/tests/config_ng/test_private_key_cleanup.py b/tests/config_ng/test_private_key_cleanup.py new file mode 100644 index 0000000000..99ad720596 --- /dev/null +++ b/tests/config_ng/test_private_key_cleanup.py @@ -0,0 +1,39 @@ +"""Tests for temporary private_key_raw file lifecycle and cleanup.""" + +from pathlib import Path + + +def test_private_key_raw_creates_and_cleans_temp_file(config_ng_setup, tmp_path): + priv_key_content = ( + """-----BEGIN PRIVATE KEY-----\nABC\n-----END PRIVATE KEY-----\n""" + ) + + cli_config = """ + [connections.test] + user = "cli-user" + """ + + env_vars = { + # Provide private_key_raw via env to trigger transformation + "SNOWFLAKE_CONNECTIONS_TEST_PRIVATE_KEY_RAW": priv_key_content, + } + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + from snowflake.cli.api.config_provider import ( + get_config_provider_singleton, + reset_config_provider, + ) + + provider = get_config_provider_singleton() + + conn = get_connection_dict("test") + temp_path = Path(conn["private_key_file"]) # should exist now + assert temp_path.exists() + assert temp_path.read_text() == priv_key_content + + # Reset provider triggers 
cleanup + reset_config_provider() + + # File should be gone after cleanup + assert not temp_path.exists() From 7b188a5dbdc5dcb0becc99c2205604046e9e6be0 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 16 Oct 2025 16:13:47 +0200 Subject: [PATCH 52/78] SNOW-2306184: config refactor - connection level overwrite for config merging --- src/snowflake/cli/api/config_ng/__init__.py | 6 + src/snowflake/cli/api/config_ng/core.py | 22 + .../cli/api/config_ng/presentation.py | 370 +++++++++ .../cli/api/config_ng/resolution_logger.py | 24 +- src/snowflake/cli/api/config_ng/resolver.py | 723 +++++++----------- src/snowflake/cli/api/config_ng/sources.py | 80 +- src/snowflake/cli/api/config_provider.py | 2 +- tests/config_ng/test_configuration.py | 82 +- .../config_ng/test_connection_replacement.py | 457 +++++++++++ tests/config_ng/test_resolution_logger.py | 4 +- 10 files changed, 1253 insertions(+), 517 deletions(-) create mode 100644 src/snowflake/cli/api/config_ng/presentation.py create mode 100644 tests/config_ng/test_connection_replacement.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index d767e7341b..5ad8fdfce0 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -16,6 +16,8 @@ Enhanced Configuration System - Next Generation (NG) This package implements a simple, extensible configuration system with: +- Two-phase resolution: file sources use connection-level replacement, + overlay sources (env/CLI) use field-level merging - List-order precedence (explicit ordering in source list) - Migration support (SnowCLI and SnowSQL compatibility) - Complete resolution history tracking @@ -26,8 +28,10 @@ ConfigValue, ResolutionEntry, ResolutionHistory, + SourceType, ValueSource, ) +from snowflake.cli.api.config_ng.presentation import ResolutionPresenter from snowflake.cli.api.config_ng.resolution_logger import ( check_value_source, explain_configuration, @@ -71,9 
+75,11 @@ "ResolutionEntry", "ResolutionHistory", "ResolutionHistoryTracker", + "ResolutionPresenter", "show_all_resolution_chains", "show_resolution_chain", "SnowSQLConfigFile", "SnowSQLEnvironment", + "SourceType", "ValueSource", ] diff --git a/src/snowflake/cli/api/config_ng/core.py b/src/snowflake/cli/api/config_ng/core.py index 80b764d59f..3377f81dd7 100644 --- a/src/snowflake/cli/api/config_ng/core.py +++ b/src/snowflake/cli/api/config_ng/core.py @@ -26,9 +26,22 @@ from abc import ABC, abstractmethod from dataclasses import dataclass, field from datetime import datetime +from enum import Enum from typing import Any, Callable, Dict, List, Literal, Optional +class SourceType(Enum): + """ + Classification of configuration sources by merging behavior. + + FILE sources use connection-level replacement (later file replaces entire connection). + OVERLAY sources use field-level overlay (add/override individual fields). + """ + + FILE = "file" + OVERLAY = "overlay" + + @dataclass(frozen=True) class ConfigValue: """ @@ -104,6 +117,15 @@ def source_name(self) -> SourceName: """ ... + @property + @abstractmethod + def source_type(self) -> SourceType: + """ + Classification of this source for merging behavior. + FILE sources replace entire connections, OVERLAY sources merge per-field. + """ + ... + @abstractmethod def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ diff --git a/src/snowflake/cli/api/config_ng/presentation.py b/src/snowflake/cli/api/config_ng/presentation.py new file mode 100644 index 0000000000..c626a77e3c --- /dev/null +++ b/src/snowflake/cli/api/config_ng/presentation.py @@ -0,0 +1,370 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Resolution presentation utilities. + +This module handles all formatting, display, and export of configuration +resolution data. It separates presentation concerns from resolution logic. +""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Tuple + +from snowflake.cli.api.console import cli_console +from snowflake.cli.api.output.types import CollectionResult, MessageResult + +if TYPE_CHECKING: + from snowflake.cli.api.config_ng.resolver import ConfigurationResolver + +# Sensitive configuration keys that should be masked when displayed +SENSITIVE_KEYS = { + "password", + "pwd", + "oauth_client_secret", + "token", + "session_token", + "master_token", + "mfa_passcode", + "private_key", # Private key content (not path) + "passphrase", + "secret", +} + +# Keys that contain file paths (paths are OK to display, but not file contents) +PATH_KEYS = { + "private_key_file", + "private_key_path", + "token_file_path", +} + +# Fixed table columns ordered from most important (left) to least (right) +SourceColumn = Literal[ + "params", + "global_envs", + "connections_env", + "snowsql_env", + "connections.toml", + "config.toml", + "snowsql", +] + +TABLE_COLUMNS: Tuple[str, ...] 
= ( + "key", + "value", + "params", + "global_envs", + "connections_env", + "snowsql_env", + "connections.toml", + "config.toml", + "snowsql", +) + +# Mapping of internal source names to fixed table columns +SOURCE_TO_COLUMN: Dict[str, SourceColumn] = { + "cli_arguments": "params", + "cli_env": "global_envs", + "connection_specific_env": "connections_env", + "snowsql_env": "snowsql_env", + "connections_toml": "connections.toml", + "cli_config_toml": "config.toml", + "snowsql_config": "snowsql", +} + + +def _should_mask_value(key: str) -> bool: + """ + Determine if a configuration value should be masked for security. + + Args: + key: Configuration key name + + Returns: + True if the value should be masked, False if it can be displayed + """ + key_lower = key.lower() + + # Check if it's a path key (paths are OK to display) + if any(path_key in key_lower for path_key in PATH_KEYS): + return False + + # Check if it contains sensitive keywords + return any(sensitive_key in key_lower for sensitive_key in SENSITIVE_KEYS) + + +def _mask_sensitive_value(key: str, value: Any) -> str: + """ + Mask sensitive configuration values for display. + + Args: + key: Configuration key name + value: Value to potentially mask + + Returns: + Masked string if sensitive, otherwise string representation of value + """ + if _should_mask_value(key): + return "****" + return str(value) + + +class ResolutionPresenter: + """ + Handles all presentation, formatting, and export of resolution data. + + This class is responsible for: + - Console output with colors and formatting + - Building CommandResult objects for the output system + - Exporting resolution data to files + - Masking sensitive values in all outputs + """ + + def __init__(self, resolver: ConfigurationResolver): + """ + Initialize presenter with a resolver. 
+ + Args: + resolver: ConfigurationResolver instance to present data from + """ + self._resolver = resolver + + def get_summary(self) -> dict: + """ + Get summary statistics about configuration resolution. + + Returns: + Dictionary with statistics: + - total_keys_resolved + - keys_with_overrides + - keys_using_defaults + - source_usage (how many values each source provided) + - source_wins (how many final values came from each source) + """ + return self._resolver.get_tracker().get_summary() + + def build_sources_table(self, key: Optional[str] = None) -> CollectionResult: + """ + Build a tabular view of configuration sources per key. + + Columns (left to right): key, value, params, env, connections.toml, cli_config.toml, snowsql. + - value: masked final selected value for the key + - presence columns: "+" if a given source provided a value for the key, empty otherwise + + Args: + key: Optional specific key to build table for, or None for all keys + """ + # Ensure history is populated + tracker = self._resolver.get_tracker() + if key is None and not tracker.get_all_histories(): + # Resolve all keys to populate history + self._resolver.resolve() + elif key is not None and tracker.get_history(key) is None: + # Resolve only the specific key + self._resolver.resolve(key=key) + + histories = ( + {key: tracker.get_history(key)} + if key is not None + else tracker.get_all_histories() + ) + + def _row_items(): + for k, history in histories.items(): + if history is None: + continue + # Initialize row with fixed columns + row: Dict[str, Any] = {c: "" for c in TABLE_COLUMNS} + row["key"] = k + + # Final value (masked) + masked_final = _mask_sensitive_value(k, history.final_value) + row["value"] = masked_final + + # Mark presence per source + for entry in history.entries: + source_column = SOURCE_TO_COLUMN.get(entry.config_value.source_name) + if source_column is not None: + row[source_column] = "+" + + # Ensure result preserves the column order + ordered_row = {column: 
row[column] for column in TABLE_COLUMNS} + yield ordered_row + + return CollectionResult(_row_items()) + + def format_history_message(self, key: Optional[str] = None) -> MessageResult: + """ + Build a masked, human-readable history of merging as a single message. + If key is None, returns concatenated histories for all keys. + + Args: + key: Optional specific key to format, or None for all keys + """ + histories = ( + {key: self._resolver.get_resolution_history(key)} + if key is not None + else self._resolver.get_all_histories() + ) + + if not histories: + return MessageResult("No resolution history available") + + lines = [] + lines.append("Configuration Resolution History") + lines.append("=" * 80) + lines.append("") + + for k in sorted(histories.keys()): + history = histories[k] + if history is None: + continue + + lines.append(f"Key: {k}") + lines.append( + f"Final Value: {_mask_sensitive_value(k, history.final_value)}" + ) + + if history.entries: + lines.append("Resolution Chain:") + for i, entry in enumerate(history.entries, 1): + cv = entry.config_value + status = "SELECTED" if entry.was_used else "overridden" + masked_value = _mask_sensitive_value(cv.key, cv.value) + lines.append(f" {i}. [{status}] {cv.source_name}: {masked_value}") + + if history.default_used: + lines.append(" (default value used)") + + lines.append("") + + return MessageResult("\n".join(lines)) + + def print_resolution_chain(self, key: str) -> None: + """ + Print the resolution chain for a key using cli_console formatting. + Sensitive values (passwords, tokens, etc.) are automatically masked. 
+ + Args: + key: Configuration key + """ + history = self._resolver.get_resolution_history(key) + if not history: + cli_console.warning(f"No resolution history found for key: {key}") + return + + with cli_console.phase( + f"{key} resolution chain ({len(history.entries)} sources):" + ): + for i, entry in enumerate(history.entries, 1): + cv = entry.config_value + status_icon = "✅" if entry.was_used else "❌" + + if entry.was_used: + status_text = "(SELECTED)" + elif entry.overridden_by: + status_text = f"(overridden by {entry.overridden_by})" + else: + status_text = "(not used)" + + # Mask sensitive values + masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_raw = ( + _mask_sensitive_value(cv.key, cv.raw_value) + if cv.raw_value is not None + else None + ) + + # Show raw value if different from parsed value + value_display = f'"{masked_value}"' + if masked_raw is not None and cv.raw_value != cv.value: + value_display = f'"{masked_raw}" → {masked_value}' + + cli_console.step( + f"{i}. {status_icon} {cv.source_name}: {value_display} {status_text}" + ) + + if history.default_used: + masked_default = _mask_sensitive_value(key, history.final_value) + cli_console.step(f"Default value used: {masked_default}") + + def print_all_chains(self) -> None: + """ + Print resolution chains for all keys using cli_console formatting. + Sensitive values (passwords, tokens, etc.) are automatically masked. 
+ """ + histories = self._resolver.get_all_histories() + if not histories: + cli_console.warning("No resolution history available") + return + + with cli_console.phase( + f"Configuration Resolution History ({len(histories)} keys)" + ): + for key in sorted(histories.keys()): + history = histories[key] + cli_console.message( + f"\n{key} resolution chain ({len(history.entries)} sources):" + ) + with cli_console.indented(): + for i, entry in enumerate(history.entries, 1): + cv = entry.config_value + status_icon = "✅" if entry.was_used else "❌" + + if entry.was_used: + status_text = "(SELECTED)" + elif entry.overridden_by: + status_text = f"(overridden by {entry.overridden_by})" + else: + status_text = "(not used)" + + # Mask sensitive values + masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_raw = ( + _mask_sensitive_value(cv.key, cv.raw_value) + if cv.raw_value is not None + else None + ) + + # Show raw value if different from parsed value + value_display = f'"{masked_value}"' + if masked_raw is not None and cv.raw_value != cv.value: + value_display = f'"{masked_raw}" → {masked_value}' + + cli_console.step( + f"{i}. {status_icon} {cv.source_name}: {value_display} {status_text}" + ) + + if history.default_used: + masked_default = _mask_sensitive_value(key, history.final_value) + cli_console.step(f"Default value used: {masked_default}") + + def export_history(self, filepath: Path) -> None: + """ + Export resolution history to JSON file. 
+ + Args: + filepath: Path to output file + """ + histories = self._resolver.get_all_histories() + data = { + "summary": self.get_summary(), + "histories": {key: history.to_dict() for key, history in histories.items()}, + } + + with open(filepath, "w") as f: + json.dump(data, f, indent=2) diff --git a/src/snowflake/cli/api/config_ng/resolution_logger.py b/src/snowflake/cli/api/config_ng/resolution_logger.py index ed05ee40dc..4af8c73678 100644 --- a/src/snowflake/cli/api/config_ng/resolution_logger.py +++ b/src/snowflake/cli/api/config_ng/resolution_logger.py @@ -25,6 +25,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Dict, Optional +from snowflake.cli.api.config_ng.presentation import ResolutionPresenter from snowflake.cli.api.config_provider import ( ALTERNATIVE_CONFIG_ENV_VAR, AlternativeConfigProvider, @@ -98,7 +99,8 @@ def show_resolution_chain(key: str) -> None: ) return - resolver.print_resolution_chain(key) + presenter = ResolutionPresenter(resolver) + presenter.print_resolution_chain(key) def show_all_resolution_chains() -> None: @@ -124,7 +126,8 @@ def show_all_resolution_chains() -> None: ) return - resolver.print_all_chains() + presenter = ResolutionPresenter(resolver) + presenter.print_all_chains() def get_resolution_summary() -> Optional[Dict]: @@ -153,7 +156,8 @@ def get_resolution_summary() -> Optional[Dict]: if resolver is None: return None - return resolver.get_history_summary() + presenter = ResolutionPresenter(resolver) + return presenter.get_summary() def export_resolution_history(output_path: Path) -> bool: @@ -188,7 +192,8 @@ def export_resolution_history(output_path: Path) -> bool: return False try: - resolver.export_history(output_path) + presenter = ResolutionPresenter(resolver) + presenter.export_history(output_path) cli_console.message(f"✅ Resolution history exported to: {output_path}") return True except Exception as e: @@ -290,6 +295,8 @@ def explain_configuration(key: Optional[str] = None, verbose: bool = False) -> N ) 
return + presenter = ResolutionPresenter(resolver) + if key: # Explain specific key with cli_console.phase(f"Configuration Resolution: {key}"): @@ -300,7 +307,7 @@ def explain_configuration(key: Optional[str] = None, verbose: bool = False) -> N cli_console.message("No value found for this key") if verbose: - resolver.print_resolution_chain(key) + presenter.print_resolution_chain(key) else: # Explain all configuration with cli_console.phase("Complete Configuration Resolution"): @@ -309,7 +316,7 @@ def explain_configuration(key: Optional[str] = None, verbose: bool = False) -> N cli_console.message(summary_text) if verbose: - resolver.print_all_chains() + presenter.print_all_chains() def get_configuration_explanation_results( @@ -336,9 +343,10 @@ def get_configuration_explanation_results( f"Set {ALTERNATIVE_CONFIG_ENV_VAR}=true to enable it." ) - table_result: CollectionResult = resolver.build_sources_table(key) + presenter = ResolutionPresenter(resolver) + table_result: CollectionResult = presenter.build_sources_table(key) if not verbose: return table_result - history_message: MessageResult = resolver.format_history_message(key) + history_message: MessageResult = presenter.format_history_message(key) return MultipleResults([table_result, history_message]) diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index 5e83c3d887..2f690263a1 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -22,117 +22,23 @@ from __future__ import annotations -import json import logging from collections import defaultdict from datetime import datetime -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple from snowflake.cli.api.config_ng.core import ( ConfigValue, ResolutionEntry, ResolutionHistory, + SourceType, ) -from snowflake.cli.api.console import cli_console 
-from snowflake.cli.api.output.types import CollectionResult, MessageResult if TYPE_CHECKING: from snowflake.cli.api.config_ng.core import ValueSource log = logging.getLogger(__name__) -# Sensitive configuration keys that should be masked when displayed -SENSITIVE_KEYS = { - "password", - "pwd", - "oauth_client_secret", - "token", - "session_token", - "master_token", - "mfa_passcode", - "private_key", # Private key content (not path) - "passphrase", - "secret", -} - -# Keys that contain file paths (paths are OK to display, but not file contents) -PATH_KEYS = { - "private_key_file", - "private_key_path", - "token_file_path", -} - -# Fixed table columns ordered from most important (left) to least (right) -SourceColumn = Literal[ - "params", - "global_envs", - "connections_env", - "snowsql_env", - "connections.toml", - "config.toml", - "snowsql", -] - -TABLE_COLUMNS: Tuple[str, ...] = ( - "key", - "value", - "params", - "global_envs", - "connections_env", - "snowsql_env", - "connections.toml", - "config.toml", - "snowsql", -) - -# Mapping of internal source names to fixed table columns -SOURCE_TO_COLUMN: Dict[str, SourceColumn] = { - "cli_arguments": "params", - "cli_env": "global_envs", - "connection_specific_env": "connections_env", - "snowsql_env": "snowsql_env", - "connections_toml": "connections.toml", - "cli_config_toml": "config.toml", - "snowsql_config": "snowsql", -} - - -def _should_mask_value(key: str) -> bool: - """ - Determine if a configuration value should be masked for security. 
- - Args: - key: Configuration key name - - Returns: - True if the value should be masked, False if it can be displayed - """ - key_lower = key.lower() - - # Check if it's a path key (paths are OK to display) - if any(path_key in key_lower for path_key in PATH_KEYS): - return False - - # Check if it contains sensitive keywords - return any(sensitive_key in key_lower for sensitive_key in SENSITIVE_KEYS) - - -def _mask_sensitive_value(key: str, value: Any) -> str: - """ - Mask sensitive configuration values for display. - - Args: - key: Configuration key name - value: Configuration value - - Returns: - Masked representation of the value - """ - if _should_mask_value(key): - return "****" - return str(value) - class ResolutionHistoryTracker: """ @@ -300,42 +206,44 @@ def get_summary(self) -> dict: class ConfigurationResolver: """ - Orchestrates configuration sources with full resolution history tracking. + Orchestrates configuration sources with resolution history tracking. This is the main entry point for configuration resolution. It: - Manages multiple configuration sources in precedence order - Applies precedence rules based on source list order - Tracks complete resolution history - - Provides debugging and export utilities Sources should be provided in precedence order (lowest to highest priority). Later sources in the list override earlier sources. + For presentation/formatting of resolution data, use ResolutionPresenter + from the presentation module. + Example: + from snowflake.cli.api.config_ng import ConfigurationResolver + from snowflake.cli.api.config_ng.presentation import ResolutionPresenter + resolver = ConfigurationResolver( sources=[ snowsql_config, # Lowest priority cli_config, env_source, cli_arguments, # Highest priority - ], - track_history=True + ] ) # Resolve all configuration config = resolver.resolve() - # Debug: where did 'account' come from? 
- resolver.print_resolution_chain("account") - - # Export for support - resolver.export_history(Path("debug_config.json")) + # For debugging/presentation, use the presenter + presenter = ResolutionPresenter(resolver) + presenter.print_resolution_chain("account") + presenter.export_history(Path("debug_config.json")) """ def __init__( self, sources: Optional[List["ValueSource"]] = None, - track_history: bool = True, ): """ Initialize resolver with sources and history tracking. @@ -343,14 +251,10 @@ def __init__( Args: sources: List of configuration sources in precedence order (first = lowest priority, last = highest priority) - track_history: Enable resolution history tracking (default: True) """ self._sources = sources or [] self._history_tracker = ResolutionHistoryTracker() - if not track_history: - self._history_tracker.disable() - def add_source(self, source: "ValueSource") -> None: """ Add a configuration source to the end of the list (highest priority). @@ -364,432 +268,355 @@ def get_sources(self) -> List["ValueSource"]: """Get list of all sources in precedence order (for inspection).""" return self._sources.copy() - def enable_history(self) -> None: - """Enable resolution history tracking.""" - self._history_tracker.enable() - - def disable_history(self) -> None: - """Disable history tracking (for performance).""" - self._history_tracker.disable() - - def clear_history(self) -> None: - """Clear all resolution history.""" - self._history_tracker.clear() - - def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, Any]: + def _parse_connection_key(self, key: str) -> Optional[Tuple[str, str]]: """ - Resolve configuration values from all sources with history tracking. - - Resolution Process: - 1. Iterate sources in order (lowest to highest priority) - 2. Record all discovered values in history - 3. 
For connection keys (connections.{name}.{param}): - - If connections.toml defines a connection, it REPLACES cli_config_toml only - - SnowSQL config, environment vars, and CLI parameters still override - 4. For flat keys: later sources overwrite earlier sources - 5. Mark which value was selected - 6. Return final resolved values + Parse a connection key into (connection_name, parameter). Args: - key: Specific key to resolve (None = all keys) - default: Default value if key not found + key: Configuration key (e.g., "connections.prod.account") Returns: - Dictionary of resolved values (key -> value) + Tuple of (connection_name, parameter) or None if not a connection key """ - all_values: Dict[str, ConfigValue] = {} - # Track connection values separately for intelligent merging - connections: Dict[str, Dict[str, ConfigValue]] = defaultdict(dict) + if not key.startswith("connections."): + return None - # Identify sources that connections.toml replaces - # connections.toml only replaces cli_config_toml, not SnowSQL config - cli_config_source = "cli_config_toml" - connections_to_replace: set[str] = set() + parts = key.split(".", 2) + if len(parts) != 3: + return None - # First pass: find connections.toml and identify connections to replace - for source in self._sources: - if hasattr(source, "is_connections_file") and source.is_connections_file: - connections_to_replace = source.get_defined_connections() - break + return parts[1], parts[2] # (conn_name, param) - # Process sources in order (first = lowest priority, last = highest) - for source in self._sources: - try: - source_values = source.discover(key) + def _get_sources_by_type(self, source_type: SourceType) -> List["ValueSource"]: + """ + Get all sources matching the specified type. 
- # Record discoveries in history - for k, config_value in source_values.items(): - self._history_tracker.record_discovery(k, config_value) + Args: + source_type: Type of source to filter by - # Separate connection keys from flat keys - for k, config_value in source_values.items(): - if k.startswith("connections."): - # Parse: connections.{name}.{param} - parts = k.split(".", 2) - if len(parts) == 3: - conn_name = parts[1] - param = parts[2] - param_key = f"connections.{conn_name}.{param}" - - # Replacement logic: Skip cli_config_toml if connection is in connections.toml - # SnowSQL config is NOT replaced by connections.toml - is_cli_config = source.source_name == cli_config_source - connection_in_connections_toml = ( - conn_name in connections_to_replace - ) - - if is_cli_config and connection_in_connections_toml: - # Skip this value - connections.toml replaces cli_config_toml - log.debug( - "Skipping %s from %s (replaced by connections.toml)", - param_key, - source.source_name, - ) - continue - - # Merge at parameter level: later source overwrites/extends - connections[conn_name][param_key] = config_value - else: - # Flat key: later source overwrites - all_values[k] = config_value + Returns: + List of sources matching the type + """ + return [s for s in self._sources if s.source_type is source_type] - except Exception as e: - log.warning("Error from source %s: %s", source.source_name, e) + def _record_discoveries(self, source_values: Dict[str, ConfigValue]) -> None: + """ + Record all discovered values in history tracker. 
- # Flatten connection data back into all_values - for conn_name, conn_params in connections.items(): - all_values.update(conn_params) + Args: + source_values: Dictionary of discovered configuration values + """ + for k, config_value in source_values.items(): + self._history_tracker.record_discovery(k, config_value) - # Mark which values were selected in history + def _finalize_history(self, all_values: Dict[str, ConfigValue]) -> None: + """ + Mark which values were selected in resolution history. + + Args: + all_values: Final dictionary of selected configuration values + """ for k, config_value in all_values.items(): self._history_tracker.mark_selected(k, config_value.source_name) - # Convert ConfigValue objects to plain values - resolved = {k: v.value for k, v in all_values.items()} - - # Handle default for specific key - if key is not None and key not in resolved: - if default is not None: - resolved[key] = default - self._history_tracker.mark_default_used(key, default) - - return resolved - - def resolve_value(self, key: str, default: Any = None) -> Any: + def _apply_default( + self, resolved: Dict[str, Any], key: str, default: Any + ) -> Dict[str, Any]: """ - Resolve a single configuration value. + Apply default value for a specific key if provided. Args: + resolved: Current resolved configuration dictionary key: Configuration key - default: Default value if not found + default: Default value to apply Returns: - Resolved value or default + Updated resolved dictionary """ - resolved = self.resolve(key=key, default=default) - return resolved.get(key, default) + if default is not None: + resolved[key] = default + self._history_tracker.mark_default_used(key, default) + return resolved - def get_value_metadata(self, key: str) -> Optional[ConfigValue]: + def _group_by_connection( + self, source_values: Dict[str, ConfigValue] + ) -> Tuple[Dict[str, Dict[str, ConfigValue]], set[str]]: """ - Get metadata for the selected value. 
+ Group connection parameters by connection name. Args: - key: Configuration key + source_values: All values discovered from a source Returns: - ConfigValue for the selected value, or None if not found + Tuple of (per_conn, empty_connections): + - per_conn: Dict mapping connection name to its ConfigValue parameters + - empty_connections: Set of connection names that are empty """ - history = self._history_tracker.get_history(key) - if history and history.selected_entry: - return history.selected_entry.config_value + per_conn: Dict[str, Dict[str, ConfigValue]] = defaultdict(dict) + empty_connections: set[str] = set() - # Fallback to live query if history not available - for source in self._sources: - values = source.discover(key) - if key in values: - return values[key] + for k, config_value in source_values.items(): + parsed = self._parse_connection_key(k) + if parsed is None: + continue - return None + conn_name, param = parsed - def get_resolution_history(self, key: str) -> Optional[ResolutionHistory]: + # Track empty connection markers + if param == "_empty_connection": + empty_connections.add(conn_name) + else: + per_conn[conn_name][k] = config_value + + return per_conn, empty_connections + + def _extract_flat_values( + self, source_values: Dict[str, ConfigValue] + ) -> Dict[str, ConfigValue]: """ - Get complete resolution history for a key. + Extract non-connection (flat) configuration values. 
Args: - key: Configuration key + source_values: All values discovered from a source Returns: - ResolutionHistory showing the full precedence chain + Dictionary of flat configuration values (non-connection keys) """ - return self._history_tracker.get_history(key) - - def get_all_histories(self) -> Dict[str, ResolutionHistory]: - """Get resolution histories for all keys.""" - return self._history_tracker.get_all_histories() + return { + k: v for k, v in source_values.items() if not k.startswith("connections.") + } - def get_history_summary(self) -> dict: + def _replace_connections( + self, + file_connections: Dict[str, Dict[str, ConfigValue]], + per_conn: Dict[str, Dict[str, ConfigValue]], + empty_connections: set[str], + source: "ValueSource", + ) -> None: """ - Get summary statistics about configuration resolution. - - Returns: - Dictionary with statistics: - - total_keys_resolved - - keys_with_overrides - - keys_using_defaults - - source_usage (how many values each source provided) - - source_wins (how many final values came from each source) - """ - return self._history_tracker.get_summary() - - def build_sources_table(self, key: Optional[str] = None) -> CollectionResult: - """ - Build a tabular view of configuration sources per key. - - Columns (left to right): key, value, params, env, connections.toml, cli_config.toml, snowsql. - - value: masked final selected value for the key - - presence columns: "+" if a given source provided a value for the key, empty otherwise - """ - # Ensure history is populated - if key is None and not self._history_tracker.get_all_histories(): - # Resolve all keys to populate history - self.resolve() - elif key is not None and self._history_tracker.get_history(key) is None: - # Resolve only the specific key - self.resolve(key=key) - - histories = ( - {key: self._history_tracker.get_history(key)} - if key is not None - else self._history_tracker.get_all_histories() - ) + Replace entire connections with new definitions from source. 
- def _row_items(): - for k, history in histories.items(): - if history is None: - continue - # Initialize row with fixed columns - row: Dict[str, Any] = {c: "" for c in TABLE_COLUMNS} - row["key"] = k + This implements connection-level replacement: when a FILE source defines + a connection, it completely replaces any previous definition. - # Final value (masked) - masked_final = _mask_sensitive_value(k, history.final_value) - row["value"] = masked_final + Args: + file_connections: Accumulator for all file-based connections + per_conn: New connection definitions from current source + empty_connections: Set of empty connection names from current source + source: The source providing these connections + """ + all_conn_names = set(per_conn.keys()) | empty_connections + + for conn_name in all_conn_names: + conn_params = per_conn.get(conn_name, {}) + log.debug( + "Connection %s replaced by file source %s (%d params)", + conn_name, + source.source_name, + len(conn_params), + ) + file_connections[conn_name] = conn_params - # Mark presence per source - for entry in history.entries: - source_column = SOURCE_TO_COLUMN.get(entry.config_value.source_name) - if source_column is not None: - row[source_column] = "+" + def _resolve_file_sources( + self, key: Optional[str] + ) -> Tuple[Dict[str, Dict[str, ConfigValue]], Dict[str, ConfigValue]]: + """ + Process all FILE sources with connection-level replacement semantics. - # Ensure result preserves the column order - ordered_row = {column: row[column] for column in TABLE_COLUMNS} - yield ordered_row + FILE sources replace entire connections rather than merging fields. + Later FILE sources override earlier ones completely. - return CollectionResult(_row_items()) + Args: + key: Specific key to resolve (None = all keys) - def format_history_message(self, key: Optional[str] = None) -> MessageResult: - """ - Build a masked, human-readable history of merging as a single message. 
- If key is None, returns concatenated histories for all keys. + Returns: + Tuple of (file_connections, file_flat_values): + - file_connections: Dict mapping connection name to its parameters + - file_flat_values: Dict of flat configuration keys """ - histories = ( - {key: self.get_resolution_history(key)} - if key is not None - else self.get_all_histories() - ) + file_connections: Dict[str, Dict[str, ConfigValue]] = defaultdict(dict) + file_flat_values: Dict[str, ConfigValue] = {} - if not histories: - return MessageResult("No resolution history available") + for source in self._get_sources_by_type(SourceType.FILE): + try: + source_values = source.discover(key) + self._record_discoveries(source_values) - lines: List[str] = [] - for k in sorted(histories.keys()): - history = histories[k] - if history is None: - continue - lines.append(f"{k} resolution chain ({len(history.entries)} sources):") - for i, entry in enumerate(history.entries, 1): - cv = entry.config_value - status_text = ( - "(SELECTED)" - if entry.was_used - else ( - f"(overridden by {entry.overridden_by})" - if entry.overridden_by - else "(not used)" - ) - ) + # Process this source's values + per_conn, empty_conns = self._group_by_connection(source_values) + flat_values = self._extract_flat_values(source_values) - masked_value = _mask_sensitive_value(cv.key, cv.value) - masked_raw = ( - _mask_sensitive_value(cv.key, cv.raw_value) - if cv.raw_value is not None - else None + # Replace connections (entire connection replacement) + self._replace_connections( + file_connections, per_conn, empty_conns, source ) - value_display = f'"{masked_value}"' - if masked_raw is not None and cv.raw_value != cv.value: - value_display = f'"{masked_raw}" → {masked_value}' - lines.append(f" {i}. 
{cv.source_name}: {value_display} {status_text}") + # Update flat values + file_flat_values.update(flat_values) + + except Exception as e: + log.warning("Error from source %s: %s", source.source_name, e) + + return file_connections, file_flat_values - if history.default_used: - masked_default = _mask_sensitive_value(k, history.final_value) - lines.append(f" Default value used: {masked_default}") + def _merge_file_results( + self, + file_connections: Dict[str, Dict[str, ConfigValue]], + file_flat_values: Dict[str, ConfigValue], + ) -> Dict[str, ConfigValue]: + """ + Merge file connections and flat values into single dictionary. - lines.append("") + Args: + file_connections: Connection parameters from file sources + file_flat_values: Flat configuration values from file sources - return MessageResult("\n".join(lines).rstrip()) + Returns: + Merged dictionary of all file-based configuration values + """ + all_values: Dict[str, ConfigValue] = {} + + # Add all connection parameters + for conn_params in file_connections.values(): + all_values.update(conn_params) - def format_resolution_chain(self, key: str) -> str: + # Add flat values + all_values.update(file_flat_values) + + return all_values + + def _apply_overlay_sources( + self, all_values: Dict[str, ConfigValue], key: Optional[str] + ) -> Dict[str, ConfigValue]: """ - Format the resolution chain for a key (debugging helper). + Apply OVERLAY sources with field-level overlay semantics. + + OVERLAY sources (env vars, CLI args) add or override individual fields + without replacing entire connections. 
Args: - key: Configuration key + all_values: Current configuration values (typically from file sources) + key: Specific key to resolve (None = all keys) Returns: - Formatted resolution chain as a string + Updated dictionary with overlay values applied """ - history = self.get_resolution_history(key) - if history: - return history.format_chain() - return f"No resolution history found for key: {key}" + for source in self._get_sources_by_type(SourceType.OVERLAY): + try: + source_values = source.discover(key) + + # Record and apply overlays (field-level) + for k, config_value in source_values.items(): + self._history_tracker.record_discovery(k, config_value) + all_values[k] = config_value - def format_all_chains(self) -> str: + except Exception as e: + log.warning("Error from source %s: %s", source.source_name, e) + + return all_values + + def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, Any]: """ - Format resolution chains for all keys (debugging helper). + Resolve configuration values from all sources with history tracking. + + Resolution Process (Two-Phase): + + Phase A - File Sources (Connection-Level Replacement): + 1. Process FILE sources in precedence order (lowest to highest priority) + 2. For each connection, later FILE sources completely REPLACE earlier ones + 3. Fields from earlier file sources are NOT inherited + + Phase B - Overlay Sources (Field-Level Overlay): + 4. Start with the file-derived connection snapshot + 5. Process OVERLAY sources (env vars, CLI args) in precedence order + 6. These add/override individual fields without replacing entire connections + 7. 
For flat keys: later sources overwrite earlier sources + + Args: + key: Specific key to resolve (None = all keys) + default: Default value if key not found Returns: - Formatted resolution chains as a string + Dictionary of resolved values (key -> value) """ - histories = self.get_all_histories() - if not histories: - return "No resolution history available" + # Phase A: Process FILE sources (connection-level replacement) + file_connections, file_flat_values = self._resolve_file_sources(key) + + # Start with file-derived snapshot + all_values = self._merge_file_results(file_connections, file_flat_values) - lines = [ - f"\n{'=' * 80}", - f"Configuration Resolution History ({len(histories)} keys)", - f"{'=' * 80}\n", - ] + # Phase B: Process OVERLAY sources (field-level overlay) + all_values = self._apply_overlay_sources(all_values, key) - for key in sorted(histories.keys()): - lines.append(histories[key].format_chain()) - lines.append("") + # Mark selected values in history + self._finalize_history(all_values) - return "\n".join(lines) + # Convert ConfigValue objects to plain values + resolved = {k: v.value for k, v in all_values.items()} - def print_resolution_chain(self, key: str) -> None: + # Handle default for specific key + if key is not None and key not in resolved: + resolved = self._apply_default(resolved, key, default) + + return resolved + + def resolve_value(self, key: str, default: Any = None) -> Any: """ - Print the resolution chain for a key using cli_console formatting. - Sensitive values (passwords, tokens, etc.) are automatically masked. + Resolve a single configuration value. 
Args: key: Configuration key + default: Default value if not found + + Returns: + Resolved value or default """ - history = self.get_resolution_history(key) - if not history: - cli_console.warning(f"No resolution history found for key: {key}") - return + resolved = self.resolve(key=key, default=default) + return resolved.get(key, default) - with cli_console.phase( - f"{key} resolution chain ({len(history.entries)} sources):" - ): - for i, entry in enumerate(history.entries, 1): - cv = entry.config_value - status_icon = "✅" if entry.was_used else "❌" + def get_value_metadata(self, key: str) -> Optional[ConfigValue]: + """ + Get metadata for the selected value. - if entry.was_used: - status_text = "(SELECTED)" - elif entry.overridden_by: - status_text = f"(overridden by {entry.overridden_by})" - else: - status_text = "(not used)" - - # Mask sensitive values - masked_value = _mask_sensitive_value(cv.key, cv.value) - masked_raw = ( - _mask_sensitive_value(cv.key, cv.raw_value) - if cv.raw_value is not None - else None - ) + Args: + key: Configuration key - # Show raw value if different from parsed value - value_display = f'"{masked_value}"' - if masked_raw is not None and cv.raw_value != cv.value: - value_display = f'"{masked_raw}" → {masked_value}' + Returns: + ConfigValue for the selected value, or None if not found + """ + history = self._history_tracker.get_history(key) + if history and history.selected_entry: + return history.selected_entry.config_value - cli_console.step( - f"{i}. 
{status_icon} {cv.source_name}: {value_display} {status_text}" - ) + # Fallback to live query if history not available + for source in self._sources: + values = source.discover(key) + if key in values: + return values[key] - if history.default_used: - masked_default = _mask_sensitive_value(key, history.final_value) - cli_console.step(f"Default value used: {masked_default}") + return None - def print_all_chains(self) -> None: + def get_tracker(self) -> ResolutionHistoryTracker: """ - Print resolution chains for all keys using cli_console formatting. - Sensitive values (passwords, tokens, etc.) are automatically masked. + Get the history tracker for direct access to resolution data. + + Returns: + ResolutionHistoryTracker instance """ - histories = self.get_all_histories() - if not histories: - cli_console.warning("No resolution history available") - return + return self._history_tracker - with cli_console.phase( - f"Configuration Resolution History ({len(histories)} keys)" - ): - for key in sorted(histories.keys()): - history = histories[key] - cli_console.message( - f"\n{key} resolution chain ({len(history.entries)} sources):" - ) - with cli_console.indented(): - for i, entry in enumerate(history.entries, 1): - cv = entry.config_value - status_icon = "✅" if entry.was_used else "❌" - - if entry.was_used: - status_text = "(SELECTED)" - elif entry.overridden_by: - status_text = f"(overridden by {entry.overridden_by})" - else: - status_text = "(not used)" - - # Mask sensitive values - masked_value = _mask_sensitive_value(cv.key, cv.value) - masked_raw = ( - _mask_sensitive_value(cv.key, cv.raw_value) - if cv.raw_value is not None - else None - ) - - # Show raw value if different from parsed value - value_display = f'"{masked_value}"' - if masked_raw is not None and cv.raw_value != cv.value: - value_display = f'"{masked_raw}" → {masked_value}' - - cli_console.step( - f"{i}. 
{status_icon} {cv.source_name}: {value_display} {status_text}" - ) - - if history.default_used: - masked_default = _mask_sensitive_value(key, history.final_value) - cli_console.step(f"Default value used: {masked_default}") - - def export_history(self, filepath: Path) -> None: - """ - Export resolution history to JSON file. + def get_resolution_history(self, key: str) -> Optional[ResolutionHistory]: + """ + Get complete resolution history for a key. Args: - filepath: Path to output file - """ - histories = self.get_all_histories() - data = { - "summary": self.get_history_summary(), - "histories": {key: history.to_dict() for key, history in histories.items()}, - } + key: Configuration key - with open(filepath, "w") as f: - json.dump(data, f, indent=2) + Returns: + ResolutionHistory showing the full precedence chain + """ + return self._history_tracker.get_history(key) - log.info("Resolution history exported to %s", filepath) + def get_all_histories(self) -> Dict[str, ResolutionHistory]: + """Get resolution histories for all keys.""" + return self._history_tracker.get_all_histories() diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 95b47bc3d0..99c752b6d2 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -34,7 +34,7 @@ from pathlib import Path from typing import Any, Dict, Final, Optional -from snowflake.cli.api.config_ng.core import ConfigValue, ValueSource +from snowflake.cli.api.config_ng.core import ConfigValue, SourceType, ValueSource log = logging.getLogger(__name__) @@ -120,6 +120,10 @@ def __init__(self): def source_name(self) -> "ValueSource.SourceName": return "snowsql_config" + @property + def source_type(self) -> SourceType: + return SourceType.FILE + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Read and MERGE all SnowSQL config files. 
@@ -228,6 +232,10 @@ def __init__(self): def source_name(self) -> "ValueSource.SourceName": return "cli_config_toml" + @property + def source_type(self) -> SourceType: + return SourceType.FILE + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Find FIRST existing config file and use it (CLI behavior). @@ -312,6 +320,10 @@ def __init__(self): def source_name(self) -> "ValueSource.SourceName": return "connections_toml" + @property + def source_type(self) -> SourceType: + return SourceType.FILE + @property def is_connections_file(self) -> bool: """Mark this as the dedicated connections file source.""" @@ -375,30 +387,52 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: for section_name, section_data in data.items(): if isinstance(section_data, dict) and section_name != "connections": # This is a direct connection section like [default] - for param_key, param_value in section_data.items(): - full_key = f"connections.{section_name}.{param_key}" - if key is None or full_key == key: - result[full_key] = ConfigValue( - key=full_key, - value=param_value, + if not section_data: + # Empty connection section + marker_key = f"connections.{section_name}._empty_connection" + if key is None or marker_key == key: + result[marker_key] = ConfigValue( + key=marker_key, + value=True, source_name=self.source_name, - raw_value=param_value, + raw_value=True, ) + else: + for param_key, param_value in section_data.items(): + full_key = f"connections.{section_name}.{param_key}" + if key is None or full_key == key: + result[full_key] = ConfigValue( + key=full_key, + value=param_value, + source_name=self.source_name, + raw_value=param_value, + ) # Check for nested [connections] section format connections_section = data.get("connections", {}) if isinstance(connections_section, dict): for conn_name, conn_data in connections_section.items(): if isinstance(conn_data, dict): - for param_key, param_value in conn_data.items(): - full_key = 
f"connections.{conn_name}.{param_key}" - if key is None or full_key == key: - result[full_key] = ConfigValue( - key=full_key, - value=param_value, + if not conn_data: + # Empty connection section + marker_key = f"connections.{conn_name}._empty_connection" + if key is None or marker_key == key: + result[marker_key] = ConfigValue( + key=marker_key, + value=True, source_name=self.source_name, - raw_value=param_value, + raw_value=True, ) + else: + for param_key, param_value in conn_data.items(): + full_key = f"connections.{conn_name}.{param_key}" + if key is None or full_key == key: + result[full_key] = ConfigValue( + key=full_key, + value=param_value, + source_name=self.source_name, + raw_value=param_value, + ) return result @@ -451,6 +485,10 @@ class SnowSQLEnvironment(ValueSource): def source_name(self) -> "ValueSource.SourceName": return "snowsql_env" + @property + def source_type(self) -> SourceType: + return SourceType.OVERLAY + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Discover SNOWSQL_* environment variables. @@ -536,6 +574,10 @@ class ConnectionSpecificEnvironment(ValueSource): def source_name(self) -> "ValueSource.SourceName": return "connection_specific_env" + @property + def source_type(self) -> SourceType: + return SourceType.OVERLAY + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Discover SNOWFLAKE_CONNECTIONS_* environment variables. @@ -611,6 +653,10 @@ class CliEnvironment(ValueSource): def source_name(self) -> "ValueSource.SourceName": return "cli_env" + @property + def source_type(self) -> SourceType: + return SourceType.OVERLAY + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Discover general SNOWFLAKE_* environment variables. 
@@ -680,6 +726,10 @@ def __init__(self, cli_context: Optional[Dict[str, Any]] = None): def source_name(self) -> "ValueSource.SourceName": return "cli_arguments" + @property + def source_type(self) -> SourceType: + return SourceType.OVERLAY + def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: """ Extract non-None values from CLI context. diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 321630222c..8215d1605c 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -288,7 +288,7 @@ def _ensure_initialized(self) -> None: ] # Create resolver with all sources in order - self._resolver = ConfigurationResolver(sources=sources, track_history=True) + self._resolver = ConfigurationResolver(sources=sources) self._initialized = True diff --git a/tests/config_ng/test_configuration.py b/tests/config_ng/test_configuration.py index 976a2fd341..ad4d6c953f 100644 --- a/tests/config_ng/test_configuration.py +++ b/tests/config_ng/test_configuration.py @@ -198,8 +198,9 @@ def test_complete_7_level_chain(config_ng_setup): # Level 2 (cli_config) is skipped because connections.toml defines this connection # password from cli_config is NOT present - # Level 1 provides user - assert conn["user"] == "level1-user" + # user NOT in connection - connections_toml (level 3) replaced entire connection + # and didn't include user field + assert "user" not in conn def test_get_connection_dict_uses_config_ng_when_enabled(config_ng_setup): @@ -443,10 +444,9 @@ def test_all_file_sources_precedence(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "from-connections", # Level 3 wins - "user": "snowsql-user", # Level 1 only (cli_config skipped) - "warehouse": "snowsql-warehouse", # Level 1 only source - "password": "connections-pass", # Level 3 wins + "account": "from-connections", # connections_toml (Level 3) wins + # user and warehouse NOT inherited - 
connections_toml replaced entire connection + "password": "connections-pass", # From connections_toml (FILE) } assert conn == expected @@ -502,9 +502,9 @@ def test_file_and_env_mix_with_gaps(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "env-account", # Level 6 wins - "user": "snowsql-user", # Level 1 only source - "warehouse": "toml-warehouse", # Level 3 only source + "account": "env-account", # Level 6 OVERLAY wins + # user NOT inherited - connections_toml replaced entire snowsql connection + "warehouse": "toml-warehouse", # From connections_toml (FILE) } assert conn == expected @@ -573,10 +573,9 @@ def test_all_files_plus_snowsql_env(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "env-account", # Level 4 wins - "user": "snowsql-user", # Level 1 only - # warehouse from cli_config is skipped (connections.toml replaces cli_config) - "database": "toml-db", # Level 3 only + "account": "env-account", # Level 4 OVERLAY wins + # user NOT inherited - connections_toml replaced entire connection chain + "database": "toml-db", # From connections_toml (FILE) } assert conn == expected @@ -617,10 +616,10 @@ def test_all_files_plus_general_env(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "env-account", # Level 6 wins - "user": "snowsql-user", # Level 1 only + "account": "env-account", # Level 6 OVERLAY wins + # user NOT inherited - connections_toml replaced entire connection chain # role from cli_config is skipped (connections.toml replaces cli_config) - "warehouse": "env-warehouse", # Level 6 wins + "warehouse": "env-warehouse", # Level 6 OVERLAY wins } assert conn == expected @@ -688,11 +687,11 @@ def test_two_files_two_envs_with_gap(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "general-env", # Level 6 wins - "user": "snowsql-user", # Level 1 only - "warehouse": "toml-warehouse", # Level 3 only - "database": "conn-db", # Level 5 only - "schema": 
"general-schema", # Level 6 only + "account": "general-env", # Level 6 OVERLAY wins + # user NOT inherited - connections_toml replaced entire connection + "warehouse": "toml-warehouse", # From connections_toml (FILE) + "database": "conn-db", # Level 5 OVERLAY + "schema": "general-schema", # Level 6 OVERLAY } assert conn == expected @@ -737,10 +736,9 @@ def test_all_files_plus_two_env_types(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "conn-specific", # Level 5 wins - "user": "snowsql-user", # Level 1 only - # password from cli_config is skipped (connections.toml replaces cli_config) - "warehouse": "conn-warehouse", # Level 5 wins + "account": "conn-specific", # Level 5 OVERLAY wins + # user NOT inherited - connections_toml replaced entire connection + "warehouse": "conn-warehouse", # Level 5 OVERLAY (on top of toml) } assert conn == expected @@ -778,12 +776,12 @@ def test_two_files_all_envs(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "general-env", # Level 6 wins - "user": "snowsql-user", # Level 1 only - "password": "cli-password", # Level 2 only - "database": "snowsql-db", # Level 4 only - "role": "conn-role", # Level 5 only - "warehouse": "general-warehouse", # Level 6 only + "account": "general-env", # Level 6 OVERLAY wins + # user NOT inherited - cli_config (level 2) replaced snowsql connection + "password": "cli-password", # From cli_config (FILE) + "database": "snowsql-db", # Level 4 OVERLAY + "role": "conn-role", # Level 5 OVERLAY + "warehouse": "general-warehouse", # Level 6 OVERLAY } assert conn == expected @@ -856,12 +854,12 @@ def test_snowsql_and_connections_with_all_envs(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "general-env", # Level 6 wins - "user": "snowsql-user", # Level 1 only - "password": "conn-password", # Level 5 wins - "warehouse": "snowsql-warehouse", # Level 4 wins (overrides level 3) - "role": "conn-role", # Level 5 only - "database": 
"general-db", # Level 6 only + "account": "general-env", # Level 6 OVERLAY wins + # user NOT inherited - connections_toml replaced snowsql connection + "password": "conn-password", # Level 5 OVERLAY + "warehouse": "snowsql-warehouse", # Level 4 OVERLAY (on top of toml FILE) + "role": "conn-role", # Level 5 OVERLAY + "database": "general-db", # Level 6 OVERLAY } assert conn == expected @@ -961,12 +959,10 @@ def test_snowsql_key_mapping_with_precedence(config_ng_setup): conn = get_connection_dict("test") expected = { - "account": "env-account", # Level 6 wins - "user": "snowsql-user", # Level 1 only (mapped from username) - "database": "cli-db", # Level 2 wins - "schema": "env-schema", # Level 6 wins - "role": "snowsql-role", # Level 1 only (mapped from rolename) - "warehouse": "snowsql-warehouse", # Level 1 only (mapped from warehousename) + "account": "env-account", # Level 6 OVERLAY wins + # user, role, warehouse NOT inherited - cli_config replaced snowsql connection + "database": "cli-db", # From cli_config (FILE) + "schema": "env-schema", # Level 6 OVERLAY } assert conn == expected diff --git a/tests/config_ng/test_connection_replacement.py b/tests/config_ng/test_connection_replacement.py new file mode 100644 index 0000000000..9d8787780f --- /dev/null +++ b/tests/config_ng/test_connection_replacement.py @@ -0,0 +1,457 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" +Tests for connection-level replacement behavior in config_ng. + +These tests verify that: +1. FILE sources (snowsql_config, cli_config_toml, connections_toml) use + connection-level replacement (later file replaces entire connection) +2. OVERLAY sources (env vars, CLI args) use field-level overlay +3. SnowSQL's multi-file merge acts as a single FILE source +""" + + +def test_file_replacement_basic(config_ng_setup): + """ + Test basic file replacement: later file replaces entire connection. + Fields from earlier file should NOT be inherited. + """ + snowsql_config = """ + [connections.test] + accountname = snowsql-account + user = snowsql-user + warehouse = snowsql-warehouse + database = snowsql-database + """ + + cli_config = """ + [connections.test] + account = "cli-account" + user = "cli-user" + # Note: warehouse and database are NOT included + """ + + with config_ng_setup(snowsql_config=snowsql_config, cli_config=cli_config): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + # Values from cli_config (later FILE source) + assert conn["account"] == "cli-account" + assert conn["user"] == "cli-user" + + # warehouse and database from snowsql NOT inherited (connection replaced) + assert "warehouse" not in conn + assert "database" not in conn + + +def test_file_replacement_connections_toml_replaces_cli_config(config_ng_setup): + """ + Test that connections.toml replaces cli_config.toml entirely. 
+ """ + cli_config = """ + [connections.prod] + account = "cli-account" + user = "cli-user" + warehouse = "cli-warehouse" + database = "cli-database" + schema = "cli-schema" + """ + + connections_toml = """ + [connections.prod] + account = "conn-account" + database = "conn-database" + # Note: user, warehouse, schema are NOT included + """ + + with config_ng_setup(cli_config=cli_config, connections_toml=connections_toml): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("prod") + + # Values from connections.toml + assert conn["account"] == "conn-account" + assert conn["database"] == "conn-database" + + # user, warehouse, schema from cli_config NOT inherited + assert "user" not in conn + assert "warehouse" not in conn + assert "schema" not in conn + + +def test_file_replacement_three_levels(config_ng_setup): + """ + Test replacement across three file sources: snowsql -> cli_config -> connections.toml + """ + snowsql_config = """ + [connections.dev] + accountname = snowsql-account + user = snowsql-user + warehouse = snowsql-warehouse + database = snowsql-database + schema = snowsql-schema + """ + + cli_config = """ + [connections.dev] + account = "cli-account" + user = "cli-user" + warehouse = "cli-warehouse" + # database and schema not included + """ + + connections_toml = """ + [connections.dev] + account = "conn-account" + # Only account specified + """ + + with config_ng_setup( + snowsql_config=snowsql_config, + cli_config=cli_config, + connections_toml=connections_toml, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("dev") + + # Only value from connections.toml (highest FILE source) + assert conn["account"] == "conn-account" + + # All other fields NOT inherited from earlier FILE sources + assert "user" not in conn + assert "warehouse" not in conn + assert "database" not in conn + assert "schema" not in conn + + +def 
test_overlay_adds_fields_without_replacing_connection(config_ng_setup): + """ + Test that OVERLAY sources (env vars) add/override individual fields + without replacing the entire connection from FILE sources. + """ + cli_config = """ + [connections.test] + account = "cli-account" + database = "cli-database" + schema = "cli-schema" + """ + + env_vars = { + "SNOWFLAKE_CONNECTIONS_TEST_USER": "env-user", + "SNOWFLAKE_CONNECTIONS_TEST_WAREHOUSE": "env-warehouse", + } + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + # Values from FILE source (cli_config) + assert conn["account"] == "cli-account" + assert conn["database"] == "cli-database" + assert conn["schema"] == "cli-schema" + + # Values from OVERLAY source (env vars) - added without replacing + assert conn["user"] == "env-user" + assert conn["warehouse"] == "env-warehouse" + + +def test_overlay_overrides_file_field_without_replacing_connection(config_ng_setup): + """ + Test that OVERLAY sources can override individual fields from FILE sources + without replacing the entire connection. 
+ """ + connections_toml = """ + [connections.prod] + account = "file-account" + user = "file-user" + warehouse = "file-warehouse" + database = "file-database" + """ + + env_vars = { + "SNOWFLAKE_CONNECTIONS_PROD_USER": "env-user", + "SNOWFLAKE_CONNECTIONS_PROD_WAREHOUSE": "env-warehouse", + } + + with config_ng_setup(connections_toml=connections_toml, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("prod") + + # account and database from FILE source (not overridden) + assert conn["account"] == "file-account" + assert conn["database"] == "file-database" + + # user and warehouse overridden by OVERLAY source + assert conn["user"] == "env-user" + assert conn["warehouse"] == "env-warehouse" + + +def test_snowsql_env_overlay_on_replaced_connection(config_ng_setup): + """ + Test that SNOWSQL_* env vars (OVERLAY) overlay on replaced connections. + """ + snowsql_config = """ + [connections.test] + accountname = snowsql-account + user = snowsql-user + warehouse = snowsql-warehouse + database = snowsql-database + """ + + connections_toml = """ + [connections.test] + account = "conn-account" + user = "conn-user" + # warehouse and database not included (connection replaced) + """ + + env_vars = { + "SNOWSQL_WAREHOUSE": "env-warehouse", + } + + with config_ng_setup( + snowsql_config=snowsql_config, + connections_toml=connections_toml, + env_vars=env_vars, + ): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + # Values from connections.toml (FILE source) + assert conn["account"] == "conn-account" + assert conn["user"] == "conn-user" + + # database from snowsql NOT inherited (connection replaced by connections.toml) + assert "database" not in conn + + # warehouse from SNOWSQL_* env (OVERLAY source) + assert conn["warehouse"] == "env-warehouse" + + +def test_cli_env_overlay_on_file_connection(config_ng_setup): + """ + Test that SNOWFLAKE_* env vars (OVERLAY) add fields 
to file connections. + """ + cli_config = """ + [connections.dev] + account = "cli-account" + database = "cli-database" + """ + + env_vars = { + "SNOWFLAKE_USER": "global-env-user", + "SNOWFLAKE_WAREHOUSE": "global-env-warehouse", + } + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("dev") + + # Values from FILE source + assert conn["account"] == "cli-account" + assert conn["database"] == "cli-database" + + # Values from global OVERLAY source (apply to all connections) + assert conn["user"] == "global-env-user" + assert conn["warehouse"] == "global-env-warehouse" + + +def test_multiple_connections_independent_replacement(config_ng_setup): + """ + Test that replacement is per-connection: different connections can be + replaced independently. + """ + snowsql_config = """ + [connections.conn1] + accountname = snowsql-account1 + user = snowsql-user1 + warehouse = snowsql-warehouse1 + + [connections.conn2] + accountname = snowsql-account2 + user = snowsql-user2 + database = snowsql-database2 + """ + + connections_toml = """ + [connections.conn1] + account = "conn-account1" + # Only account specified for conn1 - warehouse NOT inherited + + # conn2 NOT defined in connections.toml + """ + + with config_ng_setup( + snowsql_config=snowsql_config, connections_toml=connections_toml + ): + from snowflake.cli.api.config import get_connection_dict + + # conn1: replaced by connections.toml + conn1 = get_connection_dict("conn1") + assert conn1["account"] == "conn-account1" + assert "user" not in conn1 # Not inherited + assert "warehouse" not in conn1 # Not inherited + + # conn2: NOT replaced, uses snowsql_config values + conn2 = get_connection_dict("conn2") + assert conn2["account"] == "snowsql-account2" + assert conn2["user"] == "snowsql-user2" + assert conn2["database"] == "snowsql-database2" + + +def test_empty_connection_replacement(config_ng_setup): + """ + Test that an 
empty connection in a later FILE source still replaces + the entire connection from earlier sources, resulting in no configured connection. + """ + cli_config = """ + [connections.test] + account = "cli-account" + user = "cli-user" + warehouse = "cli-warehouse" + """ + + connections_toml = """ + [connections.test] + # Empty connection section + """ + + with config_ng_setup(cli_config=cli_config, connections_toml=connections_toml): + import pytest + from snowflake.cli.api.config import get_connection_dict + from snowflake.cli.api.exceptions import MissingConfigurationError + + # Empty connection replacement means no parameters, which raises an error + with pytest.raises( + MissingConfigurationError, match="Connection test is not configured" + ): + get_connection_dict("test") + + +def test_overlay_precedence_connection_specific_over_global(config_ng_setup): + """ + Test OVERLAY precedence: global env (SNOWFLAKE_*) overrides connection-specific env. + Source order: connection_specific_env (#5) < cli_env (#6) + """ + cli_config = """ + [connections.test] + account = "cli-account" + """ + + env_vars = { + "SNOWFLAKE_USER": "global-user", + "SNOWFLAKE_CONNECTIONS_TEST_USER": "specific-user", + "SNOWFLAKE_WAREHOUSE": "global-warehouse", + } + + with config_ng_setup(cli_config=cli_config, env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + + assert conn["account"] == "cli-account" + # Global env (later OVERLAY source) overrides connection-specific env + assert conn["user"] == "global-user" + # Global env applies when no specific override exists + assert conn["warehouse"] == "global-warehouse" + + +def test_resolution_history_shows_replacement(config_ng_setup): + """ + Test that resolution history correctly shows file replacement behavior. 
+ """ + snowsql_config = """ + [connections.test] + accountname = snowsql-account + user = snowsql-user + warehouse = snowsql-warehouse + """ + + cli_config = """ + [connections.test] + account = "cli-account" + user = "cli-user" + # warehouse not included + """ + + with config_ng_setup(snowsql_config=snowsql_config, cli_config=cli_config): + from snowflake.cli.api.config import get_connection_dict + from snowflake.cli.api.config_ng import get_resolver + + # Trigger resolution to populate history + conn = get_connection_dict("test") + assert conn["account"] == "cli-account" + + resolver = get_resolver() + assert resolver is not None + + # account: both sources provide, cli_config wins + account_history = resolver.get_resolution_history("connections.test.account") + assert account_history is not None + assert len(account_history.entries) == 2 + assert account_history.final_value == "cli-account" + + # user: both sources provide, cli_config wins + user_history = resolver.get_resolution_history("connections.test.user") + assert user_history is not None + assert len(user_history.entries) == 2 + assert user_history.final_value == "cli-user" + + # warehouse: only snowsql provides, but NOT in final config (connection replaced) + # Since the connection was replaced and warehouse wasn't in the new connection, + # it was discovered but never made it to the final resolution, so no history entry + warehouse_history = resolver.get_resolution_history( + "connections.test.warehouse" + ) + # Note: warehouse was discovered but since connection was replaced by cli_config, + # and cli_config didn't include warehouse, it's not in the final resolved values + # The history tracking only marks selected values, so warehouse has no marked entry + if warehouse_history: + # If history exists, it should show discovery but no selection + assert warehouse_history.selected_entry is None + + +def test_flat_keys_still_use_simple_override(config_ng_setup): + """ + Test that flat keys 
(non-connection) still use simple override behavior. + """ + snowsql_config = """ + [connections] + some_global = snowsql-global + """ + + cli_config = """ + [connections] + some_global = "cli-global" + """ + + # Note: This test is somewhat artificial as flat keys in connections sections + # are not commonly used, but verifies the logic handles them correctly + + with config_ng_setup(snowsql_config=snowsql_config, cli_config=cli_config): + from snowflake.cli.api.config_ng import get_resolver + + resolver = get_resolver() + if resolver: + resolved = resolver.resolve() + # This would need actual flat key support in sources to fully test + # For now, just verify no errors occur + assert resolved is not None diff --git a/tests/config_ng/test_resolution_logger.py b/tests/config_ng/test_resolution_logger.py index 73b84f3174..1349d82681 100644 --- a/tests/config_ng/test_resolution_logger.py +++ b/tests/config_ng/test_resolution_logger.py @@ -137,12 +137,12 @@ def test_format_summary_with_alternative_provider(self): with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): reset_config_provider() - # Mock the resolver to have some data + # Mock the resolver's tracker to have some data provider = AlternativeConfigProvider() provider._ensure_initialized() with mock.patch.object( - provider._resolver, "get_history_summary" + provider._resolver.get_tracker(), "get_summary" ) as mock_summary: mock_summary.return_value = { "total_keys_resolved": 5, From d94d29a8ee5babc0e9324a3d3b42ea8a1cb115c0 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 16 Oct 2025 17:47:39 +0200 Subject: [PATCH 53/78] SNOW-2306184: config refactor - variables merge --- src/snowflake/cli/_plugins/dcm/manager.py | 10 +- src/snowflake/cli/_plugins/sql/commands.py | 7 +- src/snowflake/cli/_plugins/stage/manager.py | 8 +- src/snowflake/cli/api/config_ng/__init__.py | 4 + src/snowflake/cli/api/config_ng/sources.py | 66 +++- src/snowflake/cli/api/config_provider.py | 9 + 
tests/config_ng/test_snowsql_variables.py | 398 ++++++++++++++++++++ 7 files changed, 491 insertions(+), 11 deletions(-) create mode 100644 tests/config_ng/test_snowsql_variables.py diff --git a/src/snowflake/cli/_plugins/dcm/manager.py b/src/snowflake/cli/_plugins/dcm/manager.py index 717239b4cf..8357f40150 100644 --- a/src/snowflake/cli/_plugins/dcm/manager.py +++ b/src/snowflake/cli/_plugins/dcm/manager.py @@ -18,7 +18,6 @@ import yaml from snowflake.cli._plugins.stage.manager import StageManager from snowflake.cli.api.artifacts.upload import sync_artifacts_with_stage -from snowflake.cli.api.commands.utils import parse_key_value_variables from snowflake.cli.api.console.console import cli_console from snowflake.cli.api.constants import ( DEFAULT_SIZE_LIMIT_MB, @@ -102,8 +101,15 @@ def execute( if configuration: query += f" CONFIGURATION {configuration}" if variables: + from snowflake.cli.api.commands.common import Variable + from snowflake.cli.api.config_ng import get_merged_variables + + # Get merged variables from SnowSQL config and CLI -D parameters + merged_vars_dict = get_merged_variables(variables) + # Convert dict to List[Variable] for compatibility with parse_execute_variables + parsed_variables = [Variable(k, v) for k, v in merged_vars_dict.items()] query += StageManager.parse_execute_variables( - parse_key_value_variables(variables) + parsed_variables ).removeprefix(" using") stage_path = StagePath.from_stage_str(from_stage) query += f" FROM {stage_path.absolute_path()}" diff --git a/src/snowflake/cli/_plugins/sql/commands.py b/src/snowflake/cli/_plugins/sql/commands.py index 7a3b2ad817..9b73929f65 100644 --- a/src/snowflake/cli/_plugins/sql/commands.py +++ b/src/snowflake/cli/_plugins/sql/commands.py @@ -29,7 +29,6 @@ ) from snowflake.cli.api.commands.overrideable_parameter import OverrideableOption from snowflake.cli.api.commands.snow_typer import SnowTyperFactory -from snowflake.cli.api.commands.utils import parse_key_value_variables from 
snowflake.cli.api.exceptions import CliArgumentError from snowflake.cli.api.output.types import ( CommandResult, @@ -136,9 +135,9 @@ def execute_sql( The command supports variable substitution that happens on client-side. """ - data = {} - if data_override: - data = {v.key: v.value for v in parse_key_value_variables(data_override)} + from snowflake.cli.api.config_ng import get_merged_variables + + data = get_merged_variables(data_override) template_syntax_config = _parse_template_syntax_config(enabled_templating) diff --git a/src/snowflake/cli/_plugins/stage/manager.py b/src/snowflake/cli/_plugins/stage/manager.py index 3e8a72c9f7..4865697f3d 100644 --- a/src/snowflake/cli/_plugins/stage/manager.py +++ b/src/snowflake/cli/_plugins/stage/manager.py @@ -38,7 +38,6 @@ OnErrorType, Variable, ) -from snowflake.cli.api.commands.utils import parse_key_value_variables from snowflake.cli.api.console import cli_console from snowflake.cli.api.constants import PYTHON_3_12 from snowflake.cli.api.exceptions import CliError @@ -608,7 +607,12 @@ def execute( filtered_file_list, key=lambda f: (path.dirname(f), path.basename(f)) ) - parsed_variables = parse_key_value_variables(variables) + from snowflake.cli.api.config_ng import get_merged_variables + + # Get merged variables from SnowSQL config and CLI -D parameters + merged_vars_dict = get_merged_variables(variables) + # Convert dict back to List[Variable] for compatibility with existing methods + parsed_variables = [Variable(k, v) for k, v in merged_vars_dict.items()] sql_variables = self.parse_execute_variables(parsed_variables) python_variables = self._parse_python_variables(parsed_variables) results = [] diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index 5ad8fdfce0..27a762b9be 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -55,6 +55,8 @@ ConnectionSpecificEnvironment, SnowSQLConfigFile, SnowSQLEnvironment, + 
SnowSQLSection, + get_merged_variables, ) __all__ = [ @@ -69,6 +71,7 @@ "explain_configuration", "export_resolution_history", "format_summary_for_display", + "get_merged_variables", "get_resolution_summary", "get_resolver", "is_resolution_logging_available", @@ -80,6 +83,7 @@ "show_resolution_chain", "SnowSQLConfigFile", "SnowSQLEnvironment", + "SnowSQLSection", "SourceType", "ValueSource", ] diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 99c752b6d2..8319e086b6 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -31,13 +31,27 @@ import configparser import logging import os +from enum import Enum from pathlib import Path -from typing import Any, Dict, Final, Optional +from typing import Any, Dict, Final, List, Optional from snowflake.cli.api.config_ng.core import ConfigValue, SourceType, ValueSource log = logging.getLogger(__name__) + +class SnowSQLSection(Enum): + """ + SnowSQL configuration file section names. + + These sections can be present in SnowSQL INI config files. 
+ """ + + CONNECTIONS = "connections" + VARIABLES = "variables" + OPTIONS = "options" + + # Try to import tomllib (Python 3.11+) or fall back to tomli try: import tomllib @@ -142,9 +156,9 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: # Process all connection sections for section in config.sections(): - if section.startswith("connections"): + if section.startswith(SnowSQLSection.CONNECTIONS.value): # Extract connection name - if section == "connections": + if section == SnowSQLSection.CONNECTIONS.value: # This is default connection connection_name = "default" else: @@ -170,6 +184,19 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: raw_value=f"{param_key}={param_value}", # Show original key in raw_value ) + elif section == SnowSQLSection.VARIABLES.value: + # Process variables section (global, not connection-specific) + section_data = dict(config[section]) + for var_key, var_value in section_data.items(): + full_key = f"variables.{var_key}" + if key is None or full_key == key: + merged_values[full_key] = ConfigValue( + key=full_key, + value=var_value, + source_name=self.source_name, + raw_value=f"{var_key}={var_value}", + ) + except Exception as e: log.debug("Failed to read SnowSQL config %s: %s", config_file, e) @@ -755,3 +782,36 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: def supports_key(self, key: str) -> bool: """Check if key is present in CLI context with non-None value.""" return key in self._cli_context and self._cli_context[key] is not None + + +def get_merged_variables(cli_variables: Optional[List[str]] = None) -> Dict[str, str]: + """ + Merge SnowSQL [variables] with CLI -D parameters. 
+ + Precedence: SnowSQL variables (lower) < -D parameters (higher) + + Args: + cli_variables: List of "key=value" strings from -D parameters + + Returns: + Dictionary of merged variables (key -> value) + """ + from snowflake.cli.api.config_provider import get_config_provider_singleton + + # Start with SnowSQL variables from config + provider = get_config_provider_singleton() + try: + snowsql_vars = provider.get_section(SnowSQLSection.VARIABLES.value) + except Exception: + # If variables section doesn't exist or provider not initialized, start with empty dict + snowsql_vars = {} + + # Parse and overlay -D parameters (higher precedence) + if cli_variables: + from snowflake.cli.api.commands.utils import parse_key_value_variables + + cli_vars_parsed = parse_key_value_variables(cli_variables) + for var in cli_vars_parsed: + snowsql_vars[var.key] = var.value + + return snowsql_vars diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 8215d1605c..5f7589e430 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -335,6 +335,15 @@ def get_section(self, *path) -> dict: connection_name = path[1] return self._get_connection_dict_internal(connection_name) + # For variables section, return all variables as flat dict + if len(path) == 1 and path[0] == "variables": + result = {} + for key, value in self._config_cache.items(): + if key.startswith("variables."): + var_name = key[len("variables.") :] + result[var_name] = value + return result + # For other sections, try to resolve with path prefix section_prefix = ".".join(path) result = {} diff --git a/tests/config_ng/test_snowsql_variables.py b/tests/config_ng/test_snowsql_variables.py new file mode 100644 index 0000000000..f7ae946a51 --- /dev/null +++ b/tests/config_ng/test_snowsql_variables.py @@ -0,0 +1,398 @@ +# Copyright (c) 2024 Snowflake Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Tests for SnowSQL [variables] section reading and merging with -D parameters. +""" + +import tempfile +from pathlib import Path +from unittest import mock + +from snowflake.cli.api.config_ng import ( + ConfigurationResolver, + SnowSQLConfigFile, + SnowSQLSection, + get_merged_variables, +) + + +class TestSnowSQLVariablesSection: + """Tests for reading [variables] section from SnowSQL config files.""" + + def test_read_variables_section_from_snowsql_config(self): + """Test that [variables] section is correctly read from SnowSQL config.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_file = Path(temp_dir) / "config" + config_file.write_text( + """ +[connections] +accountname = test_account +username = test_user + +[variables] +var1=value1 +var2=value2 +example_variable=27 +""" + ) + + source = SnowSQLConfigFile() + setattr(source, "_config_files", [config_file]) + + discovered = source.discover() + + # Check that variables are discovered with proper prefix + assert "variables.var1" in discovered + assert "variables.var2" in discovered + assert "variables.example_variable" in discovered + + # Check values + assert discovered["variables.var1"].value == "value1" + assert discovered["variables.var2"].value == "value2" + assert discovered["variables.example_variable"].value == "27" + + # Check source name + assert discovered["variables.var1"].source_name == "snowsql_config" + + def 
test_variables_section_empty(self): + """Test that empty [variables] section doesn't cause errors.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_file = Path(temp_dir) / "config" + config_file.write_text( + """ +[connections] +accountname = test_account + +[variables] +""" + ) + + source = SnowSQLConfigFile() + setattr(source, "_config_files", [config_file]) + + discovered = source.discover() + + # Should have connections but no variables + assert any(k.startswith("connections.") for k in discovered.keys()) + assert not any(k.startswith("variables.") for k in discovered.keys()) + + def test_no_variables_section(self): + """Test that config without [variables] section works correctly.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_file = Path(temp_dir) / "config" + config_file.write_text( + """ +[connections] +accountname = test_account +username = test_user +""" + ) + + source = SnowSQLConfigFile() + setattr(source, "_config_files", [config_file]) + + discovered = source.discover() + + # Should have connections but no variables + assert any(k.startswith("connections.") for k in discovered.keys()) + assert not any(k.startswith("variables.") for k in discovered.keys()) + + def test_variables_merged_from_multiple_files(self): + """Test that variables from multiple SnowSQL config files are merged.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_file1 = Path(temp_dir) / "config1" + config_file1.write_text( + """ +[variables] +var1=value1 +var2=original_value2 +""" + ) + + config_file2 = Path(temp_dir) / "config2" + config_file2.write_text( + """ +[variables] +var2=overridden_value2 +var3=value3 +""" + ) + + source = SnowSQLConfigFile() + setattr(source, "_config_files", [config_file1, config_file2]) + + discovered = source.discover() + + # var1 from file1 should be present + assert discovered["variables.var1"].value == "value1" + + # var2 should be overridden by file2 + assert discovered["variables.var2"].value == 
"overridden_value2" + + # var3 from file2 should be present + assert discovered["variables.var3"].value == "value3" + + def test_variables_with_special_characters(self): + """Test that variables with special characters in values are handled.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_file = Path(temp_dir) / "config" + config_file.write_text( + """ +[variables] +var_with_equals=key=value +var_with_spaces=value with spaces +var_with_quotes='quoted value' +""" + ) + + source = SnowSQLConfigFile() + setattr(source, "_config_files", [config_file]) + + discovered = source.discover() + + assert discovered["variables.var_with_equals"].value == "key=value" + assert discovered["variables.var_with_spaces"].value == "value with spaces" + assert discovered["variables.var_with_quotes"].value == "'quoted value'" + + +class TestAlternativeConfigProviderVariables: + """Tests for getting variables section from AlternativeConfigProvider.""" + + def test_get_variables_section(self): + """Test get_section('variables') returns flat dict without prefix.""" + from snowflake.cli.api.config_provider import AlternativeConfigProvider + + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "variables.var1": "value1", + "variables.var2": "value2", + "connections.default.account": "test_account", + } + setattr(provider, "_initialized", True) + # Prevent re-initialization + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + setattr( + provider, + "_last_config_override", + get_cli_context().config_file_override, + ) + except Exception: + setattr(provider, "_last_config_override", None) + + result = provider.get_section("variables") + + # Should return flat dict without variables. 
prefix + assert result == {"var1": "value1", "var2": "value2"} + + def test_get_variables_section_empty(self): + """Test get_section('variables') with no variables returns empty dict.""" + from snowflake.cli.api.config_provider import AlternativeConfigProvider + + provider = AlternativeConfigProvider() + + with mock.patch.object(provider, "_resolver") as mock_resolver: + mock_resolver.resolve.return_value = { + "connections.default.account": "test_account", + } + setattr(provider, "_initialized", True) + # Prevent re-initialization + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + setattr( + provider, + "_last_config_override", + get_cli_context().config_file_override, + ) + except Exception: + setattr(provider, "_last_config_override", None) + + result = provider.get_section("variables") + + assert result == {} + + +class TestGetMergedVariables: + """Tests for get_merged_variables() utility function.""" + + def test_get_merged_variables_no_cli_params(self): + """Test get_merged_variables with only SnowSQL variables.""" + with mock.patch( + "snowflake.cli.api.config_provider.get_config_provider_singleton" + ) as mock_provider: + mock_instance = mock.Mock() + mock_instance.get_section.return_value = { + "var1": "snowsql_value1", + "var2": "snowsql_value2", + } + mock_provider.return_value = mock_instance + + result = get_merged_variables(None) + + assert result == {"var1": "snowsql_value1", "var2": "snowsql_value2"} + mock_instance.get_section.assert_called_once_with("variables") + + def test_get_merged_variables_with_cli_params(self): + """Test get_merged_variables with both SnowSQL and CLI -D parameters.""" + with mock.patch( + "snowflake.cli.api.config_provider.get_config_provider_singleton" + ) as mock_provider: + mock_instance = mock.Mock() + mock_instance.get_section.return_value = { + "var1": "snowsql_value1", + "var2": "snowsql_value2", + } + mock_provider.return_value = mock_instance + + cli_vars = ["var2=cli_value2", 
"var3=cli_value3"] + result = get_merged_variables(cli_vars) + + # var1 from SnowSQL + assert result["var1"] == "snowsql_value1" + # var2 should be overridden by CLI + assert result["var2"] == "cli_value2" + # var3 from CLI + assert result["var3"] == "cli_value3" + + def test_get_merged_variables_cli_only(self): + """Test get_merged_variables with only CLI -D parameters.""" + with mock.patch( + "snowflake.cli.api.config_provider.get_config_provider_singleton" + ) as mock_provider: + mock_instance = mock.Mock() + mock_instance.get_section.return_value = {} + mock_provider.return_value = mock_instance + + cli_vars = ["var1=cli_value1", "var2=cli_value2"] + result = get_merged_variables(cli_vars) + + assert result == {"var1": "cli_value1", "var2": "cli_value2"} + + def test_get_merged_variables_precedence(self): + """Test that CLI -D parameters have higher precedence than SnowSQL variables.""" + with mock.patch( + "snowflake.cli.api.config_provider.get_config_provider_singleton" + ) as mock_provider: + mock_instance = mock.Mock() + mock_instance.get_section.return_value = { + "database": "snowsql_db", + "schema": "snowsql_schema", + "custom_var": "snowsql_value", + } + mock_provider.return_value = mock_instance + + cli_vars = ["database=cli_db", "custom_var=cli_value"] + result = get_merged_variables(cli_vars) + + # CLI should override SnowSQL + assert result["database"] == "cli_db" + assert result["custom_var"] == "cli_value" + # SnowSQL value should remain for non-overridden keys + assert result["schema"] == "snowsql_schema" + + def test_get_merged_variables_provider_error(self): + """Test get_merged_variables handles provider errors gracefully.""" + with mock.patch( + "snowflake.cli.api.config_provider.get_config_provider_singleton" + ) as mock_provider: + mock_instance = mock.Mock() + mock_instance.get_section.side_effect = Exception("Provider error") + mock_provider.return_value = mock_instance + + cli_vars = ["var1=cli_value1"] + result = 
get_merged_variables(cli_vars) + + # Should fall back to only CLI variables + assert result == {"var1": "cli_value1"} + + def test_get_merged_variables_empty(self): + """Test get_merged_variables with no variables at all.""" + with mock.patch( + "snowflake.cli.api.config_provider.get_config_provider_singleton" + ) as mock_provider: + mock_instance = mock.Mock() + mock_instance.get_section.return_value = {} + mock_provider.return_value = mock_instance + + result = get_merged_variables(None) + + assert result == {} + + +class TestConfigurationResolverVariables: + """Integration tests for variables in ConfigurationResolver.""" + + def test_resolver_with_variables(self): + """Test that resolver correctly processes variables from SnowSQL config.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_file = Path(temp_dir) / "config" + config_file.write_text( + """ +[connections] +accountname = test_account + +[variables] +var1=value1 +var2=value2 +""" + ) + + source = SnowSQLConfigFile() + setattr(source, "_config_files", [config_file]) + + resolver = ConfigurationResolver(sources=[source]) + config = resolver.resolve() + + assert "variables.var1" in config + assert "variables.var2" in config + assert config["variables.var1"] == "value1" + assert config["variables.var2"] == "value2" + + +class TestSnowSQLSectionEnum: + """Tests for SnowSQLSection enum.""" + + def test_section_enum_values(self): + """Test that SnowSQLSection enum has correct values.""" + assert SnowSQLSection.CONNECTIONS.value == "connections" + assert SnowSQLSection.VARIABLES.value == "variables" + assert SnowSQLSection.OPTIONS.value == "options" + + def test_section_enum_in_snowsql_source(self): + """Test that SnowSQLConfigFile uses the enum.""" + with tempfile.TemporaryDirectory() as temp_dir: + config_file = Path(temp_dir) / "config" + config_file.write_text( + """ +[connections] +accountname = test_account + +[variables] +var1=value1 +""" + ) + + source = SnowSQLConfigFile() + setattr(source, 
"_config_files", [config_file]) + + # Should discover both connections and variables + discovered = source.discover() + + assert any(k.startswith("connections.") for k in discovered.keys()) + assert any(k.startswith("variables.") for k in discovered.keys()) From 55376b361d5ba3e342f8c257e08f8021a25f1d5d Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 20 Oct 2025 15:59:09 +0200 Subject: [PATCH 54/78] SNOW-2306184: config refactor - separate parsing from reading and move history handling logic to own class --- src/snowflake/cli/api/config_ng/__init__.py | 25 + src/snowflake/cli/api/config_ng/constants.py | 61 ++ src/snowflake/cli/api/config_ng/core.py | 20 +- src/snowflake/cli/api/config_ng/dict_utils.py | 47 ++ .../cli/api/config_ng/merge_operations.py | 118 ++++ src/snowflake/cli/api/config_ng/parsers.py | 142 ++++ src/snowflake/cli/api/config_ng/resolver.py | 401 +++++++++-- .../cli/api/config_ng/source_factory.py | 62 ++ .../cli/api/config_ng/source_manager.py | 87 +++ src/snowflake/cli/api/config_ng/sources.py | 652 +++++++++--------- src/snowflake/cli/api/config_provider.py | 343 +++------ tests/api/test_config_provider.py | 31 - tests/config_ng/test_constants.py | 80 +++ tests/config_ng/test_merge_operations.py | 269 ++++++++ tests/config_ng/test_parsers.py | 345 +++++++++ tests/config_ng/test_snowsql_variables.py | 164 +++-- tests/config_ng/test_sources.py | 380 ++++++++++ tests/test_config_provider_integration.py | 83 +-- 18 files changed, 2497 insertions(+), 813 deletions(-) create mode 100644 src/snowflake/cli/api/config_ng/constants.py create mode 100644 src/snowflake/cli/api/config_ng/dict_utils.py create mode 100644 src/snowflake/cli/api/config_ng/merge_operations.py create mode 100644 src/snowflake/cli/api/config_ng/parsers.py create mode 100644 src/snowflake/cli/api/config_ng/source_factory.py create mode 100644 src/snowflake/cli/api/config_ng/source_manager.py create mode 100644 tests/config_ng/test_constants.py create mode 100644 
tests/config_ng/test_merge_operations.py create mode 100644 tests/config_ng/test_parsers.py create mode 100644 tests/config_ng/test_sources.py diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index 27a762b9be..cbe465eaa6 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -24,6 +24,11 @@ - Read-only, immutable configuration sources """ +from snowflake.cli.api.config_ng.constants import ( + FILE_SOURCE_NAMES, + INTERNAL_CLI_PARAMETERS, + ConfigSection, +) from snowflake.cli.api.config_ng.core import ( ConfigValue, ResolutionEntry, @@ -31,6 +36,13 @@ SourceType, ValueSource, ) +from snowflake.cli.api.config_ng.dict_utils import deep_merge +from snowflake.cli.api.config_ng.merge_operations import ( + create_default_connection_from_params, + extract_root_level_connection_params, + merge_params_into_connections, +) +from snowflake.cli.api.config_ng.parsers import SnowSQLParser, TOMLParser from snowflake.cli.api.config_ng.presentation import ResolutionPresenter from snowflake.cli.api.config_ng.resolution_logger import ( check_value_source, @@ -47,6 +59,8 @@ ConfigurationResolver, ResolutionHistoryTracker, ) +from snowflake.cli.api.config_ng.source_factory import create_default_sources +from snowflake.cli.api.config_ng.source_manager import SourceManager from snowflake.cli.api.config_ng.sources import ( CliConfigFile, CliEnvironment, @@ -64,17 +78,25 @@ "CliConfigFile", "CliEnvironment", "CliParameters", + "ConfigSection", "ConfigurationResolver", "ConfigValue", "ConnectionsConfigFile", "ConnectionSpecificEnvironment", + "create_default_connection_from_params", + "create_default_sources", + "deep_merge", "explain_configuration", "export_resolution_history", + "extract_root_level_connection_params", + "FILE_SOURCE_NAMES", "format_summary_for_display", "get_merged_variables", "get_resolution_summary", "get_resolver", + "INTERNAL_CLI_PARAMETERS", 
"is_resolution_logging_available", + "merge_params_into_connections", "ResolutionEntry", "ResolutionHistory", "ResolutionHistoryTracker", @@ -83,7 +105,10 @@ "show_resolution_chain", "SnowSQLConfigFile", "SnowSQLEnvironment", + "SnowSQLParser", "SnowSQLSection", + "SourceManager", "SourceType", + "TOMLParser", "ValueSource", ] diff --git a/src/snowflake/cli/api/config_ng/constants.py b/src/snowflake/cli/api/config_ng/constants.py new file mode 100644 index 0000000000..40b938507a --- /dev/null +++ b/src/snowflake/cli/api/config_ng/constants.py @@ -0,0 +1,61 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Constants for configuration system.""" + +from enum import Enum +from typing import Final, Literal + + +class ConfigSection(str, Enum): + """Configuration section names.""" + + CONNECTIONS = "connections" + VARIABLES = "variables" + CLI = "cli" + CLI_LOGS = "cli.logs" + CLI_FEATURES = "cli.features" + + def __str__(self) -> str: + """Return the string value for backward compatibility.""" + return self.value + + +# Environment variable names +SNOWFLAKE_HOME_ENV: Final[str] = "SNOWFLAKE_HOME" + +# Internal CLI parameters that should not be treated as connection parameters +INTERNAL_CLI_PARAMETERS: Final[set[str]] = { + "enable_diag", + "temporary_connection", + "default_connection_name", + "connection_name", + "diag_log_path", + "diag_allowlist_path", + "mfa_passcode", +} + +# Define Literal type for file source names +FileSourceName = Literal[ + "snowsql_config", + "cli_config_toml", + "connections_toml", +] + +# Source names that represent file-based configuration sources +FILE_SOURCE_NAMES: Final[set[str]] = { + "snowsql_config", + "cli_config_toml", + "connections_toml", +} diff --git a/src/snowflake/cli/api/config_ng/core.py b/src/snowflake/cli/api/config_ng/core.py index 3377f81dd7..7d8bff20f0 100644 --- a/src/snowflake/cli/api/config_ng/core.py +++ b/src/snowflake/cli/api/config_ng/core.py @@ -127,16 +127,26 @@ def source_type(self) -> SourceType: ... @abstractmethod - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ - Discover configuration values from this source. + Discover configuration values as nested dict structure. + + Sources return configuration as nested dictionaries that reflect + the natural structure of the configuration. 
For example: + {"connections": {"prod": {"account": "val"}}} + + Empty connections are represented as empty dicts: + {"connections": {"prod": {}}} + + General parameters (not connection-specific) are at the root level: + {"database": "mydb", "role": "myrole"} Args: - key: Specific key to discover, or None to discover all values + key: Specific key path to discover (dot-separated), or None for all Returns: - Dictionary mapping configuration keys to ConfigValue objects. - Returns empty dict if no values found. + Nested dictionary of configuration values. Returns empty dict + if no values found. """ ... diff --git a/src/snowflake/cli/api/config_ng/dict_utils.py b/src/snowflake/cli/api/config_ng/dict_utils.py new file mode 100644 index 0000000000..740e1b31f2 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/dict_utils.py @@ -0,0 +1,47 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utility functions for nested dictionary operations.""" + +from typing import Any, Dict + + +def deep_merge(base: Dict[str, Any], overlay: Dict[str, Any]) -> Dict[str, Any]: + """ + Deep merge two dictionaries. Overlay values win on conflict. + + Recursively merges nested dictionaries. Non-dict values from overlay + replace values in base. 
+ + Example: + base = {"a": {"b": 1, "c": 2}} + overlay = {"a": {"c": 3, "d": 4}} + result = {"a": {"b": 1, "c": 3, "d": 4}} + + Args: + base: Base dictionary + overlay: Overlay dictionary (wins on conflicts) + + Returns: + Merged dictionary + """ + result = base.copy() + + for key, value in overlay.items(): + if key in result and isinstance(result[key], dict) and isinstance(value, dict): + result[key] = deep_merge(result[key], value) + else: + result[key] = value + + return result diff --git a/src/snowflake/cli/api/config_ng/merge_operations.py b/src/snowflake/cli/api/config_ng/merge_operations.py new file mode 100644 index 0000000000..f789a66632 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/merge_operations.py @@ -0,0 +1,118 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Pure functions for configuration merging operations.""" + +from typing import Any, Dict + +from snowflake.cli.api.config_ng.constants import ( + INTERNAL_CLI_PARAMETERS, + ConfigSection, +) + + +def extract_root_level_connection_params( + config: Dict[str, Any], +) -> tuple[Dict[str, Any], Dict[str, Any]]: + """ + Extract root-level connection parameters from config. + + Connection parameters at root level (not under any section) should + be treated as general connection parameters that apply to all connections. 
+ + Args: + config: Configuration dictionary with mixed sections and parameters + + Returns: + Tuple of (connection_params, remaining_config) + + Example: + Input: {"account": "acc", "cli": {...}, "connections": {...}} + Output: ({"account": "acc"}, {"cli": {...}, "connections": {...}}) + """ + known_sections = {s.value for s in ConfigSection} + + connection_params = {} + remaining = {} + + for key, value in config.items(): + # Check if this key is a known section or internal parameter + is_section = key in known_sections or any( + key.startswith(s + ".") for s in known_sections + ) + is_internal = key in INTERNAL_CLI_PARAMETERS + + if not is_section and not is_internal: + # Root-level parameter that's not a section = connection parameter + connection_params[key] = value + else: + remaining[key] = value + + return connection_params, remaining + + +def merge_params_into_connections( + connections: Dict[str, Dict[str, Any]], params: Dict[str, Any] +) -> Dict[str, Dict[str, Any]]: + """ + Merge parameters into all existing connections. + + Used for overlay sources where root-level connection params apply to all connections. + The params overlay (override) values in each connection. 
+ + Args: + connections: Dictionary of connection configurations + params: Parameters to merge into each connection + + Returns: + Dictionary of connections with params merged in + + Example: + Input: + connections = {"dev": {"account": "dev_acc", "user": "dev_user"}} + params = {"user": "override_user", "password": "new_pass"} + Output: + {"dev": {"account": "dev_acc", "user": "override_user", "password": "new_pass"}} + """ + from snowflake.cli.api.config_ng.dict_utils import deep_merge + + result = {} + for conn_name, conn_config in connections.items(): + if isinstance(conn_config, dict): + result[conn_name] = deep_merge(conn_config, params) + else: + result[conn_name] = conn_config + + return result + + +def create_default_connection_from_params( + params: Dict[str, Any], +) -> Dict[str, Dict[str, Any]]: + """ + Create a default connection from connection parameters. + + Args: + params: Connection parameters + + Returns: + Dictionary with "default" connection containing the params + + Example: + Input: {"account": "acc", "user": "usr"} + Output: {"default": {"account": "acc", "user": "usr"}} + """ + if not params: + return {} + return {"default": params.copy()} diff --git a/src/snowflake/cli/api/config_ng/parsers.py b/src/snowflake/cli/api/config_ng/parsers.py new file mode 100644 index 0000000000..8d6754714f --- /dev/null +++ b/src/snowflake/cli/api/config_ng/parsers.py @@ -0,0 +1,142 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Configuration parsers - decouple parsing from file I/O.""" + +import configparser +from typing import Any, Dict + +# Try to import tomllib (Python 3.11+) or fall back to tomli +try: + import tomllib +except ImportError: + import tomli as tomllib # type: ignore + + +class SnowSQLParser: + """Parse SnowSQL INI format to nested dict.""" + + # Mapping of SnowSQL key names to CLI standard names + SNOWSQL_KEY_MAP: Dict[str, str] = { + "accountname": "account", + "username": "user", + "rolename": "role", + "warehousename": "warehouse", + "schemaname": "schema", + "dbname": "database", + "pwd": "password", + # Keys that don't need mapping (already correct) + "password": "password", + "database": "database", + "schema": "schema", + "role": "role", + "warehouse": "warehouse", + "host": "host", + "port": "port", + "protocol": "protocol", + "authenticator": "authenticator", + "private_key_path": "private_key_path", + "private_key_passphrase": "private_key_passphrase", + "account": "account", + "user": "user", + } + + @classmethod + def parse(cls, content: str) -> Dict[str, Any]: + """ + Parse SnowSQL INI format from string. + + Args: + content: INI format configuration as string + + Returns: + Nested dict: {"connections": {...}, "variables": {...}} + + Example: + Input: + [connections.dev] + accountname = myaccount + username = myuser + + [variables] + stage = mystage + + Output: + { + "connections": { + "dev": {"account": "myaccount", "user": "myuser"} + }, + "variables": {"stage": "mystage"} + } + """ + config = configparser.ConfigParser() + config.read_string(content) + + result: Dict[str, Any] = {} + + for section in config.sections(): + if section.startswith("connections"): + # Extract connection name from section + if section == "connections": + conn_name = "default" + else: + conn_name = ( + section.split(".", 1)[1] if "." 
in section else "default" + ) + + # Ensure connections dict exists + if "connections" not in result: + result["connections"] = {} + if conn_name not in result["connections"]: + result["connections"][conn_name] = {} + + # Map keys and add to connection + for key, value in config[section].items(): + mapped_key = cls.SNOWSQL_KEY_MAP.get(key, key) + result["connections"][conn_name][mapped_key] = value + + elif section == "variables": + # Process variables section + result["variables"] = dict(config[section]) + + return result + + +class TOMLParser: + """Parse TOML format to nested dict.""" + + @staticmethod + def parse(content: str) -> Dict[str, Any]: + """ + Parse TOML format from string. + + TOML is already nested, so this just wraps tomllib.loads(). + All TOML sources (CLI config, connections.toml) use this parser. + + Args: + content: TOML format configuration as string + + Returns: + Nested dict with TOML structure preserved + + Example: + Input: + [connections.prod] + account = "myaccount" + user = "myuser" + + Output: + {"connections": {"prod": {"account": "myaccount", "user": "myuser"}}} + """ + return tomllib.loads(content) diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index 2f690263a1..91a8d32921 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -78,6 +78,60 @@ def clear(self) -> None: self._histories.clear() self._discoveries.clear() + def _flatten_nested_dict( + self, nested: Dict[str, Any], prefix: str = "" + ) -> Dict[str, Any]: + """ + Flatten nested dict to dot-separated keys for internal storage. 
+ + Args: + nested: Nested dictionary structure + prefix: Current key prefix + + Returns: + Flat dictionary with dot-separated keys + + Example: + {"connections": {"test": {"account": "val"}}} + -> {"connections.test.account": "val"} + """ + result = {} + for key, value in nested.items(): + flat_key = f"{prefix}.{key}" if prefix else key + + if isinstance(value, dict) and value: + # Recursively flatten nested dicts + result.update(self._flatten_nested_dict(value, flat_key)) + else: + # Leaf value - store it + result[flat_key] = value + + return result + + def record_nested_discovery( + self, nested_data: Dict[str, Any], source_name: str + ) -> None: + """ + Record discoveries from a source that returns nested dict. + + Args: + nested_data: Nested dictionary from source + source_name: Name of the source providing this data + """ + if not self._enabled: + return + + # Flatten the nested data + flat_data = self._flatten_nested_dict(nested_data) + + # Record each flat key + timestamp = datetime.now() + for flat_key, value in flat_data.items(): + config_value = ConfigValue( + key=flat_key, value=value, source_name=source_name + ) + self._discoveries[flat_key].append((config_value, timestamp)) + def record_discovery(self, key: str, config_value: ConfigValue) -> None: """ Record a value discovery from a source. @@ -168,6 +222,92 @@ def get_all_histories(self) -> Dict[str, ResolutionHistory]: """ return self._histories.copy() + def finalize_with_result(self, final_config: Dict[str, Any]) -> None: + """ + Mark which values were selected in the final configuration. + + This method flattens the final nested config and marks the selected + source for each value. 
+ + Args: + final_config: The final resolved configuration (nested dict) + """ + if not self._enabled: + return + + # Flatten final config to identify which values were selected + flat_final = self._flatten_nested_dict(final_config) + + # For each flat key in final config, find which source provided it + for flat_key, final_value in flat_final.items(): + # Check if this key has discoveries + if flat_key not in self._discoveries: + continue + + # Find the entry with matching value (should be highest priority) + discoveries = self._discoveries[flat_key] + for config_value, timestamp in reversed( + discoveries + ): # Check from highest to lowest + if config_value.value == final_value: + self.mark_selected(flat_key, config_value.source_name) + break + + def record_general_params_merged_to_connections( + self, + general_params: Dict[str, Any], + connection_names: List[str], + source_name: str, + ) -> None: + """ + Record when general parameters are merged into connections. + + When overlay sources provide general params (like SNOWFLAKE_ACCOUNT), + these get merged into each existing connection. This method records + that merge operation for history tracking. + + Args: + general_params: Dictionary of general parameters + connection_names: List of connection names to merge into + source_name: Name of the source providing these params + """ + if not self._enabled: + return + + timestamp = datetime.now() + for param_key, param_value in general_params.items(): + # Record for each connection + for conn_name in connection_names: + flat_key = f"connections.{conn_name}.{param_key}" + config_value = ConfigValue( + key=flat_key, value=param_value, source_name=source_name + ) + self._discoveries[flat_key].append((config_value, timestamp)) + + def replicate_root_level_discoveries_to_connection( + self, param_keys: List[str], connection_name: str + ) -> None: + """ + Replicate discoveries from root-level keys to connection-specific keys. 
+ + This is used when creating a default connection from general parameters + (e.g., SNOWFLAKE_ACCOUNT -> connections.default.account). + + Args: + param_keys: List of parameter keys that exist at root level + connection_name: Name of the connection to replicate discoveries to + """ + if not self._enabled: + return + + for param_key in param_keys: + # Check if we have discoveries for the root-level key + if param_key in self._discoveries: + conn_key = f"connections.{connection_name}.{param_key}" + # Copy all discoveries from root to connection location + for config_value, timestamp in self._discoveries[param_key]: + self._discoveries[conn_key].append((config_value, timestamp)) + def get_summary(self) -> dict: """ Get summary statistics about configuration resolution. @@ -417,11 +557,9 @@ def _replace_connections( ) file_connections[conn_name] = conn_params - def _resolve_file_sources( - self, key: Optional[str] - ) -> Tuple[Dict[str, Dict[str, ConfigValue]], Dict[str, ConfigValue]]: + def _resolve_file_sources(self, key: Optional[str] = None) -> Dict[str, Any]: """ - Process all FILE sources with connection-level replacement semantics. + Process FILE sources with connection-level replacement semantics. FILE sources replace entire connections rather than merging fields. Later FILE sources override earlier ones completely. 
@@ -430,34 +568,37 @@ def _resolve_file_sources( key: Specific key to resolve (None = all keys) Returns: - Tuple of (file_connections, file_flat_values): - - file_connections: Dict mapping connection name to its parameters - - file_flat_values: Dict of flat configuration keys + Nested dict with merged file source data """ - file_connections: Dict[str, Dict[str, ConfigValue]] = defaultdict(dict) - file_flat_values: Dict[str, ConfigValue] = {} + result: Dict[str, Any] = {} for source in self._get_sources_by_type(SourceType.FILE): try: - source_values = source.discover(key) - self._record_discoveries(source_values) - - # Process this source's values - per_conn, empty_conns = self._group_by_connection(source_values) - flat_values = self._extract_flat_values(source_values) + source_data = source.discover(key) # Already nested! - # Replace connections (entire connection replacement) - self._replace_connections( - file_connections, per_conn, empty_conns, source + # Record discoveries for history tracking + self._history_tracker.record_nested_discovery( + source_data, source.source_name ) - # Update flat values - file_flat_values.update(flat_values) + # For FILE sources: connection-level replacement + if "connections" in source_data: + if "connections" not in result: + result["connections"] = {} + + # Replace entire connections (not merge) + for conn_name, conn_data in source_data["connections"].items(): + result["connections"][conn_name] = conn_data + + # Merge other top-level keys + for k, v in source_data.items(): + if k != "connections": + result[k] = v except Exception as e: log.warning("Error from source %s: %s", source.source_name, e) - return file_connections, file_flat_values + return result def _merge_file_results( self, @@ -486,79 +627,190 @@ def _merge_file_results( return all_values def _apply_overlay_sources( - self, all_values: Dict[str, ConfigValue], key: Optional[str] - ) -> Dict[str, ConfigValue]: + self, base: Dict[str, Any], key: Optional[str] = None + 
) -> Dict[str, Any]: """ - Apply OVERLAY sources with field-level overlay semantics. + Apply OVERLAY sources with field-level merging. OVERLAY sources (env vars, CLI args) add or override individual fields - without replacing entire connections. + without replacing entire connections. General params are merged into + each existing connection. Args: - all_values: Current configuration values (typically from file sources) + base: Base configuration (typically from file sources) key: Specific key to resolve (None = all keys) Returns: Updated dictionary with overlay values applied """ + from snowflake.cli.api.config_ng.dict_utils import deep_merge + from snowflake.cli.api.config_ng.merge_operations import ( + extract_root_level_connection_params, + merge_params_into_connections, + ) + + result = base.copy() + for source in self._get_sources_by_type(SourceType.OVERLAY): try: - source_values = source.discover(key) + source_data = source.discover(key) + + # Record discoveries for history tracking + self._history_tracker.record_nested_discovery( + source_data, source.source_name + ) + + # Separate general connection params from other data + general_params, other_data = extract_root_level_connection_params( + source_data + ) - # Record and apply overlays (field-level) - for k, config_value in source_values.items(): - self._history_tracker.record_discovery(k, config_value) - all_values[k] = config_value + # First, merge connection-specific data and internal params + result = deep_merge(result, other_data) + + # Then, merge general params into all existing connections + if general_params and "connections" in result and result["connections"]: + connection_names = [ + name + for name in result["connections"] + if isinstance(result["connections"][name], dict) + ] + + # Record history for general params being merged into connections + self._history_tracker.record_general_params_merged_to_connections( + general_params, connection_names, source.source_name + ) + + # Merge general 
params into existing connections + result["connections"] = merge_params_into_connections( + result["connections"], general_params + ) + elif general_params: + # No connections exist yet, keep general params at root + # for default connection creation later + result = deep_merge(result, general_params) except Exception as e: log.warning("Error from source %s: %s", source.source_name, e) - return all_values + # Final cleanup: merge any remaining root-level general params into all connections + # This handles params from early sources that were added before connections existed + if "connections" in result and result["connections"]: + remaining_general_params, _ = extract_root_level_connection_params(result) + + if remaining_general_params: + # Merge remaining params into connections (connection values take precedence) + for conn_name in result["connections"]: + if isinstance(result["connections"][conn_name], dict): + result["connections"][conn_name] = deep_merge( + remaining_general_params, result["connections"][conn_name] + ) + + # Remove general params from root since they're now in connections + for key in remaining_general_params: + if key in result: + result.pop(key) + + return result + + def _ensure_default_connection(self, config: Dict[str, Any]) -> Dict[str, Any]: + """ + Ensure a default connection exists when general connection params are present. + + Border conditions for creating default connection: + 1. No connections exist in config (empty or missing "connections" key) + 2. At least one general connection parameter exists at root level + 3. General params are NOT internal CLI parameters or variables + + This allows users to set SNOWFLAKE_ACCOUNT, SNOWFLAKE_USER etc. without + needing --temporary-connection flag or defining connections in config files. 
+ + Args: + config: Resolved configuration dictionary + + Returns: + Configuration with default connection created if conditions are met + """ + from snowflake.cli.api.config_ng.constants import INTERNAL_CLI_PARAMETERS + + # Check if connections already exist + connections = config.get("connections", {}) + if connections: + return config # Connections exist, nothing to do + + # Identify general connection parameters (root-level, non-internal) + general_params = {} + for key, value in config.items(): + if ( + key not in ("connections", "variables") + and key not in INTERNAL_CLI_PARAMETERS + ): + general_params[key] = value + + # If no general params, nothing to create + if not general_params: + return config + + # Create default connection with general params + result = config.copy() + result["connections"] = {"default": general_params.copy()} + + # Record history for moved parameters + self._history_tracker.replicate_root_level_discoveries_to_connection( + list(general_params.keys()), "default" + ) + + # Remove general params from root level (they're now in default connection) + for key in general_params: + result.pop(key, None) + + return result def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, Any]: """ - Resolve configuration values from all sources with history tracking. + Resolve configuration to nested dict. - Resolution Process (Two-Phase): + Resolution Process (Four-Phase): Phase A - File Sources (Connection-Level Replacement): - 1. Process FILE sources in precedence order (lowest to highest priority) - 2. For each connection, later FILE sources completely REPLACE earlier ones - 3. Fields from earlier file sources are NOT inherited + - Process FILE sources in precedence order (lowest to highest priority) + - For each connection, later FILE sources completely REPLACE earlier ones + - Fields from earlier file sources are NOT inherited Phase B - Overlay Sources (Field-Level Overlay): - 4. 
Start with the file-derived connection snapshot - 5. Process OVERLAY sources (env vars, CLI args) in precedence order - 6. These add/override individual fields without replacing entire connections - 7. For flat keys: later sources overwrite earlier sources + - Start with the file-derived configuration + - Process OVERLAY sources (env vars, CLI args) in precedence order + - These add/override individual fields without replacing entire connections + - Uses deep merge for nested structures + + Phase C - Default Connection Creation: + - If no connections exist but general params present, create "default" connection + - Allows env-only configuration without --temporary-connection flag + + Phase D - Resolution History Finalization: + - Mark which values were selected in the final configuration + - Enables debugging and diagnostics Args: key: Specific key to resolve (None = all keys) default: Default value if key not found Returns: - Dictionary of resolved values (key -> value) + Nested dictionary of resolved configuration """ - # Phase A: Process FILE sources (connection-level replacement) - file_connections, file_flat_values = self._resolve_file_sources(key) - - # Start with file-derived snapshot - all_values = self._merge_file_results(file_connections, file_flat_values) + # Phase A: FILE sources (connection-level replacement) + result = self._resolve_file_sources(key) - # Phase B: Process OVERLAY sources (field-level overlay) - all_values = self._apply_overlay_sources(all_values, key) + # Phase B: OVERLAY sources (field-level overlay with deep merge) + result = self._apply_overlay_sources(result, key) - # Mark selected values in history - self._finalize_history(all_values) + # Phase C: Ensure default connection exists if general params present + result = self._ensure_default_connection(result) - # Convert ConfigValue objects to plain values - resolved = {k: v.value for k, v in all_values.items()} + # Phase D: Finalize resolution history + 
self._finalize_resolution_history(result) - # Handle default for specific key - if key is not None and key not in resolved: - resolved = self._apply_default(resolved, key, default) - - return resolved + return result def resolve_value(self, key: str, default: Any = None) -> Any: """ @@ -605,17 +857,46 @@ def get_tracker(self) -> ResolutionHistoryTracker: """ return self._history_tracker + def _finalize_resolution_history(self, final_config: Dict[str, Any]) -> None: + """ + Mark which values were selected in final configuration. + + Delegates to the history tracker which handles all history-related logic. + + Args: + final_config: The final resolved configuration (nested dict) + """ + self._history_tracker.finalize_with_result(final_config) + def get_resolution_history(self, key: str) -> Optional[ResolutionHistory]: """ Get complete resolution history for a key. + Supports both formats: + - Flat: "connections.test.account" + - Root-level: "account" (checks connections for this key) + Args: - key: Configuration key + key: Configuration key (flat or simple) Returns: ResolutionHistory showing the full precedence chain """ - return self._history_tracker.get_history(key) + # First, try exact match + history = self._history_tracker.get_history(key) + if history: + return history + + # If not found and it's a simple key (no dots), search in connections + if "." 
not in key: + # Look for any connection that has this key + all_histories = self._history_tracker.get_all_histories() + for hist_key, hist in all_histories.items(): + # Match pattern: "connections.*.{key}" or root level "{key}" + if hist_key.endswith(f".{key}"): + return hist + + return None def get_all_histories(self) -> Dict[str, ResolutionHistory]: """Get resolution histories for all keys.""" diff --git a/src/snowflake/cli/api/config_ng/source_factory.py b/src/snowflake/cli/api/config_ng/source_factory.py new file mode 100644 index 0000000000..a76a36b353 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/source_factory.py @@ -0,0 +1,62 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Factory for creating configuration sources.""" + +from typing import Any, Dict, List, Optional + +from snowflake.cli.api.config_ng.core import ValueSource + + +def create_default_sources( + cli_context: Optional[Dict[str, Any]] = None, +) -> List[ValueSource]: + """ + Create default source list in precedence order. + + Creates the standard 7-source configuration stack from lowest + to highest priority: + 1. SnowSQL config files (merged) + 2. CLI config.toml (first-found) + 3. Dedicated connections.toml + 4. SnowSQL environment variables (SNOWSQL_*) + 5. Connection-specific environment variables (SNOWFLAKE_CONNECTIONS_*) + 6. General CLI environment variables (SNOWFLAKE_*) + 7. 
CLI command-line arguments (highest priority) + + Args: + cli_context: Optional CLI context dictionary for CliParameters source + + Returns: + List of ValueSource instances in precedence order + """ + from snowflake.cli.api.config_ng import ( + CliConfigFile, + CliEnvironment, + CliParameters, + ConnectionsConfigFile, + ConnectionSpecificEnvironment, + SnowSQLConfigFile, + SnowSQLEnvironment, + ) + + return [ + SnowSQLConfigFile(), + CliConfigFile(), + ConnectionsConfigFile(), + SnowSQLEnvironment(), + ConnectionSpecificEnvironment(), + CliEnvironment(), + CliParameters(cli_context=cli_context or {}), + ] diff --git a/src/snowflake/cli/api/config_ng/source_manager.py b/src/snowflake/cli/api/config_ng/source_manager.py new file mode 100644 index 0000000000..4a13c9918a --- /dev/null +++ b/src/snowflake/cli/api/config_ng/source_manager.py @@ -0,0 +1,87 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Manager for configuration sources.""" + +from typing import Any, Dict, List, Optional + +from snowflake.cli.api.config_ng.constants import FILE_SOURCE_NAMES +from snowflake.cli.api.config_ng.core import ValueSource + + +class SourceManager: + """ + Manages configuration sources and derives their priorities. + + Provides a clean interface for working with configuration sources + without exposing implementation details. + """ + + def __init__(self, sources: List[ValueSource]): + """ + Initialize with a list of sources. 
+ + Args: + sources: List of sources in precedence order (lowest to highest) + """ + self._sources = sources + + @classmethod + def with_default_sources( + cls, cli_context: Optional[Dict[str, Any]] = None + ) -> "SourceManager": + """ + Class method constructor with default sources. + + Args: + cli_context: Optional CLI context for CliParameters source + + Returns: + SourceManager configured with default 7-source stack + """ + from snowflake.cli.api.config_ng.source_factory import create_default_sources + + sources = create_default_sources(cli_context) + return cls(sources) + + def get_source_priorities(self) -> Dict[str, int]: + """ + Derive priorities from source list order. + + Priority numbers are 1-indexed (1 = lowest, higher = higher priority). + This is dynamically derived from the source list order to eliminate + duplication and ensure consistency. + + Returns: + Dictionary mapping source names to priority levels + """ + return {s.source_name: idx + 1 for idx, s in enumerate(self._sources)} + + def get_file_sources(self) -> List[ValueSource]: + """ + Get only file-based sources. + + Returns: + List of sources that are file-based + """ + return [s for s in self._sources if s.source_name in FILE_SOURCE_NAMES] + + def get_sources(self) -> List[ValueSource]: + """ + Get all sources. 
+ + Returns: + Copy of the sources list + """ + return self._sources.copy() diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 8319e086b6..2eae2dab76 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -35,7 +35,8 @@ from pathlib import Path from typing import Any, Dict, Final, List, Optional -from snowflake.cli.api.config_ng.core import ConfigValue, SourceType, ValueSource +from snowflake.cli.api.config_ng.constants import SNOWFLAKE_HOME_ENV +from snowflake.cli.api.config_ng.core import SourceType, ValueSource log = logging.getLogger(__name__) @@ -52,83 +53,67 @@ class SnowSQLSection(Enum): OPTIONS = "options" -# Try to import tomllib (Python 3.11+) or fall back to tomli -try: - import tomllib -except ImportError: - import tomli as tomllib # type: ignore - - class SnowSQLConfigFile(ValueSource): """ - SnowSQL configuration file source. + SnowSQL configuration file source with two-phase design. + + Phase 1: Acquire content (read and merge multiple config files) + Phase 2: Parse content (using SnowSQLParser) Reads multiple config files in order and MERGES them (SnowSQL behavior). Later files override earlier files for the same keys. Returns configuration for ALL connections. - Config files searched (in order, when not in test mode): - 1. Bundled default config (if in package) - 2. /etc/snowsql.cnf (system-wide) - 3. /etc/snowflake/snowsql.cnf (alternative system) - 4. /usr/local/etc/snowsql.cnf (local system) - 5. ~/.snowsql.cnf (legacy user config) - 6. ~/.snowsql/config (current user config) - - In test mode (when config_file_override is set), SnowSQL config files are skipped - to ensure test isolation. + Config files searched (in order): + 1. /etc/snowsql.cnf (system-wide) + 2. /etc/snowflake/snowsql.cnf (alternative system) + 3. /usr/local/etc/snowsql.cnf (local system) + 4. ~/.snowsql.cnf (legacy user config) + 5. 
~/.snowsql/config (current user config) """ - # SnowSQL uses different key names - map them to CLI standard names - KEY_MAPPING = { - "accountname": "account", - "username": "user", - "rolename": "role", - "warehousename": "warehouse", - "schemaname": "schema", - "dbname": "database", - "pwd": "password", - # Keys that don't need mapping (already correct) - "password": "password", - "database": "database", - "schema": "schema", - "role": "role", - "warehouse": "warehouse", - "host": "host", - "port": "port", - "protocol": "protocol", - "authenticator": "authenticator", - "private_key_path": "private_key_path", - "private_key_passphrase": "private_key_passphrase", - } + def __init__( + self, content: Optional[str] = None, config_paths: Optional[List[Path]] = None + ): + """ + Initialize SnowSQL config file source. - def __init__(self): - """Initialize SnowSQL config file source.""" - # Use SNOWFLAKE_HOME if set and directory exists, otherwise use standard paths - snowflake_home = os.environ.get("SNOWFLAKE_HOME") + Args: + content: Optional string content for testing (bypasses file I/O) + config_paths: Optional custom config file paths + """ + self._content = content + self._config_paths = config_paths or self._get_default_paths() + + @staticmethod + def _get_default_paths() -> List[Path]: + """Get standard SnowSQL config file paths.""" + snowflake_home = os.environ.get(SNOWFLAKE_HOME_ENV) if snowflake_home: snowflake_home_path = Path(snowflake_home).expanduser() if snowflake_home_path.exists(): - # Use only the SnowSQL config file within SNOWFLAKE_HOME - self._config_files = [snowflake_home_path / "config"] - else: - # SNOWFLAKE_HOME set but doesn't exist, use standard paths - self._config_files = [ - Path("/etc/snowsql.cnf"), - Path("/etc/snowflake/snowsql.cnf"), - Path("/usr/local/etc/snowsql.cnf"), - Path.home() / ".snowsql.cnf", - Path.home() / ".snowsql" / "config", - ] - else: - # Standard paths when SNOWFLAKE_HOME not set - self._config_files = [ - 
Path("/etc/snowsql.cnf"), - Path("/etc/snowflake/snowsql.cnf"), - Path("/usr/local/etc/snowsql.cnf"), - Path.home() / ".snowsql.cnf", - Path.home() / ".snowsql" / "config", - ] + return [snowflake_home_path / "config"] + + return [ + Path("/etc/snowsql.cnf"), + Path("/etc/snowflake/snowsql.cnf"), + Path("/usr/local/etc/snowsql.cnf"), + Path.home() / ".snowsql.cnf", + Path.home() / ".snowsql" / "config", + ] + + @classmethod + def from_string(cls, content: str) -> "SnowSQLConfigFile": + """ + Create source from string content (for testing). + + Args: + content: INI format configuration as string + + Returns: + SnowSQLConfigFile instance using string content + """ + return cls(content=content) @property def source_name(self) -> "ValueSource.SourceName": @@ -138,69 +123,49 @@ def source_name(self) -> "ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.FILE - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ - Read and MERGE all SnowSQL config files. - Later files override earlier files (SnowSQL merging behavior). - Returns keys in format: connections.{name}.{param} for ALL connections. + Two-phase discovery: acquire content → parse. 
+ + Phase 1: Get content (from string or by reading and merging files) + Phase 2: Parse content using SnowSQLParser + + Returns: + Nested dict structure: {"connections": {...}, "variables": {...}} """ - merged_values: Dict[str, ConfigValue] = {} + from snowflake.cli.api.config_ng.parsers import SnowSQLParser - for config_file in self._config_files: - if not config_file.exists(): - continue + # Phase 1: Content acquisition + if self._content is not None: + content = self._content + else: + content = self._read_and_merge_files() - try: - config = configparser.ConfigParser() - config.read(config_file) - - # Process all connection sections - for section in config.sections(): - if section.startswith(SnowSQLSection.CONNECTIONS.value): - # Extract connection name - if section == SnowSQLSection.CONNECTIONS.value: - # This is default connection - connection_name = "default" - else: - # Format: connections.qa6 -> qa6 - connection_name = ( - section.split(".", 1)[1] - if "." in section - else "default" - ) - - section_data = dict(config[section]) - - # Add all params for this connection - for param_key, param_value in section_data.items(): - # Map SnowSQL key names to CLI standard names - normalized_key = self.KEY_MAPPING.get(param_key, param_key) - full_key = f"connections.{connection_name}.{normalized_key}" - if key is None or full_key == key: - merged_values[full_key] = ConfigValue( - key=full_key, - value=param_value, - source_name=self.source_name, - raw_value=f"{param_key}={param_value}", # Show original key in raw_value - ) - - elif section == SnowSQLSection.VARIABLES.value: - # Process variables section (global, not connection-specific) - section_data = dict(config[section]) - for var_key, var_value in section_data.items(): - full_key = f"variables.{var_key}" - if key is None or full_key == key: - merged_values[full_key] = ConfigValue( - key=full_key, - value=var_value, - source_name=self.source_name, - raw_value=f"{var_key}={var_value}", - ) + # Phase 2: Parse 
content + return SnowSQLParser.parse(content) - except Exception as e: - log.debug("Failed to read SnowSQL config %s: %s", config_file, e) + def _read_and_merge_files(self) -> str: + """ + Read all config files and merge into single INI string. + + Returns: + Merged INI content as string + """ + merged_config = configparser.ConfigParser() + + for config_file in self._config_paths: + if config_file.exists(): + try: + merged_config.read(config_file) + except Exception as e: + log.debug("Failed to read SnowSQL config %s: %s", config_file, e) - return merged_values + # Convert merged config to string + from io import StringIO + + output = StringIO() + merged_config.write(output) + return output.getvalue() def supports_key(self, key: str) -> bool: return key in self.discover() @@ -208,7 +173,10 @@ def supports_key(self, key: str) -> bool: class CliConfigFile(ValueSource): """ - CLI config.toml file source. + CLI config.toml file source with two-phase design. + + Phase 1: Acquire content (find and read first config file) + Phase 2: Parse content (using TOMLParser) Scans for config.toml files in order and uses FIRST file found (CLI behavior). Does NOT merge multiple files - first found wins. @@ -217,12 +185,24 @@ class CliConfigFile(ValueSource): Search order (when no override is set): 1. ./config.toml (current directory) 2. ~/.snowflake/config.toml (user config) - - When config_file_override is set (e.g., in tests), only that file is used. """ - def __init__(self): - """Initialize CLI config file source.""" + def __init__( + self, content: Optional[str] = None, search_paths: Optional[List[Path]] = None + ): + """ + Initialize CLI config file source. 
+ + Args: + content: Optional string content for testing (bypasses file I/O) + search_paths: Optional custom search paths + """ + self._content = content + self._search_paths = search_paths or self._get_default_paths() + + @staticmethod + def _get_default_paths() -> List[Path]: + """Get standard CLI config search paths.""" # Check for config file override from CLI context first try: from snowflake.cli.api.cli_global_context import get_cli_context @@ -230,30 +210,35 @@ def __init__(self): cli_context = get_cli_context() config_override = cli_context.config_file_override if config_override: - self._search_paths = [Path(config_override)] - return + return [Path(config_override)] except Exception: - pass + log.debug("CLI context not available, using standard config paths") - # Use SNOWFLAKE_HOME if set and directory exists, otherwise use standard paths - snowflake_home = os.environ.get("SNOWFLAKE_HOME") + # Use SNOWFLAKE_HOME if set and directory exists + snowflake_home = os.environ.get(SNOWFLAKE_HOME_ENV) if snowflake_home: snowflake_home_path = Path(snowflake_home).expanduser() if snowflake_home_path.exists(): - # Use only config.toml within SNOWFLAKE_HOME - self._search_paths = [snowflake_home_path / "config.toml"] - else: - # SNOWFLAKE_HOME set but doesn't exist, use standard paths - self._search_paths = [ - Path.cwd() / "config.toml", - Path.home() / ".snowflake" / "config.toml", - ] - else: - # Standard paths when SNOWFLAKE_HOME not set - self._search_paths = [ - Path.cwd() / "config.toml", - Path.home() / ".snowflake" / "config.toml", - ] + return [snowflake_home_path / "config.toml"] + + # Standard paths + return [ + Path.cwd() / "config.toml", + Path.home() / ".snowflake" / "config.toml", + ] + + @classmethod + def from_string(cls, content: str) -> "CliConfigFile": + """ + Create source from TOML string (for testing). 
+ + Args: + content: TOML format configuration as string + + Returns: + CliConfigFile instance using string content + """ + return cls(content=content) @property def source_name(self) -> "ValueSource.SourceName": @@ -263,85 +248,106 @@ def source_name(self) -> "ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.FILE - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ - Find FIRST existing config file and use it (CLI behavior). - Does NOT merge multiple files. - Returns keys in format: connections.{name}.{param} for ALL connections. - """ - for config_file in self._search_paths: - if config_file.exists(): - return self._parse_toml_file(config_file, key) + Two-phase discovery: acquire content → parse. - return {} + Phase 1: Get content (from string or by reading first existing file) + Phase 2: Parse content using TOMLParser - def _parse_toml_file( - self, file_path: Path, key: Optional[str] = None - ) -> Dict[str, ConfigValue]: - """Parse TOML file and extract ALL connection configurations.""" - try: - with open(file_path, "rb") as f: - data = tomllib.load(f) - - result = {} - - # Get all connections - connections = data.get("connections", {}) - for conn_name, conn_data in connections.items(): - if isinstance(conn_data, dict): - # Process parameters if they exist - for param_key, param_value in conn_data.items(): - full_key = f"connections.{conn_name}.{param_key}" - if key is None or full_key == key: - result[full_key] = ConfigValue( - key=full_key, - value=param_value, - source_name=self.source_name, - raw_value=param_value, - ) - - # For empty connections, we need to ensure they are recognized - # even if they have no parameters. We add a special marker. 
- if not conn_data: # Empty connection section - marker_key = f"connections.{conn_name}._empty_connection" - if key is None or marker_key == key: - result[marker_key] = ConfigValue( - key=marker_key, - value=True, - source_name=self.source_name, - raw_value=True, - ) - - return result + Returns: + Nested dict structure with all TOML sections preserved + """ + from snowflake.cli.api.config_ng.parsers import TOMLParser - except Exception as e: - log.debug("Failed to parse CLI config %s: %s", file_path, e) + # Phase 1: Content acquisition + if self._content is not None: + content = self._content + else: + content = self._read_first_file() + + if not content: return {} + # Phase 2: Parse content + return TOMLParser.parse(content) + + def _read_first_file(self) -> str: + """ + Read first existing config file. + + Returns: + File content as string, or empty string if no file found + """ + for config_file in self._search_paths: + if config_file.exists(): + try: + return config_file.read_text() + except Exception as e: + log.debug("Failed to read CLI config %s: %s", config_file, e) + + return "" + def supports_key(self, key: str) -> bool: return key in self.discover() class ConnectionsConfigFile(ValueSource): """ - Dedicated connections.toml file source. + Dedicated connections.toml file source with three-phase design. + + Phase 1: Acquire content (read file) + Phase 2: Parse content (using TOMLParser) + Phase 3: Normalize legacy format (connections.toml specific) Reads ~/.snowflake/connections.toml specifically. Returns configuration for ALL connections. + + Supports both legacy formats: + 1. Direct connection sections (legacy): + [default] + database = "value" + + 2. 
Nested under [connections] section: + [connections.default] + database = "value" + + Both are normalized to nested format: {"connections": {"default": {...}}} """ - def __init__(self): - """Initialize connections.toml source.""" - # Use SNOWFLAKE_HOME if set and directory exists, otherwise use standard path - snowflake_home = os.environ.get("SNOWFLAKE_HOME") + def __init__(self, content: Optional[str] = None, file_path: Optional[Path] = None): + """ + Initialize connections.toml source. + + Args: + content: Optional string content for testing (bypasses file I/O) + file_path: Optional custom file path + """ + self._content = content + self._file_path = file_path or self._get_default_path() + + @staticmethod + def _get_default_path() -> Path: + """Get standard connections.toml path.""" + snowflake_home = os.environ.get(SNOWFLAKE_HOME_ENV) if snowflake_home: snowflake_home_path = Path(snowflake_home).expanduser() if snowflake_home_path.exists(): - self._file_path = snowflake_home_path / "connections.toml" - else: - self._file_path = Path.home() / ".snowflake" / "connections.toml" - else: - self._file_path = Path.home() / ".snowflake" / "connections.toml" + return snowflake_home_path / "connections.toml" + return Path.home() / ".snowflake" / "connections.toml" + + @classmethod + def from_string(cls, content: str) -> "ConnectionsConfigFile": + """ + Create source from TOML string (for testing). + + Args: + content: TOML format configuration as string + + Returns: + ConnectionsConfigFile instance using string content + """ + return cls(content=content) @property def source_name(self) -> "ValueSource.SourceName": @@ -361,112 +367,87 @@ def get_defined_connections(self) -> set[str]: Return set of connection names that are defined in connections.toml. This is used by the resolver to implement replacement behavior. 
""" - if not self._file_path.exists(): - return set() - try: - with open(self._file_path, "rb") as f: - data = tomllib.load(f) - - connection_names = set() - - # Check for direct connection sections (legacy format) - for section_name, section_data in data.items(): - if isinstance(section_data, dict) and section_name != "connections": - connection_names.add(section_name) - - # Check for nested [connections] section format + data = self.discover() connections_section = data.get("connections", {}) if isinstance(connections_section, dict): - for conn_name in connections_section.keys(): - connection_names.add(conn_name) - - return connection_names - + return set(connections_section.keys()) + return set() except Exception as e: - log.debug("Failed to read connections.toml: %s", e) + log.debug("Failed to get defined connections: %s", e) return set() - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ - Read connections.toml if it exists. - Returns keys in format: connections.{name}.{param} for ALL connections. + Three-phase discovery: acquire content → parse → normalize. - Supports both legacy formats: - 1. Direct connection sections (legacy): - [default] - database = "value" + Phase 1: Get content (from string or file) + Phase 2: Parse TOML (generic parser) + Phase 3: Normalize legacy format (connections.toml specific) - 2. 
Nested under [connections] section: - [connections.default] - database = "value" + Returns: + Nested dict structure: {"connections": {"conn_name": {...}}} """ - if not self._file_path.exists(): - return {} + from snowflake.cli.api.config_ng.parsers import TOMLParser - try: - with open(self._file_path, "rb") as f: - data = tomllib.load(f) - - result = {} - - # Check for direct connection sections (legacy format) - for section_name, section_data in data.items(): - if isinstance(section_data, dict) and section_name != "connections": - # This is a direct connection section like [default] - if not section_data: - # Empty connection section - marker_key = f"connections.{section_name}._empty_connection" - if key is None or marker_key == key: - result[marker_key] = ConfigValue( - key=marker_key, - value=True, - source_name=self.source_name, - raw_value=True, - ) - else: - for param_key, param_value in section_data.items(): - full_key = f"connections.{section_name}.{param_key}" - if key is None or full_key == key: - result[full_key] = ConfigValue( - key=full_key, - value=param_value, - source_name=self.source_name, - raw_value=param_value, - ) - - # Check for nested [connections] section format - connections_section = data.get("connections", {}) - if isinstance(connections_section, dict): - for conn_name, conn_data in connections_section.items(): - if isinstance(conn_data, dict): - if not conn_data: - # Empty connection section - marker_key = f"connections.{conn_name}._empty_connection" - if key is None or marker_key == key: - result[marker_key] = ConfigValue( - key=marker_key, - value=True, - source_name=self.source_name, - raw_value=True, - ) - else: - for param_key, param_value in conn_data.items(): - full_key = f"connections.{conn_name}.{param_key}" - if key is None or full_key == key: - result[full_key] = ConfigValue( - key=full_key, - value=param_value, - source_name=self.source_name, - raw_value=param_value, - ) - - return result + # Phase 1: Content acquisition + if 
self._content is not None: + content = self._content + else: + if not self._file_path.exists(): + return {} + try: + content = self._file_path.read_text() + except Exception as e: + log.debug("Failed to read connections.toml: %s", e) + return {} + # Phase 2: Parse TOML (generic parser) + try: + data = TOMLParser.parse(content) except Exception as e: - log.debug("Failed to read connections.toml: %s", e) + log.debug("Failed to parse connections.toml: %s", e) return {} + # Phase 3: Normalize legacy format (connections.toml specific) + return self._normalize_connections_format(data) + + @staticmethod + def _normalize_connections_format(data: Dict[str, Any]) -> Dict[str, Any]: + """ + Normalize connections.toml format to standard structure. + + Supports: + - Legacy: [connection_name] → {"connections": {"connection_name": {...}}} + - New: [connections.connection_name] → {"connections": {"connection_name": {...}}} + + Args: + data: Parsed TOML data + + Returns: + Normalized structure with connections under "connections" key + """ + result: Dict[str, Any] = {} + + # Handle direct connection sections (legacy format) + # Any top-level section that's not "connections" is treated as a connection + for section_name, section_data in data.items(): + if isinstance(section_data, dict) and section_name != "connections": + if "connections" not in result: + result["connections"] = {} + result["connections"][section_name] = section_data + + # Handle nested [connections] section (new format) + connections_section = data.get("connections", {}) + if isinstance(connections_section, dict) and connections_section: + if "connections" not in result: + result["connections"] = {} + # Merge with any legacy connections found above + # (nested format takes precedence if there's overlap) + result["connections"].update(connections_section) + + return result + def supports_key(self, key: str) -> bool: return key in self.discover() @@ -516,30 +497,22 @@ def source_name(self) -> 
"ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.OVERLAY - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ Discover SNOWSQL_* environment variables. - No connection-specific variables supported. + Returns flat values at root level (no connection prefix). """ - values: Dict[str, ConfigValue] = {} + result: Dict[str, Any] = {} for env_var, config_key in self.ENV_VAR_MAPPING.items(): - if key is not None and config_key != key: - continue - env_value = os.getenv(env_var) if env_value is not None: # Only set if not already set by a previous env var # (e.g., SNOWSQL_ACCOUNT takes precedence over SNOWSQL_ACCOUNTNAME) - if config_key not in values: - values[config_key] = ConfigValue( - key=config_key, - value=env_value, - source_name=self.source_name, - raw_value=env_value, - ) + if config_key not in result: + result[config_key] = env_value - return values + return result def supports_key(self, key: str) -> bool: # Check if any env var for this key is set @@ -605,14 +578,15 @@ def source_name(self) -> "ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.OVERLAY - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ Discover SNOWFLAKE_CONNECTIONS_* environment variables. - Returns connection-specific (prefixed) keys only. + Returns nested dict structure. 
- Pattern: SNOWFLAKE_CONNECTIONS__=value -> connections.{name}.{key}=value + Pattern: SNOWFLAKE_CONNECTIONS__=value + -> {"connections": {"{name}": {"{key}": value}}} """ - values: Dict[str, ConfigValue] = {} + result: Dict[str, Any] = {} # Scan all environment variables for env_name, env_value in os.environ.items(): @@ -639,16 +613,15 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: conn_name_upper, config_key = match conn_name = conn_name_upper.lower() - full_key = f"connections.{conn_name}.{config_key}" - if key is None or full_key == key: - values[full_key] = ConfigValue( - key=full_key, - value=env_value, - source_name=self.source_name, - raw_value=f"{env_name}={env_value}", - ) + # Build nested structure + if "connections" not in result: + result["connections"] = {} + if conn_name not in result["connections"]: + result["connections"][conn_name] = {} + + result["connections"][conn_name][config_key] = env_value - return values + return result def supports_key(self, key: str) -> bool: # Check if key matches pattern connections.{name}.{param} @@ -684,14 +657,14 @@ def source_name(self) -> "ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.OVERLAY - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ Discover general SNOWFLAKE_* environment variables. - Returns general (flat) keys only. + Returns flat values at root level. 
- Pattern: SNOWFLAKE_=value -> {key}=value + Pattern: SNOWFLAKE_=value -> {key: value} """ - values: Dict[str, ConfigValue] = {} + result: Dict[str, Any] = {} # Scan all environment variables for env_name, env_value in os.environ.items(): @@ -707,15 +680,9 @@ def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: config_key = config_key_upper.lower() if config_key in _ENV_CONFIG_KEYS: - if key is None or config_key == key: - values[config_key] = ConfigValue( - key=config_key, - value=env_value, - source_name=self.source_name, - raw_value=f"{env_name}={env_value}", - ) + result[config_key] = env_value - return values + return result def supports_key(self, key: str) -> bool: # Only support flat keys (not prefixed with connections.) @@ -757,27 +724,22 @@ def source_name(self) -> "ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.OVERLAY - def discover(self, key: Optional[str] = None) -> Dict[str, ConfigValue]: + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ Extract non-None values from CLI context. CLI arguments are already parsed by the framework. + Returns flat values at root level. 
""" - values: Dict[str, ConfigValue] = {} + result: Dict[str, Any] = {} for k, v in self._cli_context.items(): # Skip None values (not provided on CLI) if v is None: continue - if key is None or k == key: - values[k] = ConfigValue( - key=k, - value=v, - source_name=self.source_name, - raw_value=v, - ) + result[k] = v - return values + return result def supports_key(self, key: str) -> bool: """Check if key is present in CLI context with non-None value.""" diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 5f7589e430..19088fc7d7 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -21,8 +21,8 @@ from typing import TYPE_CHECKING, Any, Dict, Final, Optional if TYPE_CHECKING: - from snowflake.cli.api.config_ng.core import ValueSource from snowflake.cli.api.config_ng.resolver import ConfigurationResolver + from snowflake.cli.api.config_ng.source_manager import SourceManager ALTERNATIVE_CONFIG_ENV_VAR: Final[str] = "SNOWFLAKE_CLI_CONFIG_V2_ENABLED" @@ -222,19 +222,40 @@ class AlternativeConfigProvider(ConfigProvider): Maintains backward compatibility with LegacyConfigProvider output format. """ - def __init__(self) -> None: - self._resolver: Optional[ConfigurationResolver] = None + def __init__( + self, + source_manager: Optional["SourceManager"] = None, + cli_context_getter: Optional[Any] = None, + ) -> None: + """ + Initialize provider with optional dependencies for testing. 
+ + Args: + source_manager: Optional source manager (for testing) + cli_context_getter: Optional CLI context getter function (for testing) + """ + self._source_manager = source_manager + self._cli_context_getter = ( + cli_context_getter or self._default_cli_context_getter + ) + self._resolver: Optional["ConfigurationResolver"] = None self._config_cache: Dict[str, Any] = {} self._initialized: bool = False self._last_config_override: Optional[Path] = None + @staticmethod + def _default_cli_context_getter(): + """Default implementation that accesses global CLI context.""" + from snowflake.cli.api.cli_global_context import get_cli_context + + return get_cli_context() + def _ensure_initialized(self) -> None: """Lazily initialize the resolver on first use.""" # Check if config_file_override has changed try: - from snowflake.cli.api.cli_global_context import get_cli_context - - current_override = get_cli_context().config_file_override + cli_context = self._cli_context_getter() + current_override = cli_context.config_file_override # If override changed, force re-initialization if current_override != self._last_config_override: @@ -247,120 +268,78 @@ def _ensure_initialized(self) -> None: if self._initialized: return - from snowflake.cli.api.cli_global_context import get_cli_context - from snowflake.cli.api.config_ng import ( - CliConfigFile, - CliEnvironment, - CliParameters, - ConfigurationResolver, - ConnectionsConfigFile, - ConnectionSpecificEnvironment, - SnowSQLConfigFile, - SnowSQLEnvironment, - ) + from snowflake.cli.api.config_ng import ConfigurationResolver + from snowflake.cli.api.config_ng.source_factory import create_default_sources + from snowflake.cli.api.config_ng.source_manager import SourceManager - # Get CLI context safely + # Get CLI context try: - cli_context = get_cli_context().connection_context - cli_context_dict = cli_context.present_values_as_dict() + cli_context = self._cli_context_getter() + cli_context_dict = 
cli_context.connection_context.present_values_as_dict() except Exception: cli_context_dict = {} - # Create sources in precedence order (lowest to highest priority) - # File sources return keys: connections.{name}.{param} - # Env/CLI sources return flat keys: account, user, etc. - - sources = [ - # 1. SnowSQL config files (lowest priority, merged) - SnowSQLConfigFile(), - # 2. CLI config.toml (first-found behavior) - CliConfigFile(), - # 3. Dedicated connections.toml - ConnectionsConfigFile(), - # 4. SnowSQL environment variables (SNOWSQL_*) - SnowSQLEnvironment(), - # 5. Connection-specific environment variables (SNOWFLAKE_CONNECTIONS_*) - ConnectionSpecificEnvironment(), - # 6. General CLI environment variables (SNOWFLAKE_*) - CliEnvironment(), - # 7. CLI command-line arguments (highest priority) - CliParameters(cli_context=cli_context_dict), - ] - - # Create resolver with all sources in order - self._resolver = ConfigurationResolver(sources=sources) + # Create or use provided source manager + if self._source_manager is None: + sources = create_default_sources(cli_context_dict) + self._source_manager = SourceManager(sources) + + # Create resolver + self._resolver = ConfigurationResolver( + sources=self._source_manager.get_sources() + ) + + # Initialize cache (resolver returns nested dict) + if not self._config_cache: + self._config_cache = self._resolver.resolve() self._initialized = True def read_config(self) -> None: """ Load configuration from all sources. - For config_ng, this means (re)initializing the resolver. + Resolver returns nested dict structure. 
""" self._initialized = False self._config_cache.clear() - self._last_config_override = None # Reset cached override to force re-check + self._last_config_override = None self._ensure_initialized() - # Resolve all configuration to populate cache + # Resolver returns nested dict assert self._resolver is not None self._config_cache = self._resolver.resolve() def get_section(self, *path) -> dict: """ - Get configuration section at specified path. + Navigate nested dict to get configuration section. Args: - *path: Section path (e.g., "connections", "my_conn") + *path: Section path components (e.g., "connections", "prod") Returns: Dictionary of section contents + + Example: + Cache: {"connections": {"prod": {"account": "val"}}} + get_section("connections", "prod") -> {"account": "val"} """ self._ensure_initialized() - if not self._config_cache: - assert self._resolver is not None - self._config_cache = self._resolver.resolve() - - # Navigate through path to find section if not path: return self._config_cache - # For connections section, return all connections as nested dicts - if len(path) == 1 and path[0] == "connections": - return self._get_all_connections_dict() - - # For specific connection, return connection dict - if len(path) == 2 and path[0] == "connections": - connection_name = path[1] - return self._get_connection_dict_internal(connection_name) - - # For variables section, return all variables as flat dict - if len(path) == 1 and path[0] == "variables": - result = {} - for key, value in self._config_cache.items(): - if key.startswith("variables."): - var_name = key[len("variables.") :] - result[var_name] = value - return result - - # For other sections, try to resolve with path prefix - section_prefix = ".".join(path) - result = {} - for key, value in self._config_cache.items(): - if key.startswith(section_prefix + "."): - # Strip prefix to get relative key - relative_key = key[len(section_prefix) + 1 :] - result[relative_key] = value - elif key == 
section_prefix: - # Exact match for section itself - return value if isinstance(value, dict) else {section_prefix: value} + # Navigate nested structure + result = self._config_cache + for part in path: + if not isinstance(result, dict) or part not in result: + return {} + result = result[part] - return result + return result if isinstance(result, dict) else {} def get_value(self, *path, key: str, default: Optional[Any] = None) -> Any: """ - Get single configuration value at path + key. + Get single configuration value by navigating nested dict. Args: *path: Path to section @@ -372,19 +351,9 @@ def get_value(self, *path, key: str, default: Optional[Any] = None) -> Any: """ self._ensure_initialized() - if not self._config_cache: - assert self._resolver is not None - self._config_cache = self._resolver.resolve() - - # Build full key from path and key - if path: - full_key = ".".join(path) + "." + key - else: - full_key = key - - # Try to resolve the value - value = self._config_cache.get(full_key, default) - return value + # Navigate to section, then get key + section = self.get_section(*path) + return section.get(key, default) def set_value(self, path: list[str], value: Any) -> None: """ @@ -416,130 +385,57 @@ def unset_value(self, path: list[str]) -> None: def section_exists(self, *path) -> bool: """ - Check if configuration section exists. + Check if configuration section exists by navigating nested dict. 
Args: *path: Section path Returns: - True if section exists and has values + True if section exists """ self._ensure_initialized() - if not self._config_cache: - assert self._resolver is not None - self._config_cache = self._resolver.resolve() - if not path: return True - section_prefix = ".".join(path) - # Check if any key starts with this prefix - return any( - key == section_prefix or key.startswith(section_prefix + ".") - for key in self._config_cache.keys() - ) + # Navigate nested structure + result = self._config_cache + for part in path: + if not isinstance(result, dict) or part not in result: + return False + result = result[part] - # Source priority levels (higher number = higher priority) - _SOURCE_PRIORITIES: Final[dict["ValueSource.SourceName", int]] = { - "snowsql_config": 1, - "cli_config_toml": 2, - "connections_toml": 3, - "snowsql_env": 4, - "connection_specific_env": 5, - "cli_env": 6, - "cli_arguments": 7, - } + return True def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: """ - Get connection configuration by name. + Get connection configuration by navigating nested dict. - Merges configuration from all sources (files and environment variables) - based on the 7-level precedence order. For each parameter, the value - from the highest-priority source wins. + Note: The resolver already merged general params into each connection + during the OVERLAY phase, so we just return the connection dict directly. Args: connection_name: Name of the connection Returns: - Dictionary of connection parameters with all sources merged + Dictionary of connection parameters """ - self._ensure_initialized() + from snowflake.cli.api.exceptions import MissingConfigurationError - if not self._config_cache: - assert self._resolver is not None - self._config_cache = self._resolver.resolve() - - connection_dict: Dict[str, Any] = {} - connection_prefix = f"connections.{connection_name}." 
- - # Collect all parameter names from both prefixed and flat keys - param_names = set() - - # Get param names from prefixed keys (file sources, connection-specific env) - for key in self._config_cache.keys(): - if key.startswith(connection_prefix): - param_name = key[len(connection_prefix) :] - param_names.add(param_name) - - # Get param names from flat keys (general env vars, SnowSQL env, CLI params) - # Skip internal CLI arguments and global settings that aren't connection parameters - for key in self._config_cache.keys(): - if "." not in key and key not in ( - "enable_diag", - "temporary_connection", - "default_connection_name", - "connection_name", - "diag_log_path", - "diag_allowlist_path", - "mfa_passcode", - ): - param_names.add(key) - - # For each parameter, determine the best value based on source priority - for param_name in param_names: - prefixed_key = f"{connection_prefix}{param_name}" - flat_key = param_name - - best_value = None - best_priority = -1 - - # Check prefixed key (from files and connection-specific env) - if prefixed_key in self._config_cache: - value = self._config_cache[prefixed_key] - if self._resolver is not None: - history = self._resolver.get_resolution_history(prefixed_key) - if history and history.selected_entry: - source = history.selected_entry.config_value.source_name - priority = self._SOURCE_PRIORITIES.get(source, 0) - if priority > best_priority: - best_value = value - best_priority = priority - - # Check flat key (from general env vars, SnowSQL env, CLI params) - if flat_key in self._config_cache: - value = self._config_cache[flat_key] - if self._resolver is not None: - history = self._resolver.get_resolution_history(flat_key) - if history and history.selected_entry: - source = history.selected_entry.config_value.source_name - priority = self._SOURCE_PRIORITIES.get(source, 0) - if priority > best_priority: - best_value = value - best_priority = priority - - if best_value is not None: - connection_dict[param_name] = 
best_value - - if not connection_dict: - from snowflake.cli.api.exceptions import MissingConfigurationError - - raise MissingConfigurationError( - f"Connection {connection_name} is not configured" - ) + self._ensure_initialized() - return connection_dict + # Get connection from nested dict + connections = self._config_cache.get("connections", {}) + if connection_name in connections and isinstance( + connections[connection_name], dict + ): + result = connections[connection_name] + if result: + return result + + raise MissingConfigurationError( + f"Connection {connection_name} is not configured" + ) def get_connection_dict(self, connection_name: str) -> dict: """ @@ -556,38 +452,15 @@ def get_connection_dict(self, connection_name: str) -> dict: def _get_all_connections_dict(self) -> Dict[str, Dict[str, Any]]: """ - Get all connection configurations as nested dictionary. + Get all connections from nested dict. Returns: Dictionary mapping connection names to their configurations """ self._ensure_initialized() - if not self._config_cache: - assert self._resolver is not None - self._config_cache = self._resolver.resolve() - - connections: Dict[str, Dict[str, Any]] = {} - connections_prefix = "connections." 
- - for key, value in self._config_cache.items(): - if key.startswith(connections_prefix): - # Parse "connections.{name}.{param}" - parts = key[len(connections_prefix) :].split(".", 1) - if len(parts) == 2: - conn_name, param_name = parts - if conn_name not in connections: - connections[conn_name] = {} - - # Skip internal markers, but ensure connection exists - if param_name == "_empty_connection": - # This is just a marker for empty connections - # Connection dict already created above - continue - - connections[conn_name][param_name] = value - - return connections + connections = self._config_cache.get("connections", {}) + return connections if isinstance(connections, dict) else {} def get_all_connections(self, include_env_connections: bool = False) -> dict: """ @@ -625,33 +498,23 @@ def _get_file_based_connections(self) -> dict: Dictionary mapping connection names to ConnectionConfig objects """ from snowflake.cli.api.config import ConnectionConfig + from snowflake.cli.api.config_ng.constants import FILE_SOURCE_NAMES self._ensure_initialized() - # Only query file sources: SnowSQL config, CLI config.toml, connections.toml - file_source_names = {"snowsql_config", "cli_config_toml", "connections_toml"} - connections: Dict[str, Dict[str, Any]] = {} - connections_prefix = "connections." 
assert self._resolver is not None for source in self._resolver.get_sources(): - if source.source_name not in file_source_names: + if source.source_name not in FILE_SOURCE_NAMES: continue try: - source_values = source.discover() - for key, config_value in source_values.items(): - if key.startswith(connections_prefix): - parts = key[len(connections_prefix) :].split(".", 1) - if len(parts) == 2: - conn_name, param_name = parts - if conn_name not in connections: - connections[conn_name] = {} - - # Skip internal markers - if param_name != "_empty_connection": - connections[conn_name][param_name] = config_value.value + source_data = source.discover() # Returns nested dict + if "connections" in source_data: + for conn_name, conn_config in source_data["connections"].items(): + if isinstance(conn_config, dict): + connections[conn_name] = conn_config except Exception: # Silently skip sources that fail to discover pass diff --git a/tests/api/test_config_provider.py b/tests/api/test_config_provider.py index cc8ceda3b7..310a2f7d72 100644 --- a/tests/api/test_config_provider.py +++ b/tests/api/test_config_provider.py @@ -88,34 +88,3 @@ def test_reset_provider(): reset_config_provider() provider2 = get_config_provider_singleton() assert provider1 is not provider2 - - -def test_alternative_provider_has_all_required_methods(): - """AlternativeConfigProvider should have all ConfigProvider methods implemented.""" - provider = AlternativeConfigProvider() - - # Verify all abstract methods are implemented and callable - # Note: These are smoke tests - comprehensive tests are in test_config_provider_integration.py - assert callable(provider.get_section) - assert callable(provider.get_value) - assert callable(provider.set_value) - assert callable(provider.unset_value) - assert callable(provider.section_exists) - assert callable(provider.read_config) - assert callable(provider.get_connection_dict) - assert callable(provider.get_all_connections) - - -def 
test_legacy_provider_has_all_required_methods(): - """LegacyConfigProvider should have all ConfigProvider methods implemented.""" - provider = LegacyConfigProvider() - - # Verify all abstract methods are implemented and callable - assert callable(provider.get_section) - assert callable(provider.get_value) - assert callable(provider.set_value) - assert callable(provider.unset_value) - assert callable(provider.section_exists) - assert callable(provider.read_config) - assert callable(provider.get_connection_dict) - assert callable(provider.get_all_connections) diff --git a/tests/config_ng/test_constants.py b/tests/config_ng/test_constants.py new file mode 100644 index 0000000000..fdb0639eaf --- /dev/null +++ b/tests/config_ng/test_constants.py @@ -0,0 +1,80 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for configuration constants.""" + +from snowflake.cli.api.config_ng.constants import ( + FILE_SOURCE_NAMES, + INTERNAL_CLI_PARAMETERS, + SNOWFLAKE_HOME_ENV, + ConfigSection, +) + + +class TestConfigSection: + """Test ConfigSection enum.""" + + def test_enum_values(self): + """Test that enum has expected values.""" + assert ConfigSection.CONNECTIONS.value == "connections" + assert ConfigSection.VARIABLES.value == "variables" + assert ConfigSection.CLI.value == "cli" + assert ConfigSection.CLI_LOGS.value == "cli.logs" + assert ConfigSection.CLI_FEATURES.value == "cli.features" + + def test_enum_string_representation(self): + """Test that enum converts to string correctly.""" + assert str(ConfigSection.CONNECTIONS) == "connections" + assert str(ConfigSection.VARIABLES) == "variables" + assert str(ConfigSection.CLI) == "cli" + + def test_enum_is_string(self): + """Test that enum instances are strings.""" + assert isinstance(ConfigSection.CONNECTIONS, str) + assert isinstance(ConfigSection.VARIABLES, str) + + def test_enum_comparison(self): + """Test that enum can be compared with strings.""" + assert ConfigSection.CONNECTIONS == "connections" + assert ConfigSection.VARIABLES == "variables" + + +class TestConstants: + """Test other constants.""" + + def test_snowflake_home_env(self): + """Test SNOWFLAKE_HOME environment variable constant.""" + assert SNOWFLAKE_HOME_ENV == "SNOWFLAKE_HOME" + + def test_internal_cli_parameters(self): + """Test INTERNAL_CLI_PARAMETERS set.""" + expected_params = { + "enable_diag", + "temporary_connection", + "default_connection_name", + "connection_name", + "diag_log_path", + "diag_allowlist_path", + "mfa_passcode", + } + assert INTERNAL_CLI_PARAMETERS == expected_params + + def test_file_source_names(self): + """Test FILE_SOURCE_NAMES set.""" + expected_sources = { + "snowsql_config", + "cli_config_toml", + "connections_toml", + } + assert FILE_SOURCE_NAMES == expected_sources diff --git 
a/tests/config_ng/test_merge_operations.py b/tests/config_ng/test_merge_operations.py new file mode 100644 index 0000000000..b3115e4f6a --- /dev/null +++ b/tests/config_ng/test_merge_operations.py @@ -0,0 +1,269 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for configuration merge operations.""" + +from snowflake.cli.api.config_ng.merge_operations import ( + create_default_connection_from_params, + extract_root_level_connection_params, + merge_params_into_connections, +) + + +class TestExtractRootLevelConnectionParams: + """Test extract_root_level_connection_params function.""" + + def test_extract_connection_params_from_mixed_config(self): + """Test extracting connection params from config with sections.""" + config = { + "account": "test_account", + "user": "test_user", + "connections": {"dev": {"database": "db"}}, + "cli": {"enable_diag": True}, + } + + conn_params, remaining = extract_root_level_connection_params(config) + + assert conn_params == {"account": "test_account", "user": "test_user"} + assert "connections" in remaining + assert "cli" in remaining + assert "account" not in remaining + assert "user" not in remaining + + def test_extract_with_no_connection_params(self): + """Test extraction when no root-level connection params exist.""" + config = { + "connections": {"dev": {"account": "acc"}}, + "variables": {"key": "value"}, + } + + conn_params, remaining = extract_root_level_connection_params(config) 
+ + assert conn_params == {} + assert remaining == config + + def test_extract_with_only_connection_params(self): + """Test extraction when only connection params exist.""" + config = {"account": "acc", "user": "usr", "password": "pwd"} + + conn_params, remaining = extract_root_level_connection_params(config) + + assert conn_params == config + assert remaining == {} + + def test_extract_ignores_internal_cli_parameters(self): + """Test that internal CLI parameters are not treated as connection params.""" + config = { + "account": "acc", + "enable_diag": True, + "temporary_connection": True, + "default_connection_name": "dev", + } + + conn_params, remaining = extract_root_level_connection_params(config) + + assert conn_params == {"account": "acc"} + assert "enable_diag" in remaining + assert "temporary_connection" in remaining + assert "default_connection_name" in remaining + + def test_extract_recognizes_all_sections(self): + """Test that all ConfigSection values are recognized as sections.""" + config = { + "account": "acc", + "connections": {}, + "variables": {}, + "cli": {}, + } + + conn_params, remaining = extract_root_level_connection_params(config) + + assert conn_params == {"account": "acc"} + assert "connections" in remaining + assert "variables" in remaining + assert "cli" in remaining + + def test_extract_with_nested_section_names(self): + """Test extraction with nested section names like cli.logs.""" + config = { + "account": "acc", + "cli.logs": {"save_logs": True}, + "cli.features": {"feature1": True}, + } + + conn_params, remaining = extract_root_level_connection_params(config) + + assert conn_params == {"account": "acc"} + assert "cli.logs" in remaining + assert "cli.features" in remaining + + def test_extract_empty_config(self): + """Test extraction with empty config.""" + conn_params, remaining = extract_root_level_connection_params({}) + + assert conn_params == {} + assert remaining == {} + + def test_extract_preserves_nested_structures(self): + 
"""Test that nested structures in sections are preserved.""" + config = { + "account": "acc", + "connections": {"dev": {"nested": {"deep": "value"}}}, + } + + conn_params, remaining = extract_root_level_connection_params(config) + + assert conn_params == {"account": "acc"} + assert remaining["connections"]["dev"]["nested"]["deep"] == "value" + + +class TestMergeParamsIntoConnections: + """Test merge_params_into_connections function.""" + + def test_merge_params_into_single_connection(self): + """Test merging params into a single connection.""" + connections = {"dev": {"account": "dev_acc", "user": "dev_user"}} + params = {"password": "new_pass"} + + result = merge_params_into_connections(connections, params) + + assert result["dev"]["account"] == "dev_acc" + assert result["dev"]["user"] == "dev_user" + assert result["dev"]["password"] == "new_pass" + + def test_merge_params_into_multiple_connections(self): + """Test merging params into multiple connections.""" + connections = { + "dev": {"account": "dev_acc"}, + "prod": {"account": "prod_acc"}, + } + params = {"user": "global_user", "password": "global_pass"} + + result = merge_params_into_connections(connections, params) + + assert result["dev"]["user"] == "global_user" + assert result["dev"]["password"] == "global_pass" + assert result["prod"]["user"] == "global_user" + assert result["prod"]["password"] == "global_pass" + + def test_merge_params_override_connection_values(self): + """Test that params override existing connection values.""" + connections = {"dev": {"account": "old_acc", "user": "old_user"}} + params = {"user": "new_user"} + + result = merge_params_into_connections(connections, params) + + assert result["dev"]["account"] == "old_acc" + assert result["dev"]["user"] == "new_user" + + def test_merge_empty_params(self): + """Test merging with empty params.""" + connections = {"dev": {"account": "acc"}} + params = {} + + result = merge_params_into_connections(connections, params) + + assert result == 
connections + + def test_merge_into_empty_connections(self): + """Test merging params into empty connections dict.""" + connections = {} + params = {"account": "acc"} + + result = merge_params_into_connections(connections, params) + + assert result == {} + + def test_merge_preserves_original_connections(self): + """Test that original connections dict is not modified.""" + connections = {"dev": {"account": "acc"}} + params = {"user": "usr"} + + result = merge_params_into_connections(connections, params) + + # Original should be unchanged + assert "user" not in connections["dev"] + # Result should have merged values + assert result["dev"]["user"] == "usr" + + def test_merge_nested_connection_values(self): + """Test merging with nested connection structures.""" + connections = {"dev": {"account": "acc", "nested": {"key": "value"}}} + params = {"nested": {"key": "new_value", "new_key": "new"}} + + result = merge_params_into_connections(connections, params) + + assert result["dev"]["nested"]["key"] == "new_value" + assert result["dev"]["nested"]["new_key"] == "new" + + def test_merge_handles_non_dict_connection(self): + """Test that non-dict connection values are preserved.""" + connections = {"dev": {"account": "acc"}, "invalid": "not_a_dict"} + params = {"user": "usr"} + + result = merge_params_into_connections(connections, params) + + assert result["dev"]["user"] == "usr" + assert result["invalid"] == "not_a_dict" + + +class TestCreateDefaultConnectionFromParams: + """Test create_default_connection_from_params function.""" + + def test_create_default_connection(self): + """Test creating default connection from params.""" + params = {"account": "test_acc", "user": "test_user"} + + result = create_default_connection_from_params(params) + + assert "default" in result + assert result["default"] == params + + def test_create_default_with_single_param(self): + """Test creating default connection with single param.""" + params = {"account": "test_acc"} + + result = 
create_default_connection_from_params(params) + + assert result == {"default": {"account": "test_acc"}} + + def test_create_default_with_empty_params(self): + """Test creating default connection with empty params.""" + result = create_default_connection_from_params({}) + + assert result == {} + + def test_create_default_preserves_original_params(self): + """Test that original params dict is not modified.""" + params = {"account": "acc"} + + result = create_default_connection_from_params(params) + + # Modify result + result["default"]["user"] = "usr" + + # Original should be unchanged + assert "user" not in params + + def test_create_default_with_complex_params(self): + """Test creating default connection with nested params.""" + params = { + "account": "acc", + "user": "usr", + "nested": {"key": "value"}, + } + + result = create_default_connection_from_params(params) + + assert result["default"]["nested"]["key"] == "value" diff --git a/tests/config_ng/test_parsers.py b/tests/config_ng/test_parsers.py new file mode 100644 index 0000000000..493280434f --- /dev/null +++ b/tests/config_ng/test_parsers.py @@ -0,0 +1,345 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Tests for configuration parsers.""" + +import pytest +from snowflake.cli.api.config_ng.parsers import SnowSQLParser, TOMLParser + + +class TestSnowSQLParser: + """Test SnowSQL INI parser.""" + + def test_parse_single_connection(self): + """Test parsing a single connection.""" + content = """ +[connections.dev] +accountname = myaccount +username = myuser +password = mypass +""" + result = SnowSQLParser.parse(content) + + assert "connections" in result + assert "dev" in result["connections"] + assert result["connections"]["dev"] == { + "account": "myaccount", + "user": "myuser", + "password": "mypass", + } + + def test_parse_multiple_connections(self): + """Test parsing multiple connections.""" + content = """ +[connections.dev] +accountname = dev_account +username = dev_user + +[connections.prod] +accountname = prod_account +username = prod_user +""" + result = SnowSQLParser.parse(content) + + assert "connections" in result + assert len(result["connections"]) == 2 + assert result["connections"]["dev"]["account"] == "dev_account" + assert result["connections"]["prod"]["account"] == "prod_account" + + def test_parse_default_connection(self): + """Test parsing default connection (no name suffix).""" + content = """ +[connections] +accountname = default_account +username = default_user +""" + result = SnowSQLParser.parse(content) + + assert "connections" in result + assert "default" in result["connections"] + assert result["connections"]["default"]["account"] == "default_account" + + def test_key_mapping_accountname_to_account(self): + """Test that accountname is mapped to account.""" + content = """ +[connections.test] +accountname = test_account +""" + result = SnowSQLParser.parse(content) + + assert "account" in result["connections"]["test"] + assert result["connections"]["test"]["account"] == "test_account" + + def test_key_mapping_username_to_user(self): + """Test that username is mapped to user.""" + content = """ +[connections.test] +username = test_user 
+""" + result = SnowSQLParser.parse(content) + + assert "user" in result["connections"]["test"] + assert result["connections"]["test"]["user"] == "test_user" + + def test_key_mapping_dbname_to_database(self): + """Test that dbname is mapped to database.""" + content = """ +[connections.test] +dbname = test_db +""" + result = SnowSQLParser.parse(content) + + assert "database" in result["connections"]["test"] + assert result["connections"]["test"]["database"] == "test_db" + + def test_key_mapping_pwd_to_password(self): + """Test that pwd is mapped to password.""" + content = """ +[connections.test] +pwd = test_pass +""" + result = SnowSQLParser.parse(content) + + assert "password" in result["connections"]["test"] + assert result["connections"]["test"]["password"] == "test_pass" + + def test_key_mapping_multiple_keys(self): + """Test mapping multiple keys at once.""" + content = """ +[connections.test] +accountname = acc +username = usr +dbname = db +schemaname = sch +warehousename = wh +rolename = rol +""" + result = SnowSQLParser.parse(content) + + conn = result["connections"]["test"] + assert conn["account"] == "acc" + assert conn["user"] == "usr" + assert conn["database"] == "db" + assert conn["schema"] == "sch" + assert conn["warehouse"] == "wh" + assert conn["role"] == "rol" + + def test_parse_variables_section(self): + """Test parsing variables section.""" + content = """ +[variables] +stage = mystage +table = mytable +schema = myschema +""" + result = SnowSQLParser.parse(content) + + assert "variables" in result + assert result["variables"]["stage"] == "mystage" + assert result["variables"]["table"] == "mytable" + assert result["variables"]["schema"] == "myschema" + + def test_parse_connections_and_variables(self): + """Test parsing both connections and variables.""" + content = """ +[connections.dev] +accountname = dev_account + +[variables] +env = development +""" + result = SnowSQLParser.parse(content) + + assert "connections" in result + assert "variables" 
in result + assert result["connections"]["dev"]["account"] == "dev_account" + assert result["variables"]["env"] == "development" + + def test_parse_empty_content(self): + """Test parsing empty content.""" + result = SnowSQLParser.parse("") + + assert result == {} + + def test_parse_no_connections_section(self): + """Test parsing config without connections section.""" + content = """ +[variables] +key = value +""" + result = SnowSQLParser.parse(content) + + assert "connections" not in result + assert "variables" in result + + def test_parse_preserves_unmapped_keys(self): + """Test that unmapped keys are preserved as-is.""" + content = """ +[connections.test] +custom_key = custom_value +another_key = another_value +""" + result = SnowSQLParser.parse(content) + + conn = result["connections"]["test"] + assert conn["custom_key"] == "custom_value" + assert conn["another_key"] == "another_value" + + def test_parse_connection_with_special_characters(self): + """Test parsing connection with special characters in name.""" + content = """ +[connections.my-test_conn] +accountname = test +""" + result = SnowSQLParser.parse(content) + + assert "my-test_conn" in result["connections"] + + def test_parse_values_with_spaces(self): + """Test parsing values that contain spaces.""" + content = """ +[connections.test] +accountname = my account name +""" + result = SnowSQLParser.parse(content) + + assert result["connections"]["test"]["account"] == "my account name" + + +class TestTOMLParser: + """Test TOML parser.""" + + def test_parse_simple_toml(self): + """Test parsing simple TOML.""" + content = """ +[connections.test] +account = "test_account" +user = "test_user" +""" + result = TOMLParser.parse(content) + + assert "connections" in result + assert "test" in result["connections"] + assert result["connections"]["test"]["account"] == "test_account" + assert result["connections"]["test"]["user"] == "test_user" + + def test_parse_nested_toml(self): + """Test parsing nested TOML 
structure.""" + content = """ +[cli] +enable_diag = true + +[cli.logs] +save_logs = true + +[connections.prod] +account = "prod_account" +""" + result = TOMLParser.parse(content) + + assert "cli" in result + assert result["cli"]["enable_diag"] is True + assert result["cli"]["logs"]["save_logs"] is True + assert result["connections"]["prod"]["account"] == "prod_account" + + def test_parse_multiple_connections(self): + """Test parsing multiple connections in TOML.""" + content = """ +[connections.dev] +account = "dev_account" + +[connections.prod] +account = "prod_account" +""" + result = TOMLParser.parse(content) + + assert len(result["connections"]) == 2 + assert result["connections"]["dev"]["account"] == "dev_account" + assert result["connections"]["prod"]["account"] == "prod_account" + + def test_parse_variables(self): + """Test parsing variables section.""" + content = """ +[variables] +stage = "mystage" +table = "mytable" +""" + result = TOMLParser.parse(content) + + assert "variables" in result + assert result["variables"]["stage"] == "mystage" + assert result["variables"]["table"] == "mytable" + + def test_parse_empty_content(self): + """Test parsing empty TOML.""" + result = TOMLParser.parse("") + + assert result == {} + + def test_parse_toml_with_types(self): + """Test parsing TOML with different value types.""" + content = """ +[test] +string_val = "text" +int_val = 42 +float_val = 3.14 +bool_val = true +array_val = ["a", "b", "c"] +""" + result = TOMLParser.parse(content) + + assert result["test"]["string_val"] == "text" + assert result["test"]["int_val"] == 42 + assert result["test"]["float_val"] == 3.14 + assert result["test"]["bool_val"] is True + assert result["test"]["array_val"] == ["a", "b", "c"] + + def test_parse_malformed_toml_raises_error(self): + """Test that malformed TOML raises an error.""" + content = """ +[connections.test +account = "broken +""" + with pytest.raises(Exception): # tomllib raises TOMLDecodeError + TOMLParser.parse(content) 
+ + def test_parse_toml_with_inline_table(self): + """Test parsing TOML with inline tables.""" + content = """ +[connections] +dev = { account = "dev_acc", user = "dev_user" } +""" + result = TOMLParser.parse(content) + + assert result["connections"]["dev"]["account"] == "dev_acc" + assert result["connections"]["dev"]["user"] == "dev_user" + + def test_parse_legacy_connections_format(self): + """Test parsing legacy connections.toml format (direct sections).""" + content = """ +[dev] +account = "dev_account" +user = "dev_user" + +[prod] +account = "prod_account" +user = "prod_user" +""" + result = TOMLParser.parse(content) + + # Note: TOMLParser just parses, doesn't normalize + assert "dev" in result + assert "prod" in result + assert result["dev"]["account"] == "dev_account" + assert result["prod"]["account"] == "prod_account" diff --git a/tests/config_ng/test_snowsql_variables.py b/tests/config_ng/test_snowsql_variables.py index f7ae946a51..15bd91621c 100644 --- a/tests/config_ng/test_snowsql_variables.py +++ b/tests/config_ng/test_snowsql_variables.py @@ -48,23 +48,20 @@ def test_read_variables_section_from_snowsql_config(self): """ ) - source = SnowSQLConfigFile() - setattr(source, "_config_files", [config_file]) + source = SnowSQLConfigFile(config_paths=[config_file]) discovered = source.discover() - # Check that variables are discovered with proper prefix - assert "variables.var1" in discovered - assert "variables.var2" in discovered - assert "variables.example_variable" in discovered + # Check nested structure + assert "variables" in discovered + assert "var1" in discovered["variables"] + assert "var2" in discovered["variables"] + assert "example_variable" in discovered["variables"] - # Check values - assert discovered["variables.var1"].value == "value1" - assert discovered["variables.var2"].value == "value2" - assert discovered["variables.example_variable"].value == "27" - - # Check source name - assert discovered["variables.var1"].source_name == 
"snowsql_config" + # Values are plain strings now (not ConfigValue objects) + assert discovered["variables"]["var1"] == "value1" + assert discovered["variables"]["var2"] == "value2" + assert discovered["variables"]["example_variable"] == "27" def test_variables_section_empty(self): """Test that empty [variables] section doesn't cause errors.""" @@ -79,14 +76,13 @@ def test_variables_section_empty(self): """ ) - source = SnowSQLConfigFile() - setattr(source, "_config_files", [config_file]) + source = SnowSQLConfigFile(config_paths=[config_file]) discovered = source.discover() - # Should have connections but no variables - assert any(k.startswith("connections.") for k in discovered.keys()) - assert not any(k.startswith("variables.") for k in discovered.keys()) + # Should have connections but no variables (or empty variables dict) + assert "connections" in discovered + assert not discovered.get("variables", {}) def test_no_variables_section(self): """Test that config without [variables] section works correctly.""" @@ -100,14 +96,13 @@ def test_no_variables_section(self): """ ) - source = SnowSQLConfigFile() - setattr(source, "_config_files", [config_file]) + source = SnowSQLConfigFile(config_paths=[config_file]) discovered = source.discover() - # Should have connections but no variables - assert any(k.startswith("connections.") for k in discovered.keys()) - assert not any(k.startswith("variables.") for k in discovered.keys()) + # Should have connections but no variables key + assert "connections" in discovered + assert "variables" not in discovered def test_variables_merged_from_multiple_files(self): """Test that variables from multiple SnowSQL config files are merged.""" @@ -130,19 +125,14 @@ def test_variables_merged_from_multiple_files(self): """ ) - source = SnowSQLConfigFile() - setattr(source, "_config_files", [config_file1, config_file2]) + source = SnowSQLConfigFile(config_paths=[config_file1, config_file2]) discovered = source.discover() - # var1 from file1 
should be present - assert discovered["variables.var1"].value == "value1" - - # var2 should be overridden by file2 - assert discovered["variables.var2"].value == "overridden_value2" - - # var3 from file2 should be present - assert discovered["variables.var3"].value == "value3" + # Check nested structure with merged values + assert discovered["variables"]["var1"] == "value1" + assert discovered["variables"]["var2"] == "overridden_value2" + assert discovered["variables"]["var3"] == "value3" def test_variables_with_special_characters(self): """Test that variables with special characters in values are handled.""" @@ -157,48 +147,48 @@ def test_variables_with_special_characters(self): """ ) - source = SnowSQLConfigFile() - setattr(source, "_config_files", [config_file]) + source = SnowSQLConfigFile(config_paths=[config_file]) discovered = source.discover() - assert discovered["variables.var_with_equals"].value == "key=value" - assert discovered["variables.var_with_spaces"].value == "value with spaces" - assert discovered["variables.var_with_quotes"].value == "'quoted value'" + assert discovered["variables"]["var_with_equals"] == "key=value" + assert discovered["variables"]["var_with_spaces"] == "value with spaces" + assert discovered["variables"]["var_with_quotes"] == "'quoted value'" class TestAlternativeConfigProviderVariables: """Tests for getting variables section from AlternativeConfigProvider.""" def test_get_variables_section(self): - """Test get_section('variables') returns flat dict without prefix.""" + """Test get_section('variables') returns nested dict.""" from snowflake.cli.api.config_provider import AlternativeConfigProvider provider = AlternativeConfigProvider() - with mock.patch.object(provider, "_resolver") as mock_resolver: - mock_resolver.resolve.return_value = { - "variables.var1": "value1", - "variables.var2": "value2", - "connections.default.account": "test_account", - } - setattr(provider, "_initialized", True) - # Prevent re-initialization - from 
snowflake.cli.api.cli_global_context import get_cli_context + # Mock with nested structure + mock_cache = { + "variables": {"var1": "value1", "var2": "value2"}, + "connections": {"default": {"account": "test_account"}}, + } + + setattr(provider, "_initialized", True) + setattr(provider, "_config_cache", mock_cache) + + from snowflake.cli.api.cli_global_context import get_cli_context - try: - setattr( - provider, - "_last_config_override", - get_cli_context().config_file_override, - ) - except Exception: - setattr(provider, "_last_config_override", None) + try: + setattr( + provider, + "_last_config_override", + get_cli_context().config_file_override, + ) + except Exception: + setattr(provider, "_last_config_override", None) - result = provider.get_section("variables") + result = provider.get_section("variables") - # Should return flat dict without variables. prefix - assert result == {"var1": "value1", "var2": "value2"} + # Should return nested dict under "variables" key + assert result == {"var1": "value1", "var2": "value2"} def test_get_variables_section_empty(self): """Test get_section('variables') with no variables returns empty dict.""" @@ -206,26 +196,27 @@ def test_get_variables_section_empty(self): provider = AlternativeConfigProvider() - with mock.patch.object(provider, "_resolver") as mock_resolver: - mock_resolver.resolve.return_value = { - "connections.default.account": "test_account", - } - setattr(provider, "_initialized", True) - # Prevent re-initialization - from snowflake.cli.api.cli_global_context import get_cli_context + mock_cache = { + "connections": {"default": {"account": "test_account"}}, + } - try: - setattr( - provider, - "_last_config_override", - get_cli_context().config_file_override, - ) - except Exception: - setattr(provider, "_last_config_override", None) + setattr(provider, "_initialized", True) + setattr(provider, "_config_cache", mock_cache) - result = provider.get_section("variables") + from snowflake.cli.api.cli_global_context 
import get_cli_context - assert result == {} + try: + setattr( + provider, + "_last_config_override", + get_cli_context().config_file_override, + ) + except Exception: + setattr(provider, "_last_config_override", None) + + result = provider.get_section("variables") + + assert result == {} class TestGetMergedVariables: @@ -353,16 +344,17 @@ def test_resolver_with_variables(self): """ ) - source = SnowSQLConfigFile() - setattr(source, "_config_files", [config_file]) + source = SnowSQLConfigFile(config_paths=[config_file]) resolver = ConfigurationResolver(sources=[source]) config = resolver.resolve() - assert "variables.var1" in config - assert "variables.var2" in config - assert config["variables.var1"] == "value1" - assert config["variables.var2"] == "value2" + # Check nested structure + assert "variables" in config + assert "var1" in config["variables"] + assert "var2" in config["variables"] + assert config["variables"]["var1"] == "value1" + assert config["variables"]["var2"] == "value2" class TestSnowSQLSectionEnum: @@ -388,11 +380,11 @@ def test_section_enum_in_snowsql_source(self): """ ) - source = SnowSQLConfigFile() - setattr(source, "_config_files", [config_file]) + source = SnowSQLConfigFile(config_paths=[config_file]) # Should discover both connections and variables discovered = source.discover() - assert any(k.startswith("connections.") for k in discovered.keys()) - assert any(k.startswith("variables.") for k in discovered.keys()) + # Check nested structure contains both sections + assert "connections" in discovered + assert "variables" in discovered diff --git a/tests/config_ng/test_sources.py b/tests/config_ng/test_sources.py new file mode 100644 index 0000000000..8ac9e4e2c5 --- /dev/null +++ b/tests/config_ng/test_sources.py @@ -0,0 +1,380 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for configuration sources with string-based testing.""" + +from snowflake.cli.api.config_ng.sources import ( + CliConfigFile, + ConnectionsConfigFile, + SnowSQLConfigFile, +) + + +class TestSnowSQLConfigFileFromString: + """Test SnowSQLConfigFile with string-based initialization.""" + + def test_from_string_single_connection(self): + """Test creating source from string with single connection.""" + content = """ +[connections.dev] +accountname = test_account +username = test_user +password = test_pass +""" + source = SnowSQLConfigFile.from_string(content) + result = source.discover() + + assert "connections" in result + assert "dev" in result["connections"] + assert result["connections"]["dev"]["account"] == "test_account" + assert result["connections"]["dev"]["user"] == "test_user" + assert result["connections"]["dev"]["password"] == "test_pass" + + def test_from_string_multiple_connections(self): + """Test creating source from string with multiple connections.""" + content = """ +[connections.dev] +accountname = dev_account + +[connections.prod] +accountname = prod_account +""" + source = SnowSQLConfigFile.from_string(content) + result = source.discover() + + assert len(result["connections"]) == 2 + assert result["connections"]["dev"]["account"] == "dev_account" + assert result["connections"]["prod"]["account"] == "prod_account" + + def test_from_string_with_variables(self): + """Test creating source from string with variables section.""" + content = """ +[connections.test] +accountname = test_account + +[variables] +stage = mystage +table = 
mytable +""" + source = SnowSQLConfigFile.from_string(content) + result = source.discover() + + assert "variables" in result + assert result["variables"]["stage"] == "mystage" + assert result["variables"]["table"] == "mytable" + + def test_from_string_key_mapping(self): + """Test that SnowSQL key mapping works with string source.""" + content = """ +[connections.test] +accountname = acc +username = usr +dbname = db +schemaname = sch +warehousename = wh +rolename = rol +pwd = pass +""" + source = SnowSQLConfigFile.from_string(content) + result = source.discover() + + conn = result["connections"]["test"] + assert conn["account"] == "acc" + assert conn["user"] == "usr" + assert conn["database"] == "db" + assert conn["schema"] == "sch" + assert conn["warehouse"] == "wh" + assert conn["role"] == "rol" + assert conn["password"] == "pass" + + def test_from_string_empty_content(self): + """Test creating source from empty string.""" + source = SnowSQLConfigFile.from_string("") + result = source.discover() + + assert result == {} + + def test_from_string_default_connection(self): + """Test creating source with default connection (no name).""" + content = """ +[connections] +accountname = default_account +""" + source = SnowSQLConfigFile.from_string(content) + result = source.discover() + + assert "default" in result["connections"] + assert result["connections"]["default"]["account"] == "default_account" + + +class TestCliConfigFileFromString: + """Test CliConfigFile with string-based initialization.""" + + def test_from_string_single_connection(self): + """Test creating CLI config source from string.""" + content = """ +[connections.dev] +account = "test_account" +user = "test_user" +""" + source = CliConfigFile.from_string(content) + result = source.discover() + + assert "connections" in result + assert "dev" in result["connections"] + assert result["connections"]["dev"]["account"] == "test_account" + assert result["connections"]["dev"]["user"] == "test_user" + + def 
test_from_string_multiple_connections(self): + """Test creating CLI config with multiple connections.""" + content = """ +[connections.dev] +account = "dev_acc" + +[connections.prod] +account = "prod_acc" +""" + source = CliConfigFile.from_string(content) + result = source.discover() + + assert len(result["connections"]) == 2 + assert result["connections"]["dev"]["account"] == "dev_acc" + assert result["connections"]["prod"]["account"] == "prod_acc" + + def test_from_string_with_cli_section(self): + """Test creating CLI config with cli section.""" + content = """ +[cli] +enable_diag = true + +[cli.logs] +save_logs = true + +[connections.test] +account = "test_account" +""" + source = CliConfigFile.from_string(content) + result = source.discover() + + assert "cli" in result + assert result["cli"]["enable_diag"] is True + assert result["cli"]["logs"]["save_logs"] is True + assert result["connections"]["test"]["account"] == "test_account" + + def test_from_string_with_variables(self): + """Test creating CLI config with variables.""" + content = """ +[variables] +stage = "mystage" +env = "dev" + +[connections.test] +account = "test_account" +""" + source = CliConfigFile.from_string(content) + result = source.discover() + + assert "variables" in result + assert result["variables"]["stage"] == "mystage" + assert result["variables"]["env"] == "dev" + + def test_from_string_empty_content(self): + """Test creating CLI config from empty string.""" + source = CliConfigFile.from_string("") + result = source.discover() + + assert result == {} + + def test_from_string_nested_structure(self): + """Test creating CLI config with deeply nested structure.""" + content = """ +[cli.features] +feature1 = true +feature2 = false + +[cli.logs] +level = "INFO" +path = "/var/log" +""" + source = CliConfigFile.from_string(content) + result = source.discover() + + assert result["cli"]["features"]["feature1"] is True + assert result["cli"]["logs"]["level"] == "INFO" + + +class 
TestConnectionsConfigFileFromString: + """Test ConnectionsConfigFile with string-based initialization.""" + + def test_from_string_nested_format(self): + """Test creating connections file with nested format.""" + content = """ +[connections.dev] +account = "dev_account" +user = "dev_user" + +[connections.prod] +account = "prod_account" +user = "prod_user" +""" + source = ConnectionsConfigFile.from_string(content) + result = source.discover() + + assert "connections" in result + assert len(result["connections"]) == 2 + assert result["connections"]["dev"]["account"] == "dev_account" + assert result["connections"]["prod"]["account"] == "prod_account" + + def test_from_string_legacy_format(self): + """Test creating connections file with legacy format (direct sections).""" + content = """ +[dev] +account = "dev_account" +user = "dev_user" + +[prod] +account = "prod_account" +user = "prod_user" +""" + source = ConnectionsConfigFile.from_string(content) + result = source.discover() + + # Legacy format should be normalized to nested format + assert "connections" in result + assert len(result["connections"]) == 2 + assert result["connections"]["dev"]["account"] == "dev_account" + assert result["connections"]["prod"]["account"] == "prod_account" + + def test_from_string_mixed_format(self): + """Test creating connections file with mixed legacy and nested format.""" + content = """ +[legacy_conn] +account = "legacy_account" + +[connections.new_conn] +account = "new_account" +""" + source = ConnectionsConfigFile.from_string(content) + result = source.discover() + + # Both should be normalized to nested format + assert "connections" in result + assert len(result["connections"]) == 2 + assert result["connections"]["legacy_conn"]["account"] == "legacy_account" + assert result["connections"]["new_conn"]["account"] == "new_account" + + def test_from_string_nested_takes_precedence(self): + """Test that nested format takes precedence over legacy format.""" + content = """ +[test] 
+account = "legacy_account" + +[connections.test] +account = "new_account" +""" + source = ConnectionsConfigFile.from_string(content) + result = source.discover() + + # Nested format should win + assert result["connections"]["test"]["account"] == "new_account" + + def test_from_string_empty_content(self): + """Test creating connections file from empty string.""" + source = ConnectionsConfigFile.from_string("") + result = source.discover() + + # Empty TOML should return empty dict (no connections) + assert result == {} + + def test_from_string_single_connection(self): + """Test creating connections file with single connection.""" + content = """ +[connections.default] +account = "test_account" +user = "test_user" +password = "test_pass" +""" + source = ConnectionsConfigFile.from_string(content) + result = source.discover() + + assert "default" in result["connections"] + assert result["connections"]["default"]["account"] == "test_account" + + def test_get_defined_connections(self): + """Test getting defined connection names.""" + content = """ +[connections.dev] +account = "dev_acc" + +[connections.prod] +account = "prod_acc" +""" + source = ConnectionsConfigFile.from_string(content) + defined_connections = source.get_defined_connections() + + assert defined_connections == {"dev", "prod"} + + def test_get_defined_connections_legacy_format(self): + """Test getting defined connections with legacy format.""" + content = """ +[dev] +account = "dev_acc" + +[prod] +account = "prod_acc" +""" + source = ConnectionsConfigFile.from_string(content) + defined_connections = source.get_defined_connections() + + assert defined_connections == {"dev", "prod"} + + +class TestSourceProperties: + """Test source properties and metadata.""" + + def test_snowsql_config_source_name(self): + """Test SnowSQLConfigFile source name.""" + source = SnowSQLConfigFile.from_string("") + assert source.source_name == "snowsql_config" + + def test_cli_config_source_name(self): + """Test CliConfigFile 
source name.""" + source = CliConfigFile.from_string("") + assert source.source_name == "cli_config_toml" + + def test_connections_config_source_name(self): + """Test ConnectionsConfigFile source name.""" + source = ConnectionsConfigFile.from_string("") + assert source.source_name == "connections_toml" + + def test_connections_file_marker(self): + """Test that ConnectionsConfigFile is marked as connections file.""" + source = ConnectionsConfigFile.from_string("") + assert source.is_connections_file is True + + def test_non_connections_file_marker(self): + """Test that other sources don't have is_connections_file property.""" + cli_source = CliConfigFile.from_string("") + snowsql_source = SnowSQLConfigFile.from_string("") + + # These should not have the is_connections_file property + # or it should be False (default) + assert ( + not hasattr(cli_source, "is_connections_file") + or not cli_source.is_connections_file + ) + assert ( + not hasattr(snowsql_source, "is_connections_file") + or not snowsql_source.is_connections_file + ) diff --git a/tests/test_config_provider_integration.py b/tests/test_config_provider_integration.py index 836e27cbc8..99cbfdd288 100644 --- a/tests/test_config_provider_integration.py +++ b/tests/test_config_provider_integration.py @@ -51,37 +51,13 @@ def test_default_provider_is_legacy(self): provider = get_config_provider() assert isinstance(provider, LegacyConfigProvider) - def test_alternative_provider_enabled_with_true(self): - """Test enabling alternative provider with 'true'.""" - with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): - provider = get_config_provider() - assert isinstance(provider, AlternativeConfigProvider) - - def test_alternative_provider_enabled_with_1(self): - """Test enabling alternative provider with '1'.""" - with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "1"}): - provider = get_config_provider() - assert isinstance(provider, AlternativeConfigProvider) - - def 
test_alternative_provider_enabled_with_yes(self): - """Test enabling alternative provider with 'yes'.""" - with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "yes"}): - provider = get_config_provider() - assert isinstance(provider, AlternativeConfigProvider) - - def test_alternative_provider_enabled_with_on(self): - """Test enabling alternative provider with 'on'.""" - with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "on"}): - provider = get_config_provider() - assert isinstance(provider, AlternativeConfigProvider) - - def test_alternative_provider_case_insensitive(self): - """Test that environment variable is case-insensitive.""" - with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "TRUE"}): - provider = get_config_provider() - assert isinstance(provider, AlternativeConfigProvider) - - with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "Yes"}): + @pytest.mark.parametrize( + "env_value", + ["true", "1", "yes", "on", "TRUE", "True", "Yes", "YES", "ON"], + ) + def test_alternative_provider_enabled_with_various_values(self, env_value): + """Test enabling alternative provider with various truthy values.""" + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: env_value}): provider = get_config_provider() assert isinstance(provider, AlternativeConfigProvider) @@ -152,10 +128,12 @@ def test_section_exists_with_prefix(self): with mock.patch.object(provider, "_resolver") as mock_resolver: mock_resolver.resolve.return_value = { - "connections.default.account": "test_account", - "connections.default.user": "test_user", + "connections": { + "default": {"account": "test_account", "user": "test_user"} + } } provider._initialized = True + provider._config_cache = mock_resolver.resolve.return_value assert provider.section_exists("connections") assert provider.section_exists("connections", "default") @@ -168,6 +146,7 @@ def test_get_value_simple(self): with mock.patch.object(provider, "_resolver") as mock_resolver: 
mock_resolver.resolve.return_value = {"account": "test_account"} provider._initialized = True + provider._config_cache = mock_resolver.resolve.return_value # Prevent re-initialization due to config_file_override check from snowflake.cli.api.cli_global_context import get_cli_context @@ -185,9 +164,10 @@ def test_get_value_with_path(self): with mock.patch.object(provider, "_resolver") as mock_resolver: mock_resolver.resolve.return_value = { - "connections.default.account": "test_account" + "connections": {"default": {"account": "test_account"}} } provider._initialized = True + provider._config_cache = mock_resolver.resolve.return_value # Prevent re-initialization due to config_file_override check from snowflake.cli.api.cli_global_context import get_cli_context @@ -218,6 +198,7 @@ def test_get_section_root(self): config_data = {"key1": "value1", "key2": "value2"} mock_resolver.resolve.return_value = config_data provider._initialized = True + provider._config_cache = config_data # Prevent re-initialization due to config_file_override check from snowflake.cli.api.cli_global_context import get_cli_context @@ -235,11 +216,13 @@ def test_get_section_connections(self): with mock.patch.object(provider, "_resolver") as mock_resolver: mock_resolver.resolve.return_value = { - "connections.default.account": "test_account", - "connections.default.user": "test_user", - "connections.prod.account": "prod_account", + "connections": { + "default": {"account": "test_account", "user": "test_user"}, + "prod": {"account": "prod_account"}, + } } provider._initialized = True + provider._config_cache = mock_resolver.resolve.return_value # Prevent re-initialization due to config_file_override check from snowflake.cli.api.cli_global_context import get_cli_context @@ -260,10 +243,12 @@ def test_get_section_specific_connection(self): with mock.patch.object(provider, "_resolver") as mock_resolver: mock_resolver.resolve.return_value = { - "connections.default.account": "test_account", - 
"connections.default.user": "test_user", + "connections": { + "default": {"account": "test_account", "user": "test_user"} + } } provider._initialized = True + provider._config_cache = mock_resolver.resolve.return_value # Prevent re-initialization due to config_file_override check from snowflake.cli.api.cli_global_context import get_cli_context @@ -285,11 +270,16 @@ def test_get_connection_dict(self): with mock.patch.object(provider, "_resolver") as mock_resolver: mock_resolver.resolve.return_value = { - "connections.default.account": "test_account", - "connections.default.user": "test_user", - "connections.default.password": "secret", + "connections": { + "default": { + "account": "test_account", + "user": "test_user", + "password": "secret", + } + } } provider._initialized = True + provider._config_cache = mock_resolver.resolve.return_value # Prevent re-initialization due to config_file_override check from snowflake.cli.api.cli_global_context import get_cli_context @@ -322,12 +312,13 @@ def test_get_all_connections_dict(self): with mock.patch.object(provider, "_resolver") as mock_resolver: mock_resolver.resolve.return_value = { - "connections.default.account": "test_account", - "connections.default.user": "test_user", - "connections.prod.account": "prod_account", - "connections.prod.user": "prod_user", + "connections": { + "default": {"account": "test_account", "user": "test_user"}, + "prod": {"account": "prod_account", "user": "prod_user"}, + } } provider._initialized = True + provider._config_cache = mock_resolver.resolve.return_value # Prevent re-initialization due to config_file_override check from snowflake.cli.api.cli_global_context import get_cli_context From 5a9b74a4647879edb655614e693f6ebc1e068056 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 21 Oct 2025 10:37:27 +0200 Subject: [PATCH 55/78] SNOW-2306184: config refactor - Release Notes --- RELEASE-NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md index 
a1b4af1ca8..5ab9f66845 100644 --- a/RELEASE-NOTES.md +++ b/RELEASE-NOTES.md @@ -15,6 +15,7 @@ --> # Unreleased version ## Backward incompatibility +* **Configuration System (NG)**: File-based configuration sources (`snowsql_config`, `cli_config_toml`, `connections_toml`) now use **connection-level replacement** instead of field-level merging. When a later file source defines a connection, it completely replaces the entire connection from earlier file sources - fields are NOT inherited. Environment variables and CLI arguments continue to overlay per-field on top of the file-derived connection. This provides more predictable configuration behavior where file-defined connections are atomic units. ## Deprecations From 62db9ab85f5f68978d44dedec6ce8b04eff9f60a Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 22 Oct 2025 10:46:19 +0200 Subject: [PATCH 56/78] SNOW-2306184: config refactor - telemetry --- src/snowflake/cli/_app/telemetry.py | 55 +++++ src/snowflake/cli/api/config_ng/__init__.py | 6 + .../api/config_ng/telemetry_integration.py | 100 +++++++++ src/snowflake/cli/api/config_provider.py | 18 ++ src/snowflake/cli/api/metrics.py | 22 ++ tests/app/test_telemetry.py | 158 ++++++++++---- tests/config_ng/test_telemetry_integration.py | 204 ++++++++++++++++++ 7 files changed, 521 insertions(+), 42 deletions(-) create mode 100644 src/snowflake/cli/api/config_ng/telemetry_integration.py create mode 100644 tests/config_ng/test_telemetry_integration.py diff --git a/src/snowflake/cli/_app/telemetry.py b/src/snowflake/cli/_app/telemetry.py index 7e6f956a57..827ea89398 100644 --- a/src/snowflake/cli/_app/telemetry.py +++ b/src/snowflake/cli/_app/telemetry.py @@ -61,6 +61,11 @@ class CLITelemetryField(Enum): COMMAND_CI_ENVIRONMENT = "command_ci_environment" # Configuration CONFIG_FEATURE_FLAGS = "config_feature_flags" + CONFIG_PROVIDER_TYPE = "config_provider_type" + CONFIG_SOURCES_USED = "config_sources_used" + CONFIG_SOURCE_WINS = "config_source_wins" + 
CONFIG_TOTAL_KEYS_RESOLVED = "config_total_keys_resolved" + CONFIG_KEYS_WITH_OVERRIDES = "config_keys_with_overrides" # Metrics COUNTERS = "counters" SPANS = "spans" @@ -219,6 +224,55 @@ def python_version() -> str: return f"{py_ver.major}.{py_ver.minor}.{py_ver.micro}" +def _get_config_telemetry() -> TelemetryDict: + """Get configuration resolution telemetry data.""" + try: + from snowflake.cli.api.config_ng.telemetry_integration import ( + get_config_telemetry_payload, + ) + from snowflake.cli.api.config_provider import ( + AlternativeConfigProvider, + get_config_provider_singleton, + ) + + provider = get_config_provider_singleton() + + # Identify which config provider is being used + provider_type = ( + "ng" if isinstance(provider, AlternativeConfigProvider) else "legacy" + ) + + result: TelemetryDict = {CLITelemetryField.CONFIG_PROVIDER_TYPE: provider_type} + + # Get detailed telemetry if using ng config + if isinstance(provider, AlternativeConfigProvider): + provider._ensure_initialized() # noqa: SLF001 + payload = get_config_telemetry_payload(provider._resolver) # noqa: SLF001 + + # Map payload keys to telemetry fields + if payload: + if "config_sources_used" in payload: + result[CLITelemetryField.CONFIG_SOURCES_USED] = payload[ + "config_sources_used" + ] + if "config_source_wins" in payload: + result[CLITelemetryField.CONFIG_SOURCE_WINS] = payload[ + "config_source_wins" + ] + if "config_total_keys_resolved" in payload: + result[CLITelemetryField.CONFIG_TOTAL_KEYS_RESOLVED] = payload[ + "config_total_keys_resolved" + ] + if "config_keys_with_overrides" in payload: + result[CLITelemetryField.CONFIG_KEYS_WITH_OVERRIDES] = payload[ + "config_keys_with_overrides" + ] + + return result + except Exception: + return {} + + class CLITelemetryClient: @property def _ctx(self) -> _CliGlobalContextAccess: @@ -239,6 +293,7 @@ def generate_telemetry_data_dict( k: str(v) for k, v in get_feature_flags_section().items() }, **_find_command_info(), + 
**_get_config_telemetry(), **telemetry_payload, } # To map Enum to string, so we don't have to use .value every time diff --git a/src/snowflake/cli/api/config_ng/__init__.py b/src/snowflake/cli/api/config_ng/__init__.py index cbe465eaa6..ab7e7ddded 100644 --- a/src/snowflake/cli/api/config_ng/__init__.py +++ b/src/snowflake/cli/api/config_ng/__init__.py @@ -72,6 +72,10 @@ SnowSQLSection, get_merged_variables, ) +from snowflake.cli.api.config_ng.telemetry_integration import ( + get_config_telemetry_payload, + record_config_source_usage, +) __all__ = [ "check_value_source", @@ -91,12 +95,14 @@ "extract_root_level_connection_params", "FILE_SOURCE_NAMES", "format_summary_for_display", + "get_config_telemetry_payload", "get_merged_variables", "get_resolution_summary", "get_resolver", "INTERNAL_CLI_PARAMETERS", "is_resolution_logging_available", "merge_params_into_connections", + "record_config_source_usage", "ResolutionEntry", "ResolutionHistory", "ResolutionHistoryTracker", diff --git a/src/snowflake/cli/api/config_ng/telemetry_integration.py b/src/snowflake/cli/api/config_ng/telemetry_integration.py new file mode 100644 index 0000000000..f2c4d84a2f --- /dev/null +++ b/src/snowflake/cli/api/config_ng/telemetry_integration.py @@ -0,0 +1,100 @@ +# Copyright (c) 2024 Snowflake Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Telemetry integration for config_ng system. 
+ +This module provides functions to track configuration source usage +and integrate with the CLI's telemetry system. +""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, Optional + +if TYPE_CHECKING: + from snowflake.cli.api.config_ng.resolver import ConfigurationResolver + + +# Map source names to counter field names +SOURCE_TO_COUNTER = { + "snowsql_config": "config_source_snowsql", + "cli_config_toml": "config_source_cli_toml", + "connections_toml": "config_source_connections_toml", + "snowsql_env": "config_source_snowsql_env", + "connection_specific_env": "config_source_connection_env", + "cli_env": "config_source_cli_env", + "cli_arguments": "config_source_cli_args", +} + + +def record_config_source_usage(resolver: ConfigurationResolver) -> None: + """ + Record configuration source usage to CLI metrics. + + This should be called after configuration resolution completes. + Sets counters to 1 for sources that provided winning values, 0 otherwise. 
+ + Args: + resolver: The ConfigurationResolver instance + """ + try: + from snowflake.cli.api.cli_global_context import get_cli_context + from snowflake.cli.api.metrics import CLICounterField + + cli_context = get_cli_context() + summary = resolver.get_tracker().get_summary() + + # Track which sources won (provided final values) + source_wins = summary.get("source_wins", {}) + + # Set counters for each source + for source_name, counter_name in SOURCE_TO_COUNTER.items(): + # Set to 1 if this source provided any winning values, 0 otherwise + value = 1 if source_wins.get(source_name, 0) > 0 else 0 + counter_field = getattr(CLICounterField, counter_name.upper(), None) + if counter_field: + cli_context.metrics.set_counter(counter_field, value) + + except Exception: + # Don't break execution if telemetry fails + pass + + +def get_config_telemetry_payload( + resolver: Optional[ConfigurationResolver], +) -> Dict[str, Any]: + """ + Get configuration telemetry payload for inclusion in command telemetry. 
+ + Args: + resolver: Optional ConfigurationResolver instance + + Returns: + Dictionary with config telemetry data + """ + if resolver is None: + return {} + + try: + summary = resolver.get_tracker().get_summary() + + return { + "config_sources_used": list(summary.get("source_usage", {}).keys()), + "config_source_wins": summary.get("source_wins", {}), + "config_total_keys_resolved": summary.get("total_keys_resolved", 0), + "config_keys_with_overrides": summary.get("keys_with_overrides", 0), + } + except Exception: + return {} diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 19088fc7d7..62ce0e0de5 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -293,8 +293,26 @@ def _ensure_initialized(self) -> None: if not self._config_cache: self._config_cache = self._resolver.resolve() + # Record telemetry about config sources used + self._record_config_telemetry() + self._initialized = True + def _record_config_telemetry(self) -> None: + """Record configuration source usage to telemetry system.""" + if self._resolver is None: + return + + try: + from snowflake.cli.api.config_ng.telemetry_integration import ( + record_config_source_usage, + ) + + record_config_source_usage(self._resolver) + except Exception: + # Don't break initialization if telemetry fails + pass + def read_config(self) -> None: """ Load configuration from all sources. 
diff --git a/src/snowflake/cli/api/metrics.py b/src/snowflake/cli/api/metrics.py index 69778872e1..94c898db2f 100644 --- a/src/snowflake/cli/api/metrics.py +++ b/src/snowflake/cli/api/metrics.py @@ -76,6 +76,28 @@ class CLICounterField: EVENT_SHARING_ERROR = ( f"{_TypePrefix.FEATURES}.{_DomainPrefix.APP}.event_sharing_error" ) + # Config source usage tracking + CONFIG_SOURCE_SNOWSQL = ( + f"{_TypePrefix.FEATURES}.{_DomainPrefix.GLOBAL}.config_source_snowsql" + ) + CONFIG_SOURCE_CLI_TOML = ( + f"{_TypePrefix.FEATURES}.{_DomainPrefix.GLOBAL}.config_source_cli_toml" + ) + CONFIG_SOURCE_CONNECTIONS_TOML = ( + f"{_TypePrefix.FEATURES}.{_DomainPrefix.GLOBAL}.config_source_connections_toml" + ) + CONFIG_SOURCE_SNOWSQL_ENV = ( + f"{_TypePrefix.FEATURES}.{_DomainPrefix.GLOBAL}.config_source_snowsql_env" + ) + CONFIG_SOURCE_CONNECTION_ENV = ( + f"{_TypePrefix.FEATURES}.{_DomainPrefix.GLOBAL}.config_source_connection_env" + ) + CONFIG_SOURCE_CLI_ENV = ( + f"{_TypePrefix.FEATURES}.{_DomainPrefix.GLOBAL}.config_source_cli_env" + ) + CONFIG_SOURCE_CLI_ARGS = ( + f"{_TypePrefix.FEATURES}.{_DomainPrefix.GLOBAL}.config_source_cli_args" + ) @dataclass diff --git a/tests/app/test_telemetry.py b/tests/app/test_telemetry.py index 8792e1f5e1..a4967cf0c1 100644 --- a/tests/app/test_telemetry.py +++ b/tests/app/test_telemetry.py @@ -19,6 +19,10 @@ import pytest import typer from click import ClickException +from snowflake.cli.api.config_provider import ( + ALTERNATIVE_CONFIG_ENV_VAR, + reset_config_provider, +) from snowflake.cli.api.constants import ObjectType from snowflake.cli.api.exceptions import CouldNotUseObjectError from snowflake.cli.api.feature_flags import BooleanFlag, FeatureFlagMixin @@ -41,52 +45,122 @@ class _TestFlags(FeatureFlagMixin): @mock.patch("snowflake.connector.connect") @mock.patch("snowflake.cli._plugins.connection.commands.ObjectManager") @with_feature_flags({_TestFlags.FOO: False}) -def test_executing_command_sends_telemetry_usage_data( +def 
test_executing_command_sends_telemetry_usage_data_legacy_config( _, mock_conn, mock_time, mock_uuid4, mock_platform, mock_version, runner ): - mock_time.return_value = "123" - mock_platform.return_value = "FancyOS" - mock_version.return_value = "2.3.4" - mock_uuid4.return_value = uuid.UUID("8a2225b3800c4017a4a9eab941db58fa") - result = runner.invoke(["connection", "test"], catch_exceptions=False) - assert result.exit_code == 0, result.output - # The method is called with a TelemetryData type, so we cast it to dict for simpler comparison - usage_command_event = ( - mock_conn.return_value._telemetry.try_add_log_to_batch.call_args_list[ # noqa: SLF001 - 0 - ] - .args[0] - .to_dict() - ) + """Test telemetry with legacy config provider.""" + # Ensure legacy config is used + with mock.patch.dict(os.environ, {}, clear=False): + if ALTERNATIVE_CONFIG_ENV_VAR in os.environ: + del os.environ[ALTERNATIVE_CONFIG_ENV_VAR] + reset_config_provider() - del usage_command_event["message"][ - "command_ci_environment" - ] # to avoid side effect from CI - assert usage_command_event == { - "message": { - "driver_type": "PythonConnector", - "driver_version": ".".join(str(s) for s in DRIVER_VERSION[:3]), - "source": "snowcli", - "version_cli": "0.0.0-test_patched", - "version_os": "FancyOS", - "version_python": "2.3.4", - "installation_source": "pypi", - "command": ["connection", "test"], - "command_group": "connection", - "command_execution_id": "8a2225b3800c4017a4a9eab941db58fa", - "command_flags": {"diag_log_path": "DEFAULT", "format": "DEFAULT"}, - "command_output_type": "TABLE", - "type": "executing_command", - "project_definition_version": "None", - "config_feature_flags": { - "dummy_flag": "True", - "foo": "False", - "wrong_type_flag": "UNKNOWN", + mock_time.return_value = "123" + mock_platform.return_value = "FancyOS" + mock_version.return_value = "2.3.4" + mock_uuid4.return_value = uuid.UUID("8a2225b3800c4017a4a9eab941db58fa") + result = runner.invoke(["connection", "test"], 
catch_exceptions=False) + assert result.exit_code == 0, result.output + # The method is called with a TelemetryData type, so we cast it to dict for simpler comparison + usage_command_event = ( + mock_conn.return_value._telemetry.try_add_log_to_batch.call_args_list[ # noqa: SLF001 + 0 + ] + .args[0] + .to_dict() + ) + + del usage_command_event["message"][ + "command_ci_environment" + ] # to avoid side effect from CI + assert usage_command_event == { + "message": { + "driver_type": "PythonConnector", + "driver_version": ".".join(str(s) for s in DRIVER_VERSION[:3]), + "source": "snowcli", + "version_cli": "0.0.0-test_patched", + "version_os": "FancyOS", + "version_python": "2.3.4", + "installation_source": "pypi", + "command": ["connection", "test"], + "command_group": "connection", + "command_execution_id": "8a2225b3800c4017a4a9eab941db58fa", + "command_flags": {"diag_log_path": "DEFAULT", "format": "DEFAULT"}, + "command_output_type": "TABLE", + "type": "executing_command", + "project_definition_version": "None", + "config_feature_flags": { + "dummy_flag": "True", + "foo": "False", + "wrong_type_flag": "UNKNOWN", + }, + "config_provider_type": "legacy", + "mode": "cmd", }, - "mode": "cmd", - }, - "timestamp": "123", - } + "timestamp": "123", + } + + +@mock.patch( + "snowflake.cli._app.telemetry.python_version", +) +@mock.patch("snowflake.cli._app.telemetry.platform.platform") +@mock.patch("uuid.uuid4") +@mock.patch("snowflake.cli._app.telemetry.get_time_millis") +@mock.patch("snowflake.connector.connect") +@mock.patch("snowflake.cli._plugins.connection.commands.ObjectManager") +@with_feature_flags({_TestFlags.FOO: False}) +def test_executing_command_sends_telemetry_usage_data_ng_config( + _, mock_conn, mock_time, mock_uuid4, mock_platform, mock_version, runner +): + """Test telemetry with NG config provider.""" + # Enable NG config + with mock.patch.dict(os.environ, {ALTERNATIVE_CONFIG_ENV_VAR: "true"}): + reset_config_provider() + + mock_time.return_value = "123" + 
mock_platform.return_value = "FancyOS" + mock_version.return_value = "2.3.4" + mock_uuid4.return_value = uuid.UUID("8a2225b3800c4017a4a9eab941db58fa") + result = runner.invoke(["connection", "test"], catch_exceptions=False) + assert result.exit_code == 0, result.output + + # The method is called with a TelemetryData type, so we cast it to dict for simpler comparison + usage_command_event = ( + mock_conn.return_value._telemetry.try_add_log_to_batch.call_args_list[ # noqa: SLF001 + 0 + ] + .args[0] + .to_dict() + ) + + del usage_command_event["message"][ + "command_ci_environment" + ] # to avoid side effect from CI + + # Verify common fields + message = usage_command_event["message"] + assert message["driver_type"] == "PythonConnector" + assert message["source"] == "snowcli" + assert message["version_cli"] == "0.0.0-test_patched" + assert message["version_os"] == "FancyOS" + assert message["version_python"] == "2.3.4" + assert message["command"] == ["connection", "test"] + assert message["command_group"] == "connection" + assert message["type"] == "executing_command" + assert message["config_provider_type"] == "ng" + + # Verify NG-specific config fields are present + assert "config_sources_used" in message + assert "config_source_wins" in message + assert "config_total_keys_resolved" in message + assert "config_keys_with_overrides" in message + + # These fields should be present (values will vary based on test config) + assert isinstance(message["config_sources_used"], list) + assert isinstance(message["config_source_wins"], dict) + assert isinstance(message["config_total_keys_resolved"], int) + assert isinstance(message["config_keys_with_overrides"], int) @pytest.mark.parametrize( diff --git a/tests/config_ng/test_telemetry_integration.py b/tests/config_ng/test_telemetry_integration.py new file mode 100644 index 0000000000..129ad19c1a --- /dev/null +++ b/tests/config_ng/test_telemetry_integration.py @@ -0,0 +1,204 @@ +# Copyright (c) 2024 Snowflake Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests for config_ng telemetry integration.""" + +from unittest.mock import MagicMock, patch + +from snowflake.cli.api.config_ng import ( + CliParameters, + ConfigurationResolver, + get_config_telemetry_payload, + record_config_source_usage, +) +from snowflake.cli.api.config_ng.sources import CliConfigFile + + +class TestRecordConfigSourceUsage: + """Tests for record_config_source_usage function.""" + + def test_records_winning_sources(self): + """Test that winning sources are recorded as counters.""" + # Create resolver with some sources + cli_config = CliConfigFile.from_string( + """ + [connections.test] + account = "test_account" + user = "test_user" + """ + ) + cli_params = CliParameters(cli_context={"password": "secret"}) + resolver = ConfigurationResolver(sources=[cli_config, cli_params]) + + # Resolve to populate history + resolver.resolve() + + # Mock CLI context + mock_context = MagicMock() + mock_metrics = MagicMock() + mock_context.metrics = mock_metrics + + with patch( + "snowflake.cli.api.cli_global_context.get_cli_context", + return_value=mock_context, + ): + record_config_source_usage(resolver) + + # Verify counters were set + assert mock_metrics.set_counter.called + # Should have calls for all sources + call_args = [call[0] for call in mock_metrics.set_counter.call_args_list] + counter_fields = [arg[0] for arg in call_args] + + # Verify some expected counter fields are present + assert 
any("config_source" in str(field) for field in counter_fields) + + def test_handles_no_cli_context_gracefully(self): + """Test that function doesn't fail if CLI context unavailable.""" + cli_config = CliConfigFile.from_string( + """ + [connections.test] + account = "test_account" + """ + ) + resolver = ConfigurationResolver(sources=[cli_config]) + resolver.resolve() + + with patch( + "snowflake.cli.api.cli_global_context.get_cli_context", + side_effect=Exception("No context"), + ): + # Should not raise + record_config_source_usage(resolver) + + def test_sets_counter_to_zero_for_unused_sources(self): + """Test that unused sources get counter value 0.""" + # Only use CLI config, not CLI params + cli_config = CliConfigFile.from_string( + """ + [connections.test] + account = "test_account" + """ + ) + resolver = ConfigurationResolver(sources=[cli_config]) + resolver.resolve() + + mock_context = MagicMock() + mock_metrics = MagicMock() + mock_context.metrics = mock_metrics + + with patch( + "snowflake.cli.api.cli_global_context.get_cli_context", + return_value=mock_context, + ): + record_config_source_usage(resolver) + + # Check that at least one source was set to 0 + call_args = mock_metrics.set_counter.call_args_list + values = [call[0][1] for call in call_args] + assert 0 in values + + +class TestGetConfigTelemetryPayload: + """Tests for get_config_telemetry_payload function.""" + + def test_returns_empty_dict_for_none_resolver(self): + """Test that None resolver returns empty dict.""" + result = get_config_telemetry_payload(None) + assert result == {} + + def test_returns_summary_data(self): + """Test that function returns summary data from resolver.""" + cli_config = CliConfigFile.from_string( + """ + [connections.test] + account = "test_account" + user = "test_user" + """ + ) + cli_params = CliParameters(cli_context={"password": "secret"}) + resolver = ConfigurationResolver(sources=[cli_config, cli_params]) + + # Resolve to populate history + resolver.resolve() + 
+ result = get_config_telemetry_payload(resolver) + + # Verify expected keys are present + assert "config_sources_used" in result + assert "config_source_wins" in result + assert "config_total_keys_resolved" in result + assert "config_keys_with_overrides" in result + + # Verify data types + assert isinstance(result["config_sources_used"], list) + assert isinstance(result["config_source_wins"], dict) + assert isinstance(result["config_total_keys_resolved"], int) + assert isinstance(result["config_keys_with_overrides"], int) + + def test_handles_resolver_errors_gracefully(self): + """Test that function handles resolver errors gracefully.""" + mock_resolver = MagicMock() + mock_resolver.get_tracker.side_effect = Exception("Tracker error") + + result = get_config_telemetry_payload(mock_resolver) + assert result == {} + + def test_tracks_source_wins_correctly(self): + """Test that source wins are tracked correctly.""" + cli_config = CliConfigFile.from_string( + """ + [connections.test] + account = "test_account" + """ + ) + # CLI params should win for password + cli_params = CliParameters(cli_context={"password": "override"}) + resolver = ConfigurationResolver(sources=[cli_config, cli_params]) + + resolver.resolve() + + result = get_config_telemetry_payload(resolver) + + # Verify that cli_arguments won for password + source_wins = result["config_source_wins"] + assert "cli_arguments" in source_wins + assert source_wins["cli_arguments"] > 0 + + +class TestTelemetryIntegration: + """Integration tests for telemetry system.""" + + def test_telemetry_records_from_config_provider(self): + """Test that config provider records telemetry on initialization.""" + from snowflake.cli.api.config_provider import AlternativeConfigProvider + + mock_context = MagicMock() + mock_metrics = MagicMock() + mock_context.metrics = mock_metrics + mock_context.connection_context.present_values_as_dict.return_value = {} + mock_context.config_file_override = None + + def mock_getter(): + return 
mock_context + + with patch( + "snowflake.cli.api.cli_global_context.get_cli_context", + return_value=mock_context, + ): + provider = AlternativeConfigProvider(cli_context_getter=mock_getter) + provider._ensure_initialized() # noqa: SLF001 + + # Verify that telemetry was recorded + assert mock_metrics.set_counter.called From 51f7f9dbd5b97501326ae967612ad9d24da8ded5 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 22 Oct 2025 15:41:51 +0200 Subject: [PATCH 57/78] SNOW-2306184: config refactor - telemetry test update --- tests_integration/nativeapp/test_metrics.py | 65 ++++++++++----------- 1 file changed, 30 insertions(+), 35 deletions(-) diff --git a/tests_integration/nativeapp/test_metrics.py b/tests_integration/nativeapp/test_metrics.py index fb794bf418..f86d15d893 100644 --- a/tests_integration/nativeapp/test_metrics.py +++ b/tests_integration/nativeapp/test_metrics.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from shlex import split -from typing import Dict, Callable +from typing import Callable, Dict, List from unittest import mock from snowflake.cli._app.telemetry import TelemetryEvent, CLITelemetryField @@ -111,13 +111,12 @@ def test_feature_counters_v1_post_deploy_set_and_package_scripts_available( mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value ) - assert message[CLITelemetryField.COUNTERS.value] == { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 0, - CLICounterField.PDF_TEMPLATES: 0, - CLICounterField.POST_DEPLOY_SCRIPTS: 1, - CLICounterField.PACKAGE_SCRIPTS: 0, - } + counters = message[CLITelemetryField.COUNTERS.value] + assert counters[CLICounterField.SNOWPARK_PROCESSOR] == 0 + assert counters[CLICounterField.TEMPLATES_PROCESSOR] == 0 + assert counters[CLICounterField.PDF_TEMPLATES] == 0 + assert counters[CLICounterField.POST_DEPLOY_SCRIPTS] == 1 + assert counters[CLICounterField.PACKAGE_SCRIPTS] == 0 @pytest.mark.integration @@ -161,11 +160,10 @@ def test_feature_counters_v2_post_deploy_not_available_in_bundle( mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value ) - assert message[CLITelemetryField.COUNTERS.value] == { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 0, - CLICounterField.PDF_TEMPLATES: 1, - } + counters = message[CLITelemetryField.COUNTERS.value] + assert counters[CLICounterField.SNOWPARK_PROCESSOR] == 0 + assert counters[CLICounterField.TEMPLATES_PROCESSOR] == 0 + assert counters[CLICounterField.PDF_TEMPLATES] == 1 @pytest.mark.integration @@ -207,15 +205,14 @@ def test_feature_counter_v2_templates_processor_set( mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value ) - assert message[CLITelemetryField.COUNTERS.value] == { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 1, - CLICounterField.PDF_TEMPLATES: 0, - CLICounterField.POST_DEPLOY_SCRIPTS: 0, - CLICounterField.EVENT_SHARING: 0, - CLICounterField.EVENT_SHARING_ERROR: 0, - 
CLICounterField.EVENT_SHARING_WARNING: 0, - } + counters = message[CLITelemetryField.COUNTERS.value] + assert counters[CLICounterField.SNOWPARK_PROCESSOR] == 0 + assert counters[CLICounterField.TEMPLATES_PROCESSOR] == 1 + assert counters[CLICounterField.PDF_TEMPLATES] == 0 + assert counters[CLICounterField.POST_DEPLOY_SCRIPTS] == 0 + assert counters[CLICounterField.EVENT_SHARING] == 0 + assert counters[CLICounterField.EVENT_SHARING_ERROR] == 0 + assert counters[CLICounterField.EVENT_SHARING_WARNING] == 0 @pytest.mark.integration @@ -244,13 +241,12 @@ def test_feature_counter_v1_package_scripts_converted_to_post_deploy_and_both_se mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value ) - assert message[CLITelemetryField.COUNTERS.value] == { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 0, - CLICounterField.PDF_TEMPLATES: 0, - CLICounterField.POST_DEPLOY_SCRIPTS: 1, - CLICounterField.PACKAGE_SCRIPTS: 1, - } + counters = message[CLITelemetryField.COUNTERS.value] + assert counters[CLICounterField.SNOWPARK_PROCESSOR] == 0 + assert counters[CLICounterField.TEMPLATES_PROCESSOR] == 0 + assert counters[CLICounterField.PDF_TEMPLATES] == 0 + assert counters[CLICounterField.POST_DEPLOY_SCRIPTS] == 1 + assert counters[CLICounterField.PACKAGE_SCRIPTS] == 1 @pytest.mark.integration @@ -289,12 +285,11 @@ def test_feature_counter_v2_post_deploy_set_and_package_scripts_not_available( mock_telemetry, TelemetryEvent.CMD_EXECUTION_RESULT.value ) - assert message[CLITelemetryField.COUNTERS.value] == { - CLICounterField.SNOWPARK_PROCESSOR: 0, - CLICounterField.TEMPLATES_PROCESSOR: 0, - CLICounterField.PDF_TEMPLATES: 1, - CLICounterField.POST_DEPLOY_SCRIPTS: 1, - } + counters = message[CLITelemetryField.COUNTERS.value] + assert counters[CLICounterField.SNOWPARK_PROCESSOR] == 0 + assert counters[CLICounterField.TEMPLATES_PROCESSOR] == 0 + assert counters[CLICounterField.PDF_TEMPLATES] == 1 + assert counters[CLICounterField.POST_DEPLOY_SCRIPTS] == 1 
@pytest.mark.integration From 7683f22639ffd0679cfcc926993317dd3d7cc991 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 23 Oct 2025 07:10:16 +0200 Subject: [PATCH 58/78] SNOW-2306184: config refactor - allow empty connections --- src/snowflake/cli/api/config_provider.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 62ce0e0de5..04f6c59d4d 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -448,8 +448,8 @@ def _get_connection_dict_internal(self, connection_name: str) -> Dict[str, Any]: connections[connection_name], dict ): result = connections[connection_name] - if result: - return result + # Allow empty connections - they're valid (just have no parameters set) + return result raise MissingConfigurationError( f"Connection {connection_name} is not configured" From aee3874f850d4fafdbd765ee84e986faf41a8f1e Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 23 Oct 2025 10:21:18 +0200 Subject: [PATCH 59/78] SNOW-2306184: config refactor - allow empty connections test fix --- .../config_ng/test_connection_replacement.py | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/tests/config_ng/test_connection_replacement.py b/tests/config_ng/test_connection_replacement.py index 9d8787780f..9f572d8efb 100644 --- a/tests/config_ng/test_connection_replacement.py +++ b/tests/config_ng/test_connection_replacement.py @@ -320,7 +320,11 @@ def test_multiple_connections_independent_replacement(config_ng_setup): def test_empty_connection_replacement(config_ng_setup): """ Test that an empty connection in a later FILE source still replaces - the entire connection from earlier sources, resulting in no configured connection. + the entire connection from earlier sources. + + The empty connection is considered valid (it exists in config), but has + no parameters. 
Validation of required fields happens when actually using + the connection to connect to Snowflake. """ cli_config = """ [connections.test] @@ -335,15 +339,16 @@ def test_empty_connection_replacement(config_ng_setup): """ with config_ng_setup(cli_config=cli_config, connections_toml=connections_toml): - import pytest from snowflake.cli.api.config import get_connection_dict - from snowflake.cli.api.exceptions import MissingConfigurationError - # Empty connection replacement means no parameters, which raises an error - with pytest.raises( - MissingConfigurationError, match="Connection test is not configured" - ): - get_connection_dict("test") + # Empty connection replacement: connection exists but has no parameters + conn = get_connection_dict("test") + assert conn == {} # Connection exists but is empty + + # No parameters from cli_config are inherited (connection was replaced) + assert "account" not in conn + assert "user" not in conn + assert "warehouse" not in conn def test_overlay_precedence_connection_specific_over_global(config_ng_setup): From f3effcc72cb7937cc77c6df3b77429ad6d07426b Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 23 Oct 2025 16:35:22 +0200 Subject: [PATCH 60/78] SNOW-2306184: config refactor - after rebase fixes --- tests/output/test_format_silent_enforcement.py | 2 ++ tests/output/test_printing.py | 9 +++++++++ 2 files changed, 11 insertions(+) diff --git a/tests/output/test_format_silent_enforcement.py b/tests/output/test_format_silent_enforcement.py index 7f2823dc9b..0ae5aae3bc 100644 --- a/tests/output/test_format_silent_enforcement.py +++ b/tests/output/test_format_silent_enforcement.py @@ -27,6 +27,7 @@ def test_table_result_with_silent_enabled(runner): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ + """ ) @@ -47,6 +48,7 @@ def 
test_table_result_with_silent_disabled(runner): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ + """ ) diff --git a/tests/output/test_printing.py b/tests/output/test_printing.py index 45d9862ec7..f4af9ab09f 100644 --- a/tests/output/test_printing.py +++ b/tests/output/test_printing.py @@ -115,6 +115,7 @@ def test_single_collection_result(capsys, mock_cursor): | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------+ + """ ) @@ -146,6 +147,7 @@ def test_print_markup_tags_in_output_do_not_raise_errors(capsys, mock_cursor): |------------------------------------------------| | [INST]footranscript[/INST] | +------------------------------------------------+ + """ ) @@ -161,12 +163,14 @@ def test_print_multi_results_table(capsys, _multiple_results): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ + +---------------------------------------------------------------------+ | string | number | array | object | date | |--------+--------+-----------+-----------------+---------------------| | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ + """ ) @@ -199,12 +203,14 @@ def test_print_different_multi_results_table(capsys, _multiple_different_results | string | 42 | | string | 43 | +-----------------+ + +---------------------------------------------------+ | array | object | date | |-----------+-----------------+---------------------| | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | ['array'] | {'k': 
'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------+ + """ ) @@ -237,6 +243,7 @@ def test_print_different_data_sources_table(capsys, _multiple_data_sources): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ + Command done +---------+ | key | @@ -244,6 +251,7 @@ def test_print_different_data_sources_table(capsys, _multiple_data_sources): | value_0 | | value_1 | +---------+ + """ ) @@ -401,6 +409,7 @@ def test_print_bytearray(capsys, _bytearray_result): |----------------------------------| | 544849532053484f554c4420574f524b | +----------------------------------+ + """ ) From daa81f43727836db537920a7654a65762abd737c Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Thu, 23 Oct 2025 21:41:38 +0200 Subject: [PATCH 61/78] SNOW-2306184: config refactor - after rebase fixes 2 --- tests/output/test_format_silent_enforcement.py | 2 -- tests/output/test_printing.py | 9 --------- 2 files changed, 11 deletions(-) diff --git a/tests/output/test_format_silent_enforcement.py b/tests/output/test_format_silent_enforcement.py index 0ae5aae3bc..7f2823dc9b 100644 --- a/tests/output/test_format_silent_enforcement.py +++ b/tests/output/test_format_silent_enforcement.py @@ -27,7 +27,6 @@ def test_table_result_with_silent_enabled(runner): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ - """ ) @@ -48,7 +47,6 @@ def test_table_result_with_silent_disabled(runner): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ - """ ) diff --git a/tests/output/test_printing.py 
b/tests/output/test_printing.py index f4af9ab09f..45d9862ec7 100644 --- a/tests/output/test_printing.py +++ b/tests/output/test_printing.py @@ -115,7 +115,6 @@ def test_single_collection_result(capsys, mock_cursor): | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------+ - """ ) @@ -147,7 +146,6 @@ def test_print_markup_tags_in_output_do_not_raise_errors(capsys, mock_cursor): |------------------------------------------------| | [INST]footranscript[/INST] | +------------------------------------------------+ - """ ) @@ -163,14 +161,12 @@ def test_print_multi_results_table(capsys, _multiple_results): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ - +---------------------------------------------------------------------+ | string | number | array | object | date | |--------+--------+-----------+-----------------+---------------------| | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------------------------+ - """ ) @@ -203,14 +199,12 @@ def test_print_different_multi_results_table(capsys, _multiple_different_results | string | 42 | | string | 43 | +-----------------+ - +---------------------------------------------------+ | array | object | date | |-----------+-----------------+---------------------| | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | +---------------------------------------------------+ - """ ) @@ -243,7 +237,6 @@ def test_print_different_data_sources_table(capsys, _multiple_data_sources): | string | 42 | ['array'] | {'k': 'object'} | 2022-03-21 00:00:00 | | string | 43 | ['array'] | {'k': 'object'} | 
2022-03-21 00:00:00 | +---------------------------------------------------------------------+ - Command done +---------+ | key | @@ -251,7 +244,6 @@ def test_print_different_data_sources_table(capsys, _multiple_data_sources): | value_0 | | value_1 | +---------+ - """ ) @@ -409,7 +401,6 @@ def test_print_bytearray(capsys, _bytearray_result): |----------------------------------| | 544849532053484f554c4420574f524b | +----------------------------------+ - """ ) From f97503412a1b610d93b06340517f3177040eb729 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Fri, 24 Oct 2025 07:43:26 +0200 Subject: [PATCH 62/78] SNOW-2306184: config refactor - remove comments --- src/snowflake/cli/api/config_ng/parsers.py | 3 -- .../cli/api/config_ng/presentation.py | 9 ---- .../cli/api/config_ng/resolution_logger.py | 7 --- src/snowflake/cli/api/config_ng/resolver.py | 49 ++----------------- src/snowflake/cli/api/config_ng/sources.py | 36 -------------- .../api/config_ng/telemetry_integration.py | 4 -- 6 files changed, 3 insertions(+), 105 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/parsers.py b/src/snowflake/cli/api/config_ng/parsers.py index 8d6754714f..5ca6cf6f53 100644 --- a/src/snowflake/cli/api/config_ng/parsers.py +++ b/src/snowflake/cli/api/config_ng/parsers.py @@ -95,19 +95,16 @@ def parse(cls, content: str) -> Dict[str, Any]: section.split(".", 1)[1] if "." 
in section else "default" ) - # Ensure connections dict exists if "connections" not in result: result["connections"] = {} if conn_name not in result["connections"]: result["connections"][conn_name] = {} - # Map keys and add to connection for key, value in config[section].items(): mapped_key = cls.SNOWSQL_KEY_MAP.get(key, key) result["connections"][conn_name][mapped_key] = value elif section == "variables": - # Process variables section result["variables"] = dict(config[section]) return result diff --git a/src/snowflake/cli/api/config_ng/presentation.py b/src/snowflake/cli/api/config_ng/presentation.py index c626a77e3c..549c5c659f 100644 --- a/src/snowflake/cli/api/config_ng/presentation.py +++ b/src/snowflake/cli/api/config_ng/presentation.py @@ -99,11 +99,9 @@ def _should_mask_value(key: str) -> bool: """ key_lower = key.lower() - # Check if it's a path key (paths are OK to display) if any(path_key in key_lower for path_key in PATH_KEYS): return False - # Check if it contains sensitive keywords return any(sensitive_key in key_lower for sensitive_key in SENSITIVE_KEYS) @@ -168,13 +166,10 @@ def build_sources_table(self, key: Optional[str] = None) -> CollectionResult: Args: key: Optional specific key to build table for, or None for all keys """ - # Ensure history is populated tracker = self._resolver.get_tracker() if key is None and not tracker.get_all_histories(): - # Resolve all keys to populate history self._resolver.resolve() elif key is not None and tracker.get_history(key) is None: - # Resolve only the specific key self._resolver.resolve(key=key) histories = ( @@ -187,21 +182,17 @@ def _row_items(): for k, history in histories.items(): if history is None: continue - # Initialize row with fixed columns row: Dict[str, Any] = {c: "" for c in TABLE_COLUMNS} row["key"] = k - # Final value (masked) masked_final = _mask_sensitive_value(k, history.final_value) row["value"] = masked_final - # Mark presence per source for entry in history.entries: source_column = 
SOURCE_TO_COLUMN.get(entry.config_value.source_name) if source_column is not None: row[source_column] = "+" - # Ensure result preserves the column order ordered_row = {column: row[column] for column in TABLE_COLUMNS} yield ordered_row diff --git a/src/snowflake/cli/api/config_ng/resolution_logger.py b/src/snowflake/cli/api/config_ng/resolution_logger.py index 4af8c73678..e64cf97a37 100644 --- a/src/snowflake/cli/api/config_ng/resolution_logger.py +++ b/src/snowflake/cli/api/config_ng/resolution_logger.py @@ -65,7 +65,6 @@ def get_resolver() -> Optional[ConfigurationResolver]: if not isinstance(provider, AlternativeConfigProvider): return None - # Ensure provider is initialized provider._ensure_initialized() # noqa: SLF001 return provider._resolver # noqa: SLF001 @@ -87,7 +86,6 @@ def show_resolution_chain(key: str) -> None: provider = get_config_provider_singleton() - # Force configuration resolution to populate history provider.read_config() resolver = get_resolver() @@ -114,7 +112,6 @@ def show_all_resolution_chains() -> None: provider = get_config_provider_singleton() - # Force configuration resolution to populate history provider.read_config() resolver = get_resolver() @@ -148,7 +145,6 @@ def get_resolution_summary() -> Optional[Dict]: provider = get_config_provider_singleton() - # Force configuration resolution to populate history provider.read_config() resolver = get_resolver() @@ -179,7 +175,6 @@ def export_resolution_history(output_path: Path) -> bool: provider = get_config_provider_singleton() - # Force configuration resolution to populate history provider.read_config() resolver = get_resolver() @@ -253,7 +248,6 @@ def check_value_source(key: str) -> Optional[str]: provider = get_config_provider_singleton() - # Force configuration resolution to populate history provider.read_config() resolver = get_resolver() @@ -283,7 +277,6 @@ def explain_configuration(key: Optional[str] = None, verbose: bool = False) -> N provider = get_config_provider_singleton() - # 
Force configuration resolution to populate history provider.read_config() resolver = get_resolver() diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index 91a8d32921..6a14b8d3af 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -121,10 +121,8 @@ def record_nested_discovery( if not self._enabled: return - # Flatten the nested data flat_data = self._flatten_nested_dict(nested_data) - # Record each flat key timestamp = datetime.now() for flat_key, value in flat_data.items(): config_value = ConfigValue( @@ -157,7 +155,6 @@ def mark_selected(self, key: str, source_name: str) -> None: if not self._enabled or key not in self._discoveries: return - # Build resolution history for this key entries: List[ResolutionEntry] = [] selected_value = None @@ -191,12 +188,10 @@ def mark_default_used(self, key: str, default_value: Any) -> None: if not self._enabled: return - # Create or update history to indicate default usage if key in self._histories: self._histories[key].default_used = True self._histories[key].final_value = default_value else: - # No discoveries, only default self._histories[key] = ResolutionHistory( key=key, entries=[], final_value=default_value, default_used=True ) @@ -235,20 +230,14 @@ def finalize_with_result(self, final_config: Dict[str, Any]) -> None: if not self._enabled: return - # Flatten final config to identify which values were selected flat_final = self._flatten_nested_dict(final_config) - # For each flat key in final config, find which source provided it for flat_key, final_value in flat_final.items(): - # Check if this key has discoveries if flat_key not in self._discoveries: continue - # Find the entry with matching value (should be highest priority) discoveries = self._discoveries[flat_key] - for config_value, timestamp in reversed( - discoveries - ): # Check from highest to lowest + for config_value, timestamp in reversed(discoveries): 
if config_value.value == final_value: self.mark_selected(flat_key, config_value.source_name) break @@ -276,7 +265,6 @@ def record_general_params_merged_to_connections( timestamp = datetime.now() for param_key, param_value in general_params.items(): - # Record for each connection for conn_name in connection_names: flat_key = f"connections.{conn_name}.{param_key}" config_value = ConfigValue( @@ -301,10 +289,8 @@ def replicate_root_level_discoveries_to_connection( return for param_key in param_keys: - # Check if we have discoveries for the root-level key if param_key in self._discoveries: conn_key = f"connections.{connection_name}.{param_key}" - # Copy all discoveries from root to connection location for config_value, timestamp in self._discoveries[param_key]: self._discoveries[conn_key].append((config_value, timestamp)) @@ -574,23 +560,19 @@ def _resolve_file_sources(self, key: Optional[str] = None) -> Dict[str, Any]: for source in self._get_sources_by_type(SourceType.FILE): try: - source_data = source.discover(key) # Already nested! 
+ source_data = source.discover(key) - # Record discoveries for history tracking self._history_tracker.record_nested_discovery( source_data, source.source_name ) - # For FILE sources: connection-level replacement if "connections" in source_data: if "connections" not in result: result["connections"] = {} - # Replace entire connections (not merge) for conn_name, conn_data in source_data["connections"].items(): result["connections"][conn_name] = conn_data - # Merge other top-level keys for k, v in source_data.items(): if k != "connections": result[k] = v @@ -617,11 +599,9 @@ def _merge_file_results( """ all_values: Dict[str, ConfigValue] = {} - # Add all connection parameters for conn_params in file_connections.values(): all_values.update(conn_params) - # Add flat values all_values.update(file_flat_values) return all_values @@ -655,20 +635,16 @@ def _apply_overlay_sources( try: source_data = source.discover(key) - # Record discoveries for history tracking self._history_tracker.record_nested_discovery( source_data, source.source_name ) - # Separate general connection params from other data general_params, other_data = extract_root_level_connection_params( source_data ) - # First, merge connection-specific data and internal params result = deep_merge(result, other_data) - # Then, merge general params into all existing connections if general_params and "connections" in result and result["connections"]: connection_names = [ name @@ -676,37 +652,29 @@ def _apply_overlay_sources( if isinstance(result["connections"][name], dict) ] - # Record history for general params being merged into connections self._history_tracker.record_general_params_merged_to_connections( general_params, connection_names, source.source_name ) - # Merge general params into existing connections result["connections"] = merge_params_into_connections( result["connections"], general_params ) elif general_params: - # No connections exist yet, keep general params at root - # for default connection creation 
later result = deep_merge(result, general_params) except Exception as e: log.warning("Error from source %s: %s", source.source_name, e) - # Final cleanup: merge any remaining root-level general params into all connections - # This handles params from early sources that were added before connections existed if "connections" in result and result["connections"]: remaining_general_params, _ = extract_root_level_connection_params(result) if remaining_general_params: - # Merge remaining params into connections (connection values take precedence) for conn_name in result["connections"]: if isinstance(result["connections"][conn_name], dict): result["connections"][conn_name] = deep_merge( remaining_general_params, result["connections"][conn_name] ) - # Remove general params from root since they're now in connections for key in remaining_general_params: if key in result: result.pop(key) @@ -733,12 +701,10 @@ def _ensure_default_connection(self, config: Dict[str, Any]) -> Dict[str, Any]: """ from snowflake.cli.api.config_ng.constants import INTERNAL_CLI_PARAMETERS - # Check if connections already exist connections = config.get("connections", {}) if connections: - return config # Connections exist, nothing to do + return config - # Identify general connection parameters (root-level, non-internal) general_params = {} for key, value in config.items(): if ( @@ -747,20 +713,16 @@ def _ensure_default_connection(self, config: Dict[str, Any]) -> Dict[str, Any]: ): general_params[key] = value - # If no general params, nothing to create if not general_params: return config - # Create default connection with general params result = config.copy() result["connections"] = {"default": general_params.copy()} - # Record history for moved parameters self._history_tracker.replicate_root_level_discoveries_to_connection( list(general_params.keys()), "default" ) - # Remove general params from root level (they're now in default connection) for key in general_params: result.pop(key, None) @@ -798,16 
+760,12 @@ def resolve(self, key: Optional[str] = None, default: Any = None) -> Dict[str, A Returns: Nested dictionary of resolved configuration """ - # Phase A: FILE sources (connection-level replacement) result = self._resolve_file_sources(key) - # Phase B: OVERLAY sources (field-level overlay with deep merge) result = self._apply_overlay_sources(result, key) - # Phase C: Ensure default connection exists if general params present result = self._ensure_default_connection(result) - # Phase D: Finalize resolution history self._finalize_resolution_history(result) return result @@ -882,7 +840,6 @@ def get_resolution_history(self, key: str) -> Optional[ResolutionHistory]: Returns: ResolutionHistory showing the full precedence chain """ - # First, try exact match history = self._history_tracker.get_history(key) if history: return history diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 2eae2dab76..42814c8fdd 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -135,13 +135,11 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ from snowflake.cli.api.config_ng.parsers import SnowSQLParser - # Phase 1: Content acquisition if self._content is not None: content = self._content else: content = self._read_and_merge_files() - # Phase 2: Parse content return SnowSQLParser.parse(content) def _read_and_merge_files(self) -> str: @@ -160,7 +158,6 @@ def _read_and_merge_files(self) -> str: except Exception as e: log.debug("Failed to read SnowSQL config %s: %s", config_file, e) - # Convert merged config to string from io import StringIO output = StringIO() @@ -203,7 +200,6 @@ def __init__( @staticmethod def _get_default_paths() -> List[Path]: """Get standard CLI config search paths.""" - # Check for config file override from CLI context first try: from snowflake.cli.api.cli_global_context import get_cli_context @@ -214,14 +210,12 @@ def 
_get_default_paths() -> List[Path]: except Exception: log.debug("CLI context not available, using standard config paths") - # Use SNOWFLAKE_HOME if set and directory exists snowflake_home = os.environ.get(SNOWFLAKE_HOME_ENV) if snowflake_home: snowflake_home_path = Path(snowflake_home).expanduser() if snowflake_home_path.exists(): return [snowflake_home_path / "config.toml"] - # Standard paths return [ Path.cwd() / "config.toml", Path.home() / ".snowflake" / "config.toml", @@ -260,7 +254,6 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ from snowflake.cli.api.config_ng.parsers import TOMLParser - # Phase 1: Content acquisition if self._content is not None: content = self._content else: @@ -269,7 +262,6 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: if not content: return {} - # Phase 2: Parse content return TOMLParser.parse(content) def _read_first_file(self) -> str: @@ -390,7 +382,6 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ from snowflake.cli.api.config_ng.parsers import TOMLParser - # Phase 1: Content acquisition if self._content is not None: content = self._content else: @@ -402,14 +393,12 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: log.debug("Failed to read connections.toml: %s", e) return {} - # Phase 2: Parse TOML (generic parser) try: data = TOMLParser.parse(content) except Exception as e: log.debug("Failed to parse connections.toml: %s", e) return {} - # Phase 3: Normalize legacy format (connections.toml specific) return self._normalize_connections_format(data) @staticmethod @@ -429,21 +418,16 @@ def _normalize_connections_format(data: Dict[str, Any]) -> Dict[str, Any]: """ result: Dict[str, Any] = {} - # Handle direct connection sections (legacy format) - # Any top-level section that's not "connections" is treated as a connection for section_name, section_data in data.items(): if isinstance(section_data, dict) and section_name != "connections": if 
"connections" not in result: result["connections"] = {} result["connections"][section_name] = section_data - # Handle nested [connections] section (new format) connections_section = data.get("connections", {}) if isinstance(connections_section, dict) and connections_section: if "connections" not in result: result["connections"] = {} - # Merge with any legacy connections found above - # (nested format takes precedence if there's overlap) result["connections"].update(connections_section) return result @@ -507,15 +491,12 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: for env_var, config_key in self.ENV_VAR_MAPPING.items(): env_value = os.getenv(env_var) if env_value is not None: - # Only set if not already set by a previous env var - # (e.g., SNOWSQL_ACCOUNT takes precedence over SNOWSQL_ACCOUNTNAME) if config_key not in result: result[config_key] = env_value return result def supports_key(self, key: str) -> bool: - # Check if any env var for this key is set for env_var, config_key in self.ENV_VAR_MAPPING.items(): if config_key == key and os.getenv(env_var) is not None: return True @@ -588,15 +569,10 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ result: Dict[str, Any] = {} - # Scan all environment variables for env_name, env_value in os.environ.items(): - # Check for connection-specific pattern: SNOWFLAKE_CONNECTIONS__ if env_name.startswith("SNOWFLAKE_CONNECTIONS_"): - # Extract remainder after the prefix remainder = env_name[len("SNOWFLAKE_CONNECTIONS_") :] - # Find the longest matching key suffix from known config keys to - # correctly handle underscores both in connection names and keys match: tuple[str, str] | None = None for candidate in sorted(_ENV_CONFIG_KEYS, key=len, reverse=True): key_suffix = "_" + candidate.upper() @@ -607,13 +583,11 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: break if not match: - # Unknown/unsupported key suffix; ignore continue conn_name_upper, config_key = match 
conn_name = conn_name_upper.lower() - # Build nested structure if "connections" not in result: result["connections"] = {} if conn_name not in result["connections"]: @@ -624,7 +598,6 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: return result def supports_key(self, key: str) -> bool: - # Check if key matches pattern connections.{name}.{param} if key.startswith("connections."): parts = key.split(".", 2) if len(parts) == 3: @@ -666,16 +639,13 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: """ result: Dict[str, Any] = {} - # Scan all environment variables for env_name, env_value in os.environ.items(): if not env_name.startswith("SNOWFLAKE_"): continue - # Skip connection-specific variables if env_name.startswith("SNOWFLAKE_CONNECTIONS_"): continue - # Check for general pattern: SNOWFLAKE_ config_key_upper = env_name[len("SNOWFLAKE_") :] config_key = config_key_upper.lower() @@ -685,11 +655,9 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: return result def supports_key(self, key: str) -> bool: - # Only support flat keys (not prefixed with connections.) if "." 
in key: return False - # Check if the general env var exists env_var = f"SNOWFLAKE_{key.upper()}" return os.getenv(env_var) is not None @@ -733,7 +701,6 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: result: Dict[str, Any] = {} for k, v in self._cli_context.items(): - # Skip None values (not provided on CLI) if v is None: continue @@ -760,15 +727,12 @@ def get_merged_variables(cli_variables: Optional[List[str]] = None) -> Dict[str, """ from snowflake.cli.api.config_provider import get_config_provider_singleton - # Start with SnowSQL variables from config provider = get_config_provider_singleton() try: snowsql_vars = provider.get_section(SnowSQLSection.VARIABLES.value) except Exception: - # If variables section doesn't exist or provider not initialized, start with empty dict snowsql_vars = {} - # Parse and overlay -D parameters (higher precedence) if cli_variables: from snowflake.cli.api.commands.utils import parse_key_value_variables diff --git a/src/snowflake/cli/api/config_ng/telemetry_integration.py b/src/snowflake/cli/api/config_ng/telemetry_integration.py index f2c4d84a2f..56495c69cd 100644 --- a/src/snowflake/cli/api/config_ng/telemetry_integration.py +++ b/src/snowflake/cli/api/config_ng/telemetry_integration.py @@ -56,19 +56,15 @@ def record_config_source_usage(resolver: ConfigurationResolver) -> None: cli_context = get_cli_context() summary = resolver.get_tracker().get_summary() - # Track which sources won (provided final values) source_wins = summary.get("source_wins", {}) - # Set counters for each source for source_name, counter_name in SOURCE_TO_COUNTER.items(): - # Set to 1 if this source provided any winning values, 0 otherwise value = 1 if source_wins.get(source_name, 0) > 0 else 0 counter_field = getattr(CLICounterField, counter_name.upper(), None) if counter_field: cli_context.metrics.set_counter(counter_field, value) except Exception: - # Don't break execution if telemetry fails pass From 
610a8daa487bddb3bd55bef7354458a42aa458aa Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 17 Nov 2025 15:44:39 +0100 Subject: [PATCH 63/78] SNOW-2306184: config refactor - value masking for ResolutionHistory --- src/snowflake/cli/api/config_ng/core.py | 12 ++- src/snowflake/cli/api/config_ng/masking.py | 80 +++++++++++++++++++ .../cli/api/config_ng/presentation.py | 76 +++--------------- tests/config_ng/test_resolution_logger.py | 33 ++++++++ 4 files changed, 134 insertions(+), 67 deletions(-) create mode 100644 src/snowflake/cli/api/config_ng/masking.py diff --git a/src/snowflake/cli/api/config_ng/core.py b/src/snowflake/cli/api/config_ng/core.py index 7d8bff20f0..31d3bdff32 100644 --- a/src/snowflake/cli/api/config_ng/core.py +++ b/src/snowflake/cli/api/config_ng/core.py @@ -29,6 +29,8 @@ from enum import Enum from typing import Any, Callable, Dict, List, Literal, Optional +from snowflake.cli.api.config_ng.masking import mask_sensitive_value + class SourceType(Enum): """ @@ -254,14 +256,18 @@ def to_dict(self) -> dict: """Convert to dictionary for JSON serialization/export.""" return { "key": self.key, - "final_value": self.final_value, + "final_value": mask_sensitive_value(self.key, self.final_value), "default_used": self.default_used, "sources_consulted": self.sources_consulted, "entries": [ { "source": entry.config_value.source_name, - "value": entry.config_value.value, - "raw_value": entry.config_value.raw_value, + "value": mask_sensitive_value( + entry.config_value.key, entry.config_value.value + ), + "raw_value": mask_sensitive_value( + entry.config_value.key, entry.config_value.raw_value + ), "was_used": entry.was_used, "overridden_by": entry.overridden_by, "timestamp": entry.timestamp.isoformat(), diff --git a/src/snowflake/cli/api/config_ng/masking.py b/src/snowflake/cli/api/config_ng/masking.py new file mode 100644 index 0000000000..79a9739f57 --- /dev/null +++ b/src/snowflake/cli/api/config_ng/masking.py @@ -0,0 +1,80 @@ +"""Utilities for masking 
sensitive configuration values.""" + +from __future__ import annotations + +from typing import Any, Final, Literal, Tuple + +MaskToken = Literal["****"] + +SENSITIVE_KEY_FRAGMENT = Literal[ + "password", + "pwd", + "oauth_client_secret", + "token", + "session_token", + "master_token", + "mfa_passcode", + "private_key", + "passphrase", + "secret", +] + +PATH_KEY_FRAGMENT = Literal[ + "private_key_file", + "private_key_path", + "token_file_path", +] + +MASKED_VALUE: Final[MaskToken] = "****" + +SENSITIVE_KEYS: Final[Tuple[SENSITIVE_KEY_FRAGMENT, ...]] = ( + "password", + "pwd", + "oauth_client_secret", + "token", + "session_token", + "master_token", + "mfa_passcode", + "private_key", + "passphrase", + "secret", +) + +PATH_KEYS: Final[Tuple[PATH_KEY_FRAGMENT, ...]] = ( + "private_key_file", + "private_key_path", + "token_file_path", +) + + +def should_mask_value(key: str) -> bool: + """ + Determine if the value associated with the key is sensitive. + + Keys containing path segments should not be masked because they refer to + file locations rather than secrets. + """ + key_lower = key.lower() + + if any(path_fragment in key_lower for path_fragment in PATH_KEYS): + return False + + return any(fragment in key_lower for fragment in SENSITIVE_KEYS) + + +def mask_sensitive_value(key: str, value: Any) -> Any: + """Mask sensitive values; otherwise return the original value.""" + if should_mask_value(key): + return MASKED_VALUE + + return value + + +def stringify_masked_value(key: str, value: Any) -> str: + """ + Helper for presentation components that expect string values. + + This avoids duplicating string coercion logic across call sites. 
+ """ + masked = mask_sensitive_value(key, value) + return "None" if masked is None else str(masked) diff --git a/src/snowflake/cli/api/config_ng/presentation.py b/src/snowflake/cli/api/config_ng/presentation.py index 549c5c659f..a5ca9c5032 100644 --- a/src/snowflake/cli/api/config_ng/presentation.py +++ b/src/snowflake/cli/api/config_ng/presentation.py @@ -25,33 +25,13 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Literal, Optional, Tuple +from snowflake.cli.api.config_ng.masking import stringify_masked_value from snowflake.cli.api.console import cli_console from snowflake.cli.api.output.types import CollectionResult, MessageResult if TYPE_CHECKING: from snowflake.cli.api.config_ng.resolver import ConfigurationResolver -# Sensitive configuration keys that should be masked when displayed -SENSITIVE_KEYS = { - "password", - "pwd", - "oauth_client_secret", - "token", - "session_token", - "master_token", - "mfa_passcode", - "private_key", # Private key content (not path) - "passphrase", - "secret", -} - -# Keys that contain file paths (paths are OK to display, but not file contents) -PATH_KEYS = { - "private_key_file", - "private_key_path", - "token_file_path", -} - # Fixed table columns ordered from most important (left) to least (right) SourceColumn = Literal[ "params", @@ -87,40 +67,6 @@ } -def _should_mask_value(key: str) -> bool: - """ - Determine if a configuration value should be masked for security. - - Args: - key: Configuration key name - - Returns: - True if the value should be masked, False if it can be displayed - """ - key_lower = key.lower() - - if any(path_key in key_lower for path_key in PATH_KEYS): - return False - - return any(sensitive_key in key_lower for sensitive_key in SENSITIVE_KEYS) - - -def _mask_sensitive_value(key: str, value: Any) -> str: - """ - Mask sensitive configuration values for display. 
- - Args: - key: Configuration key name - value: Value to potentially mask - - Returns: - Masked string if sensitive, otherwise string representation of value - """ - if _should_mask_value(key): - return "****" - return str(value) - - class ResolutionPresenter: """ Handles all presentation, formatting, and export of resolution data. @@ -185,7 +131,7 @@ def _row_items(): row: Dict[str, Any] = {c: "" for c in TABLE_COLUMNS} row["key"] = k - masked_final = _mask_sensitive_value(k, history.final_value) + masked_final = stringify_masked_value(k, history.final_value) row["value"] = masked_final for entry in history.entries: @@ -227,7 +173,7 @@ def format_history_message(self, key: Optional[str] = None) -> MessageResult: lines.append(f"Key: {k}") lines.append( - f"Final Value: {_mask_sensitive_value(k, history.final_value)}" + f"Final Value: {stringify_masked_value(k, history.final_value)}" ) if history.entries: @@ -235,7 +181,7 @@ def format_history_message(self, key: Optional[str] = None) -> MessageResult: for i, entry in enumerate(history.entries, 1): cv = entry.config_value status = "SELECTED" if entry.was_used else "overridden" - masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_value = stringify_masked_value(cv.key, cv.value) lines.append(f" {i}. 
[{status}] {cv.source_name}: {masked_value}") if history.default_used: @@ -273,9 +219,9 @@ def print_resolution_chain(self, key: str) -> None: status_text = "(not used)" # Mask sensitive values - masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_value = stringify_masked_value(cv.key, cv.value) masked_raw = ( - _mask_sensitive_value(cv.key, cv.raw_value) + stringify_masked_value(cv.key, cv.raw_value) if cv.raw_value is not None else None ) @@ -290,7 +236,7 @@ def print_resolution_chain(self, key: str) -> None: ) if history.default_used: - masked_default = _mask_sensitive_value(key, history.final_value) + masked_default = stringify_masked_value(key, history.final_value) cli_console.step(f"Default value used: {masked_default}") def print_all_chains(self) -> None: @@ -324,9 +270,9 @@ def print_all_chains(self) -> None: status_text = "(not used)" # Mask sensitive values - masked_value = _mask_sensitive_value(cv.key, cv.value) + masked_value = stringify_masked_value(cv.key, cv.value) masked_raw = ( - _mask_sensitive_value(cv.key, cv.raw_value) + stringify_masked_value(cv.key, cv.raw_value) if cv.raw_value is not None else None ) @@ -341,7 +287,9 @@ def print_all_chains(self) -> None: ) if history.default_used: - masked_default = _mask_sensitive_value(key, history.final_value) + masked_default = stringify_masked_value( + key, history.final_value + ) cli_console.step(f"Default value used: {masked_default}") def export_history(self, filepath: Path) -> None: diff --git a/tests/config_ng/test_resolution_logger.py b/tests/config_ng/test_resolution_logger.py index 1349d82681..f448a24033 100644 --- a/tests/config_ng/test_resolution_logger.py +++ b/tests/config_ng/test_resolution_logger.py @@ -190,6 +190,39 @@ def test_check_value_source_returns_none_with_legacy(self): class TestExportResolutionHistory: """Tests for exporting resolution history.""" + def test_history_to_dict_masks_sensitive_data(self): + """ResolutionHistory.to_dict should mask sensitive fields.""" 
+ from datetime import datetime + + from snowflake.cli.api.config_ng.core import ( + ConfigValue, + ResolutionEntry, + ResolutionHistory, + ) + + entry = ResolutionEntry( + config_value=ConfigValue( + key="password", + value="secret_value", + source_name="cli_arguments", + raw_value="secret_value", + ), + timestamp=datetime.now(), + was_used=True, + ) + + history = ResolutionHistory( + key="password", + entries=[entry], + final_value="secret_value", + ) + + history_dict = history.to_dict() + + assert history_dict["final_value"] == "****" + assert history_dict["entries"][0]["value"] == "****" + assert history_dict["entries"][0]["raw_value"] == "****" + def test_export_returns_false_with_legacy_provider(self, capsys): """Test that export_resolution_history returns False with legacy provider.""" with mock.patch.dict(os.environ, {}, clear=False): From acdfb8432da01ffc72aad8f0209982812ab049e5 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 08:48:41 +0100 Subject: [PATCH 64/78] SNOW-2306184: config refactor - in memory pk --- src/snowflake/cli/api/config.py | 13 ++- src/snowflake/cli/api/config_provider.py | 96 +-------------------- src/snowflake/cli/api/connections.py | 1 + tests/config_ng/test_private_key_cleanup.py | 24 ++---- 4 files changed, 18 insertions(+), 116 deletions(-) diff --git a/src/snowflake/cli/api/config.py b/src/snowflake/cli/api/config.py index 77c9aac461..8c8af282a0 100644 --- a/src/snowflake/cli/api/config.py +++ b/src/snowflake/cli/api/config.py @@ -20,7 +20,14 @@ from contextlib import contextmanager from dataclasses import asdict, dataclass, field from pathlib import Path -from typing import Any, Dict, List, Optional, Union +from typing import ( + Any, + Dict, + List, + Literal, + Optional, + Union, +) import tomlkit from click import ClickException @@ -99,6 +106,7 @@ class ConnectionConfig: authenticator: Optional[str] = None workload_identity_provider: Optional[str] = None private_key_file: Optional[str] = None + 
private_key_raw: Optional[str] = field(default=None, repr=False) private_key_passphrase: Optional[str] = field(default=None, repr=False) token: Optional[str] = field(default=None, repr=False) session_token: Optional[str] = field(default=None, repr=False) @@ -464,9 +472,6 @@ def _check_default_config_files_permissions() -> None: raise ConfigFileTooWidePermissionsError(CONFIG_FILE) -from typing import Literal - - def get_feature_flags_section() -> Dict[str, bool | Literal["UNKNOWN"]]: if not config_section_exists(*FEATURE_FLAGS_SECTION_PATH): return {} diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 04f6c59d4d..f84780d9bd 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -14,7 +14,6 @@ from __future__ import annotations -import atexit import os from abc import ABC, abstractmethod from pathlib import Path @@ -78,74 +77,6 @@ def get_all_connections(self, include_env_connections: bool = False) -> dict: """ ... - def _transform_private_key_raw(self, connection_dict: dict) -> dict: - """ - Transform private_key_raw to private_key_file for ConnectionContext compatibility. - - The ConnectionContext dataclass doesn't have a private_key_raw field, so it gets - filtered out by merge_with_config. To work around this, we write private_key_raw - content to a temporary file and return it as private_key_file. 
- - Args: - connection_dict: Connection configuration dictionary - - Returns: - Modified connection dictionary with private_key_raw transformed to private_key_file - """ - if "private_key_raw" not in connection_dict: - return connection_dict - - # Don't transform if private_key_file is already set - if "private_key_file" in connection_dict: - return connection_dict - - import tempfile - - try: - # Create a temporary file with the private key content - with tempfile.NamedTemporaryFile( - mode="w", suffix=".pem", delete=False - ) as f: - f.write(connection_dict["private_key_raw"]) - temp_file_path = f.name - - # Set restrictive permissions on the temporary file - os.chmod(temp_file_path, 0o600) - - # Create a copy of the connection dict with the transformation - result = connection_dict.copy() - result["private_key_file"] = temp_file_path - del result["private_key_raw"] - - # Track created temp file on the provider instance for cleanup - temp_files_attr = "_temp_private_key_files" - existing = getattr(self, temp_files_attr, None) - if existing is None: - setattr(self, temp_files_attr, {temp_file_path}) - else: - existing.add(temp_file_path) - - return result - - except Exception: - # If transformation fails, return original dict - # The error will be handled downstream - return connection_dict - - def cleanup_temp_files(self) -> None: - """Delete any temporary files created from private_key_raw transformation.""" - temp_files = getattr(self, "_temp_private_key_files", None) - if not temp_files: - return - to_remove = list(temp_files) - for path in to_remove: - try: - Path(path).unlink(missing_ok=True) - except Exception: - # Best-effort cleanup; ignore failures - pass - temp_files.clear() - class LegacyConfigProvider(ConfigProvider): """ @@ -190,8 +121,7 @@ def get_connection_dict(self, connection_name: str) -> dict: from snowflake.cli.api.config import get_config_section try: - result = get_config_section("connections", connection_name) - return 
self._transform_private_key_raw(result) + return get_config_section("connections", connection_name) except KeyError: from snowflake.cli.api.exceptions import MissingConfigurationError @@ -205,7 +135,7 @@ def get_all_connections(self, include_env_connections: bool = False) -> dict: # Legacy provider ignores the flag since it never had env connections connections = get_config_section("connections") return { - name: ConnectionConfig.from_dict(self._transform_private_key_raw(config)) + name: ConnectionConfig.from_dict(config) for name, config in connections.items() } @@ -465,8 +395,7 @@ def get_connection_dict(self, connection_name: str) -> dict: Returns: Dictionary of connection parameters """ - result = self._get_connection_dict_internal(connection_name) - return self._transform_private_key_raw(result) + return self._get_connection_dict_internal(connection_name) def _get_all_connections_dict(self) -> Dict[str, Dict[str, Any]]: """ @@ -596,23 +525,4 @@ def reset_config_provider(): Useful for testing and when config source changes. 
""" global _config_provider_instance - # Cleanup any temp files created by the current provider instance - if _config_provider_instance is not None: - try: - _config_provider_instance.cleanup_temp_files() - except Exception: - pass _config_provider_instance = None - - -def _cleanup_provider_at_exit() -> None: - """Process-exit cleanup for provider-managed temporary files.""" - global _config_provider_instance - if _config_provider_instance is not None: - try: - _config_provider_instance.cleanup_temp_files() - except Exception: - pass - - -atexit.register(_cleanup_provider_at_exit) diff --git a/src/snowflake/cli/api/connections.py b/src/snowflake/cli/api/connections.py index 671d9db3d8..c038661e7c 100644 --- a/src/snowflake/cli/api/connections.py +++ b/src/snowflake/cli/api/connections.py @@ -47,6 +47,7 @@ class ConnectionContext: authenticator: Optional[str] = None workload_identity_provider: Optional[str] = None private_key_file: Optional[str] = None + private_key_raw: Optional[str] = field(default=None, repr=False) private_key_passphrase: Optional[str] = field(default=None, repr=False) warehouse: Optional[str] = None mfa_passcode: Optional[str] = None diff --git a/tests/config_ng/test_private_key_cleanup.py b/tests/config_ng/test_private_key_cleanup.py index 99ad720596..116dce556e 100644 --- a/tests/config_ng/test_private_key_cleanup.py +++ b/tests/config_ng/test_private_key_cleanup.py @@ -1,9 +1,7 @@ -"""Tests for temporary private_key_raw file lifecycle and cleanup.""" +"""Tests for handling private_key_raw without persisting to disk.""" -from pathlib import Path - -def test_private_key_raw_creates_and_cleans_temp_file(config_ng_setup, tmp_path): +def test_private_key_raw_kept_in_memory(config_ng_setup): priv_key_content = ( """-----BEGIN PRIVATE KEY-----\nABC\n-----END PRIVATE KEY-----\n""" ) @@ -20,20 +18,8 @@ def test_private_key_raw_creates_and_cleans_temp_file(config_ng_setup, tmp_path) with config_ng_setup(cli_config=cli_config, env_vars=env_vars): from 
snowflake.cli.api.config import get_connection_dict - from snowflake.cli.api.config_provider import ( - get_config_provider_singleton, - reset_config_provider, - ) - - provider = get_config_provider_singleton() conn = get_connection_dict("test") - temp_path = Path(conn["private_key_file"]) # should exist now - assert temp_path.exists() - assert temp_path.read_text() == priv_key_content - - # Reset provider triggers cleanup - reset_config_provider() - - # File should be gone after cleanup - assert not temp_path.exists() + assert "private_key_raw" in conn + assert conn["private_key_raw"] == priv_key_content + assert "private_key_file" not in conn From 995f91aada96c2701ad2a20958161ac097132a52 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 10:34:55 +0100 Subject: [PATCH 65/78] SNOW-2306184: config refactor - ensure file permissions checks --- src/snowflake/cli/api/config_ng/sources.py | 21 +++++++++ tests/config_ng/conftest.py | 24 +++++++--- tests/config_ng/test_sources.py | 55 ++++++++++++++++++++++ 3 files changed, 94 insertions(+), 6 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 42814c8fdd..54a85afa22 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -37,10 +37,28 @@ from snowflake.cli.api.config_ng.constants import SNOWFLAKE_HOME_ENV from snowflake.cli.api.config_ng.core import SourceType, ValueSource +from snowflake.cli.api.exceptions import ConfigFileTooWidePermissionsError +from snowflake.cli.api.secure_utils import file_permissions_are_strict +from snowflake.connector.compat import IS_WINDOWS log = logging.getLogger(__name__) +def _ensure_strict_file_permissions(config_file: Path) -> None: + """ + Validate that configuration files have strict permissions before reading. + + Raises: + ConfigFileTooWidePermissionsError: If permissions are too wide on non-Windows. 
+ """ + + if IS_WINDOWS or not config_file.exists(): + return + + if not file_permissions_are_strict(config_file): + raise ConfigFileTooWidePermissionsError(config_file) + + class SnowSQLSection(Enum): """ SnowSQL configuration file section names. @@ -153,6 +171,7 @@ def _read_and_merge_files(self) -> str: for config_file in self._config_paths: if config_file.exists(): + _ensure_strict_file_permissions(config_file) try: merged_config.read(config_file) except Exception as e: @@ -273,6 +292,7 @@ def _read_first_file(self) -> str: """ for config_file in self._search_paths: if config_file.exists(): + _ensure_strict_file_permissions(config_file) try: return config_file.read_text() except Exception as e: @@ -387,6 +407,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: else: if not self._file_path.exists(): return {} + _ensure_strict_file_permissions(self._file_path) try: content = self._file_path.read_text() except Exception as e: diff --git a/tests/config_ng/conftest.py b/tests/config_ng/conftest.py index adb09d3f96..08d9f1bf7a 100644 --- a/tests/config_ng/conftest.py +++ b/tests/config_ng/conftest.py @@ -24,9 +24,17 @@ from contextlib import contextmanager from pathlib import Path from textwrap import dedent -from typing import Dict, Optional +from typing import Dict, Literal, Optional import pytest +from snowflake.connector.compat import IS_WINDOWS + +STRICT_FILE_PERMISSIONS: Literal[0o600] = 0o600 + + +def _restrict_permissions(path: Path) -> None: + if not IS_WINDOWS: + path.chmod(STRICT_FILE_PERMISSIONS) @contextmanager @@ -96,13 +104,17 @@ def _setup( # Write config files if provided if snowsql_config: - (snowflake_home / "config").write_text(dedent(snowsql_config)) + snowsql_path = snowflake_home / "config" + snowsql_path.write_text(dedent(snowsql_config)) + _restrict_permissions(snowsql_path) if cli_config: - (snowflake_home / "config.toml").write_text(dedent(cli_config)) + cli_config_path = snowflake_home / "config.toml" + 
cli_config_path.write_text(dedent(cli_config)) + _restrict_permissions(cli_config_path) if connections_toml: - (snowflake_home / "connections.toml").write_text( - dedent(connections_toml) - ) + connections_path = snowflake_home / "connections.toml" + connections_path.write_text(dedent(connections_toml)) + _restrict_permissions(connections_path) # Prepare environment variables env_to_set = { diff --git a/tests/config_ng/test_sources.py b/tests/config_ng/test_sources.py index 8ac9e4e2c5..14fd41a3e1 100644 --- a/tests/config_ng/test_sources.py +++ b/tests/config_ng/test_sources.py @@ -14,11 +14,18 @@ """Tests for configuration sources with string-based testing.""" +from typing import Literal + +import pytest from snowflake.cli.api.config_ng.sources import ( CliConfigFile, ConnectionsConfigFile, SnowSQLConfigFile, ) +from snowflake.cli.api.exceptions import ConfigFileTooWidePermissionsError +from snowflake.connector.compat import IS_WINDOWS + +INSECURE_FILE_PERMISSIONS: Literal[0o644] = 0o644 class TestSnowSQLConfigFileFromString: @@ -378,3 +385,51 @@ def test_non_connections_file_marker(self): not hasattr(snowsql_source, "is_connections_file") or not snowsql_source.is_connections_file ) + + +@pytest.mark.skipif(IS_WINDOWS, reason="Permission checks disabled on Windows") +class TestFilePermissionValidation: + def test_snowsql_config_raises_on_insecure_file(self, tmp_path): + config_path = tmp_path / "snowsql.cnf" + config_path.write_text( + """ +[connections.test] +accountname = test_account +""" + ) + config_path.chmod(INSECURE_FILE_PERMISSIONS) + + source = SnowSQLConfigFile(config_paths=[config_path]) + + with pytest.raises(ConfigFileTooWidePermissionsError): + source.discover() + + def test_cli_config_raises_on_insecure_file(self, tmp_path): + config_path = tmp_path / "config.toml" + config_path.write_text( + """ +[connections.test] +account = "cli-account" +""" + ) + config_path.chmod(INSECURE_FILE_PERMISSIONS) + + source = 
CliConfigFile(search_paths=[config_path]) + + with pytest.raises(ConfigFileTooWidePermissionsError): + source.discover() + + def test_connections_config_raises_on_insecure_file(self, tmp_path): + config_path = tmp_path / "connections.toml" + config_path.write_text( + """ +[connections.test] +account = "connections-account" +""" + ) + config_path.chmod(INSECURE_FILE_PERMISSIONS) + + source = ConnectionsConfigFile(file_path=config_path) + + with pytest.raises(ConfigFileTooWidePermissionsError): + source.discover() From b849f52ff45c90db51b5d90b2388d106026ca8dd Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 10:48:26 +0100 Subject: [PATCH 66/78] SNOW-2306184: config refactor - tests for value masking --- .../cli/_plugins/connection/commands.py | 14 ++++----- tests/test_connection.py | 31 +++++++++++++++++++ 2 files changed, 38 insertions(+), 7 deletions(-) diff --git a/src/snowflake/cli/_plugins/connection/commands.py b/src/snowflake/cli/_plugins/connection/commands.py index 7d39c12e8f..17ee3bd3bd 100644 --- a/src/snowflake/cli/_plugins/connection/commands.py +++ b/src/snowflake/cli/_plugins/connection/commands.py @@ -62,6 +62,7 @@ set_config_value, unset_config_value, ) +from snowflake.cli.api.config_ng.masking import mask_sensitive_value from snowflake.cli.api.console import cli_console from snowflake.cli.api.constants import ObjectType from snowflake.cli.api.output.types import ( @@ -85,12 +86,11 @@ def __repr__(self): return "optional" -def _mask_sensitive_parameters(connection_params: dict): - if "password" in connection_params: - connection_params["password"] = "****" - if "oauth_client_secret" in connection_params: - connection_params["oauth_client_secret"] = "****" - return connection_params +def mask_sensitive_parameters(connection_params: dict): + return { + key: mask_sensitive_value(key, value) + for key, value in connection_params.items() + } @app.command(name="list") @@ -124,7 +124,7 @@ def list_connections( result = ( { 
"connection_name": connection_name, - "parameters": _mask_sensitive_parameters( + "parameters": mask_sensitive_parameters( connection_config.to_dict_of_known_non_empty_values() ), "is_default": connection_name == default_connection, diff --git a/tests/test_connection.py b/tests/test_connection.py index 5bbf32d928..4f32cba1e7 100644 --- a/tests/test_connection.py +++ b/tests/test_connection.py @@ -21,7 +21,9 @@ import pytest import tomlkit +from snowflake.cli._plugins.connection import commands as connection_commands from snowflake.cli.api.config import ConnectionConfig +from snowflake.cli.api.config_ng.masking import MASKED_VALUE from snowflake.cli.api.constants import ObjectType from snowflake.cli.api.secret import SecretType @@ -366,6 +368,35 @@ def test_lists_connection_information(mock_get_default_conn_name, runner): ] +def test_mask_sensitive_parameters_masks_all_known_sensitive_keys(): + params = { + "password": "hunter2", + "oauth_client_secret": "secret1", + "token": "token-value", + "session_token": "session", + "master_token": "master", + "private_key_passphrase": "pk-pass", + "mfa_passcode": "code", + "warehouse": "xs", + } + + masked = connection_commands.mask_sensitive_parameters(params) + + for key in ( + "password", + "oauth_client_secret", + "token", + "session_token", + "master_token", + "private_key_passphrase", + "mfa_passcode", + ): + assert masked[key] == MASKED_VALUE + + assert masked["warehouse"] == "xs" + assert params["password"] == "hunter2" + + @mock.patch.dict( os.environ, { From d1cb6f5660106bdc66872dd9f252dffae070ce48 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 11:38:36 +0100 Subject: [PATCH 67/78] SNOW-2306184: config refactor - oauth typo --- src/snowflake/cli/api/config.py | 20 +++++++++++++++++--- tests/test_config.py | 16 ++++++++++++++++ 2 files changed, 33 insertions(+), 3 deletions(-) diff --git a/src/snowflake/cli/api/config.py b/src/snowflake/cli/api/config.py index 8c8af282a0..b27ea49dbe 100644 --- 
a/src/snowflake/cli/api/config.py +++ b/src/snowflake/cli/api/config.py @@ -23,6 +23,7 @@ from typing import ( Any, Dict, + Final, List, Literal, Optional, @@ -91,6 +92,12 @@ class Empty: FEATURE_FLAGS_SECTION_PATH = [CLI_SECTION, "features"] +LEGACY_OAUTH_PKCE_KEY: Literal["oatuh_enable_pkce"] = "oatuh_enable_pkce" +LEGACY_CONNECTION_SETTING_ALIASES: Final[dict[str, str]] = { + LEGACY_OAUTH_PKCE_KEY: "oauth_enable_pkce", +} + + @dataclass class ConnectionConfig: account: Optional[str] = None @@ -130,12 +137,17 @@ def from_dict(cls, config_dict: dict) -> ConnectionConfig: known_settings = {} other_settings = {} for key, value in config_dict.items(): - if key in cls.__dict__: - known_settings[key] = value + normalized_key = cls._normalize_setting_key(key) + if normalized_key in cls.__dict__: + known_settings[normalized_key] = value else: other_settings[key] = value return cls(**known_settings, _other_settings=other_settings) + @staticmethod + def _normalize_setting_key(key: str) -> str: + return LEGACY_CONNECTION_SETTING_ALIASES.get(key, key) + def to_dict_of_known_non_empty_values(self) -> dict: return { k: v @@ -340,7 +352,9 @@ def get_connection_dict(connection_name: str) -> dict: from snowflake.cli.api.config_provider import get_config_provider_singleton provider = get_config_provider_singleton() - return provider.get_connection_dict(connection_name) + connection_raw = provider.get_connection_dict(connection_name) + connection = ConnectionConfig.from_dict(connection_raw) + return connection.to_dict_of_all_non_empty_values() def get_default_connection_name() -> str: diff --git a/tests/test_config.py b/tests/test_config.py index 9e3c2ba165..782b6219dd 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -111,6 +111,22 @@ def test_environment_variables_works_if_config_value_not_present(test_snowcli_co } +def test_legacy_pkce_key_is_normalized(config_file): + config_content = """ +[connections.test] +account = "legacy" +oatuh_enable_pkce = true +""" + 
with config_file(config_content) as cfg: + config_init(cfg) + + conn = get_connection_dict("test") + + assert conn["account"] == "legacy" + assert conn["oauth_enable_pkce"] is True + assert "oatuh_enable_pkce" not in conn + + @mock.patch.dict( os.environ, { From 13819cb08dbef3a90b425baed0a17ebfb6ac2741 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 11:41:17 +0100 Subject: [PATCH 68/78] SNOW-2306184: config refactor - thread safe singleton --- src/snowflake/cli/api/config_provider.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index f84780d9bd..6fc3b7e8e8 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -15,6 +15,7 @@ from __future__ import annotations import os +import threading from abc import ABC, abstractmethod from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Final, Optional @@ -506,6 +507,7 @@ def get_config_provider() -> ConfigProvider: return LegacyConfigProvider() +_CONFIG_PROVIDER_LOCK: Final = threading.Lock() _config_provider_instance: Optional[ConfigProvider] = None @@ -515,7 +517,9 @@ def get_config_provider_singleton() -> ConfigProvider: """ global _config_provider_instance if _config_provider_instance is None: - _config_provider_instance = get_config_provider() + with _CONFIG_PROVIDER_LOCK: + if _config_provider_instance is None: + _config_provider_instance = get_config_provider() return _config_provider_instance @@ -525,4 +529,5 @@ def reset_config_provider(): Useful for testing and when config source changes. 
""" global _config_provider_instance - _config_provider_instance = None + with _CONFIG_PROVIDER_LOCK: + _config_provider_instance = None From fb37a231df618df62accba767742809b6207e889 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 12:03:55 +0100 Subject: [PATCH 69/78] SNOW-2306184: config refactor - try_cast_to_bool compatibility added --- src/snowflake/cli/api/config_ng/sources.py | 40 ++++++++++++++++++++-- tests/config_ng/test_configuration.py | 28 +++++++++++++++ 2 files changed, 65 insertions(+), 3 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 54a85afa22..2bb453e0c9 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -39,6 +39,7 @@ from snowflake.cli.api.config_ng.core import SourceType, ValueSource from snowflake.cli.api.exceptions import ConfigFileTooWidePermissionsError from snowflake.cli.api.secure_utils import file_permissions_are_strict +from snowflake.cli.api.utils.types import try_cast_to_bool from snowflake.connector.compat import IS_WINDOWS log = logging.getLogger(__name__) @@ -513,7 +514,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: env_value = os.getenv(env_var) if env_value is not None: if config_key not in result: - result[config_key] = env_value + result[config_key] = _coerce_env_value(config_key, env_value) return result @@ -559,6 +560,37 @@ def supports_key(self, key: str) -> bool: "client_store_temporary_credential", ] +_BOOLEAN_ENV_CONFIG_KEYS: Final[set[str]] = { + "oauth_enable_pkce", + "oauth_enable_refresh_tokens", + "oauth_enable_single_use_refresh_tokens", + "client_store_temporary_credential", +} + + +def _coerce_env_value(config_key: str, env_value: Any) -> Any: + """ + Convert string environment values to booleans when appropriate. 
+ + Args: + config_key: Configuration key associated with the value + env_value: Raw value from environment + + Returns: + Parsed value if conversion is applicable, otherwise the original value. + """ + if config_key in _BOOLEAN_ENV_CONFIG_KEYS: + try: + return try_cast_to_bool(env_value) + except ValueError: + log.warning( + "Expected boolean-compatible value for %s but got %r. " + "Using raw value without conversion.", + config_key, + env_value, + ) + return env_value + class ConnectionSpecificEnvironment(ValueSource): """ @@ -614,7 +646,9 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: if conn_name not in result["connections"]: result["connections"][conn_name] = {} - result["connections"][conn_name][config_key] = env_value + result["connections"][conn_name][config_key] = _coerce_env_value( + config_key, env_value + ) return result @@ -671,7 +705,7 @@ def discover(self, key: Optional[str] = None) -> Dict[str, Any]: config_key = config_key_upper.lower() if config_key in _ENV_CONFIG_KEYS: - result[config_key] = env_value + result[config_key] = _coerce_env_value(config_key, env_value) return result diff --git a/tests/config_ng/test_configuration.py b/tests/config_ng/test_configuration.py index ad4d6c953f..e4475dcba0 100644 --- a/tests/config_ng/test_configuration.py +++ b/tests/config_ng/test_configuration.py @@ -141,6 +141,34 @@ def test_level6_general_env_overrides_connection_specific(config_ng_setup): assert conn["schema"] == "general-schema" +def test_connection_specific_env_boolean_values_cast(config_ng_setup): + """Boolean-like env values should be converted to bool for connection-specific vars.""" + env_vars = { + "SNOWFLAKE_CONNECTIONS_TEST_CLIENT_STORE_TEMPORARY_CREDENTIAL": "0", + } + + with config_ng_setup(env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("test") + assert conn["client_store_temporary_credential"] is False + + +def 
test_general_env_boolean_values_cast(config_ng_setup): + """Boolean-like env values should be converted to bool for general vars.""" + env_vars = { + "SNOWFLAKE_ACCOUNT": "env-account", + "SNOWFLAKE_CLIENT_STORE_TEMPORARY_CREDENTIAL": "true", + } + + with config_ng_setup(env_vars=env_vars): + from snowflake.cli.api.config import get_connection_dict + + conn = get_connection_dict("default") + assert conn["account"] == "env-account" + assert conn["client_store_temporary_credential"] is True + + def test_complete_7_level_chain(config_ng_setup): """All 7 levels with different keys showing complete precedence""" snowsql_config = """ From 3e2ea59ab213c0ec1fd2bca667071ca92cfab590 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 12:46:50 +0100 Subject: [PATCH 70/78] SNOW-2306184: config refactor - sanitize logging --- src/snowflake/cli/api/config_ng/resolver.py | 53 ++++++++++++- tests/config_ng/test_resolver_logging.py | 85 +++++++++++++++++++++ 2 files changed, 134 insertions(+), 4 deletions(-) create mode 100644 tests/config_ng/test_resolver_logging.py diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index 6a14b8d3af..465432bfb1 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -40,6 +40,35 @@ log = logging.getLogger(__name__) +def _sanitize_source_error(exc: Exception) -> str: + """ + Produce a logging-safe description of discovery errors. + + Keys and structural metadata (section/key/line) are preserved, but raw + values are never rendered so sensitive data cannot leak through logs. 
+ """ + + safe_parts: List[str] = [exc.__class__.__name__] + attribute_labels = ( + ("section", "section"), + ("option", "key"), + ("key", "key"), + ("lineno", "line"), + ("colno", "column"), + ("pos", "position"), + ) + + for attr_name, label in attribute_labels: + attr_value = getattr(exc, attr_name, None) + if attr_value: + safe_parts.append(f"{label}={attr_value}") + + if len(safe_parts) == 1: + safe_parts.append("details_masked") + + return ", ".join(safe_parts) + + class ResolutionHistoryTracker: """ Tracks the complete resolution process for all configuration keys. @@ -577,8 +606,16 @@ def _resolve_file_sources(self, key: Optional[str] = None) -> Dict[str, Any]: if k != "connections": result[k] = v - except Exception as e: - log.warning("Error from source %s: %s", source.source_name, e) + except Exception as exc: + sanitized_error = _sanitize_source_error(exc) + log.warning( + "Error from source %s: %s", source.source_name, sanitized_error + ) + log.debug( + "Error from source %s (full details hidden in warnings)", + source.source_name, + exc_info=exc, + ) return result @@ -662,8 +699,16 @@ def _apply_overlay_sources( elif general_params: result = deep_merge(result, general_params) - except Exception as e: - log.warning("Error from source %s: %s", source.source_name, e) + except Exception as exc: + sanitized_error = _sanitize_source_error(exc) + log.warning( + "Error from source %s: %s", source.source_name, sanitized_error + ) + log.debug( + "Error from source %s (full details hidden in warnings)", + source.source_name, + exc_info=exc, + ) if "connections" in result and result["connections"]: remaining_general_params, _ = extract_root_level_connection_params(result) diff --git a/tests/config_ng/test_resolver_logging.py b/tests/config_ng/test_resolver_logging.py new file mode 100644 index 0000000000..9dddb533bd --- /dev/null +++ b/tests/config_ng/test_resolver_logging.py @@ -0,0 +1,85 @@ +"""Tests ensuring resolver logging never exposes sensitive values.""" + 
+from __future__ import annotations + +import logging +from typing import Any, Dict, Optional + +import pytest +from snowflake.cli.api.config_ng.core import SourceType, ValueSource +from snowflake.cli.api.config_ng.resolver import ConfigurationResolver + + +class _BrokenSource(ValueSource): + def __init__( + self, + *, + source_type: SourceType, + exception: Exception, + source_name: ValueSource.SourceName = "cli_config_toml", + ): + self._source_type = source_type + self._exception = exception + self._source_name = source_name + + @property + def source_name(self) -> ValueSource.SourceName: + return self._source_name + + @property + def source_type(self) -> SourceType: + return self._source_type + + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: + raise self._exception + + def supports_key(self, key: str) -> bool: + return True + + +def test_file_source_errors_are_sanitized(caplog: pytest.LogCaptureFixture): + secret = "SuperSecret123!" + resolver = ConfigurationResolver( + sources=[ + _BrokenSource( + source_type=SourceType.FILE, + exception=ValueError(f"Raw secret: {secret}"), + ) + ] + ) + + with caplog.at_level(logging.WARNING): + resolved = resolver.resolve() + + assert resolved == {} + assert secret not in caplog.text + assert "ValueError" in caplog.text + assert "details_masked" in caplog.text + + +def test_overlay_source_logs_include_only_structural_metadata( + caplog: pytest.LogCaptureFixture, +): + class StructuredError(Exception): + def __init__(self): + super().__init__("leaked!") # pragma: no cover + self.section = "connections.default" + self.option = "password" + + resolver = ConfigurationResolver( + sources=[ + _BrokenSource( + source_type=SourceType.OVERLAY, + exception=StructuredError(), + source_name="cli_env", + ) + ] + ) + + with caplog.at_level(logging.WARNING): + resolved = resolver.resolve() + + assert resolved == {} + assert "connections.default" in caplog.text + assert "password" in caplog.text + assert "leaked!" 
not in caplog.text From e494653bd9099c25a88289c0c2242e28ed372d7a Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 15:56:38 +0100 Subject: [PATCH 71/78] SNOW-2306184: config refactor - remove tomli conditional import --- src/snowflake/cli/api/config_ng/parsers.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/parsers.py b/src/snowflake/cli/api/config_ng/parsers.py index 5ca6cf6f53..f3632eea06 100644 --- a/src/snowflake/cli/api/config_ng/parsers.py +++ b/src/snowflake/cli/api/config_ng/parsers.py @@ -15,13 +15,9 @@ """Configuration parsers - decouple parsing from file I/O.""" import configparser -from typing import Any, Dict +from typing import Any, Dict, cast -# Try to import tomllib (Python 3.11+) or fall back to tomli -try: - import tomllib -except ImportError: - import tomli as tomllib # type: ignore +import tomlkit class SnowSQLParser: @@ -118,7 +114,7 @@ def parse(content: str) -> Dict[str, Any]: """ Parse TOML format from string. - TOML is already nested, so this just wraps tomllib.loads(). + TOML is already nested, so this wraps tomlkit.loads().unwrap(). All TOML sources (CLI config, connections.toml) use this parser. 
Args: @@ -136,4 +132,4 @@ def parse(content: str) -> Dict[str, Any]: Output: {"connections": {"prod": {"account": "myaccount", "user": "myuser"}}} """ - return tomllib.loads(content) + return cast(Dict[str, Any], tomlkit.loads(content).unwrap()) From 236680fa62a5d10326a39a623b3576b296bdc19a Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 16:21:02 +0100 Subject: [PATCH 72/78] SNOW-2306184: config refactor - test fix --- tests_e2e/test_error_handling.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests_e2e/test_error_handling.py b/tests_e2e/test_error_handling.py index 3efe5b4a82..f9dad7432a 100644 --- a/tests_e2e/test_error_handling.py +++ b/tests_e2e/test_error_handling.py @@ -48,9 +48,12 @@ def test_corrupted_config_in_default_location( snowcli, temporary_directory, isolate_default_config_location, - test_root_path, - snapshot, + config_file, ): + healthy_config = Path(temporary_directory) / "healthy_config.toml" + healthy_config.write_text(Path(config_file).read_text()) + restrict_file_permissions(healthy_config) + default_config = Path(temporary_directory) / "config.toml" default_config.write_text("[connections.demo]\n[connections.demo]") restrict_file_permissions(default_config) @@ -64,7 +67,6 @@ def test_corrupted_config_in_default_location( ) # corrupted config in default location should not influence one passed with --config-file flag - healthy_config = test_root_path / "config" / "config.toml" result_healthy = subprocess_run( [snowcli, "--config-file", healthy_config, "connection", "list"], ) From c9bca4b5954d9c07086202ffdfd12751dba9de61 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Tue, 18 Nov 2025 17:25:51 +0100 Subject: [PATCH 73/78] SNOW-2306184: config refactor - test fix 2 --- src/snowflake/cli/api/config_provider.py | 7 ++++++ tests/test_config_provider_integration.py | 26 +++++++++++++++++++++++ 2 files changed, 33 insertions(+) diff --git a/src/snowflake/cli/api/config_provider.py 
b/src/snowflake/cli/api/config_provider.py index 6fc3b7e8e8..2abcb125e4 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -165,6 +165,7 @@ def __init__( source_manager: Optional source manager (for testing) cli_context_getter: Optional CLI context getter function (for testing) """ + self._owns_source_manager: bool = source_manager is None self._source_manager = source_manager self._cli_context_getter = ( cli_context_getter or self._default_cli_context_getter @@ -184,6 +185,7 @@ def _default_cli_context_getter(): def _ensure_initialized(self) -> None: """Lazily initialize the resolver on first use.""" # Check if config_file_override has changed + override_changed = False try: cli_context = self._cli_context_getter() current_override = cli_context.config_file_override @@ -193,9 +195,14 @@ def _ensure_initialized(self) -> None: self._initialized = False self._config_cache.clear() self._last_config_override = current_override + override_changed = True except Exception: pass + if override_changed and self._owns_source_manager: + # Discard cached sources so that new config override can take effect + self._source_manager = None + if self._initialized: return diff --git a/tests/test_config_provider_integration.py b/tests/test_config_provider_integration.py index 99cbfdd288..79f4068b5d 100644 --- a/tests/test_config_provider_integration.py +++ b/tests/test_config_provider_integration.py @@ -29,6 +29,7 @@ from unittest import mock import pytest +from snowflake.cli.api.cli_global_context import fork_cli_context from snowflake.cli.api.config_provider import ( ALTERNATIVE_CONFIG_ENV_VAR, AlternativeConfigProvider, @@ -109,6 +110,31 @@ def test_reinitialization_clears_cache(self): assert provider._config_cache != {"old": "data"} +class TestAlternativeConfigProviderOverrideHandling: + """Tests for handling config file overrides.""" + + def test_reinitializes_sources_when_config_override_changes(self, tmp_path): + """Ensure 
provider rebuilds sources after config override changes.""" + provider = AlternativeConfigProvider() + + first_config = tmp_path / "config_one.toml" + first_config.write_text('[connections.test]\naccount = "first"\n') + first_config.chmod(0o600) + + second_config = tmp_path / "config_two.toml" + second_config.write_text('[connections.test]\naccount = "second"\n') + second_config.chmod(0o600) + + with fork_cli_context() as ctx: + ctx.config_file_override = first_config + first_connections = provider.get_section("connections") + assert first_connections["test"]["account"] == "first" + + ctx.config_file_override = second_config + second_connections = provider.get_section("connections") + assert second_connections["test"]["account"] == "second" + + class TestAlternativeConfigProviderBasicOperations: """Tests for basic config provider operations.""" From 99eb8d5292acc1158c5acddc8585e8e4a6e6373f Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 19 Nov 2025 12:44:16 +0100 Subject: [PATCH 74/78] SNOW-2306184: config refactor - ensure file permissions checks - plugins test update --- tests_integration/conftest.py | 15 +++++++++++++++ tests_integration/plugin/test_broken_plugin.py | 9 +++++++-- .../plugin/test_failing_plugin.py | 9 +++++++-- .../test_override_by_external_plugins.py | 18 ++++++++++++++---- 4 files changed, 43 insertions(+), 8 deletions(-) diff --git a/tests_integration/conftest.py b/tests_integration/conftest.py index 896dcec7c0..6ac517d508 100644 --- a/tests_integration/conftest.py +++ b/tests_integration/conftest.py @@ -88,6 +88,21 @@ def test_snowcli_config_provider(): yield TestConfigProvider(temp_dst) +@pytest.fixture +def secure_test_config(tmp_path): + """ + Copy a test config to a private location so strict permission checks pass. 
+ """ + + def _copy(source_path: Path) -> Path: + destination = tmp_path / source_path.name + shutil.copy2(source_path, destination) + destination.chmod(0o600) + return destination + + return _copy + + @pytest.fixture(scope="session") def test_root_path(): return TEST_DIR diff --git a/tests_integration/plugin/test_broken_plugin.py b/tests_integration/plugin/test_broken_plugin.py index 9d9c8dc02c..d7fe1fe308 100644 --- a/tests_integration/plugin/test_broken_plugin.py +++ b/tests_integration/plugin/test_broken_plugin.py @@ -17,10 +17,15 @@ @pytest.mark.integration def test_broken_command_path_plugin( - runner, test_root_path, _install_plugin, caplog, config_snapshot + runner, + test_root_path, + _install_plugin, + caplog, + config_snapshot, + secure_test_config, ): """Test broken plugin.""" - config_path = ( + config_path = secure_test_config( test_root_path / "config" / "plugin_tests" / "broken_plugin_config.toml" ) diff --git a/tests_integration/plugin/test_failing_plugin.py b/tests_integration/plugin/test_failing_plugin.py index 32f0eebde1..8c94c5eed1 100644 --- a/tests_integration/plugin/test_failing_plugin.py +++ b/tests_integration/plugin/test_failing_plugin.py @@ -17,10 +17,15 @@ @pytest.mark.integration def test_failing_plugin( - runner, test_root_path, _install_plugin, caplog, config_snapshot + runner, + test_root_path, + _install_plugin, + caplog, + config_snapshot, + secure_test_config, ): """Test failing plugin.""" - config_path = ( + config_path = secure_test_config( test_root_path / "config" / "plugin_tests" / "failing_plugin_config.toml" ) diff --git a/tests_integration/plugin/test_override_by_external_plugins.py b/tests_integration/plugin/test_override_by_external_plugins.py index 951012fa29..6d6237e498 100644 --- a/tests_integration/plugin/test_override_by_external_plugins.py +++ b/tests_integration/plugin/test_override_by_external_plugins.py @@ -17,10 +17,15 @@ @pytest.mark.integration def test_override_build_in_commands( - runner, test_root_path, 
_install_plugin, caplog, config_snapshot + runner, + test_root_path, + _install_plugin, + caplog, + config_snapshot, + secure_test_config, ): """Test plugin override attempt.""" - config_path = ( + config_path = secure_test_config( test_root_path / "config" / "plugin_tests" / "override_plugin_config.toml" ) @@ -39,10 +44,15 @@ def test_override_build_in_commands( @pytest.mark.integration def test_disabled_plugin_is_not_executed( - runner, test_root_path, _install_plugin, caplog, config_snapshot + runner, + test_root_path, + _install_plugin, + caplog, + config_snapshot, + secure_test_config, ): """Test disabled plugin.""" - config_path = ( + config_path = secure_test_config( test_root_path / "config" / "plugin_tests" From e42940738065e4b625895ed3cb55c8685b6323c6 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Wed, 19 Nov 2025 16:41:23 +0100 Subject: [PATCH 75/78] SNOW-2306184: config refactor - top level conn params --- src/snowflake/cli/_app/telemetry.py | 6 +- src/snowflake/cli/api/config_provider.py | 65 ++++++++++-- tests/test_config_provider_integration.py | 122 ++++++++++++++++++++++ 3 files changed, 182 insertions(+), 11 deletions(-) diff --git a/src/snowflake/cli/_app/telemetry.py b/src/snowflake/cli/_app/telemetry.py index 827ea89398..429a38f428 100644 --- a/src/snowflake/cli/_app/telemetry.py +++ b/src/snowflake/cli/_app/telemetry.py @@ -227,9 +227,6 @@ def python_version() -> str: def _get_config_telemetry() -> TelemetryDict: """Get configuration resolution telemetry data.""" try: - from snowflake.cli.api.config_ng.telemetry_integration import ( - get_config_telemetry_payload, - ) from snowflake.cli.api.config_provider import ( AlternativeConfigProvider, get_config_provider_singleton, @@ -246,8 +243,7 @@ def _get_config_telemetry() -> TelemetryDict: # Get detailed telemetry if using ng config if isinstance(provider, AlternativeConfigProvider): - provider._ensure_initialized() # noqa: SLF001 - payload = get_config_telemetry_payload(provider._resolver) # 
noqa: SLF001 + payload = provider.resolution_summary # Map payload keys to telemetry fields if payload: diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index 2abcb125e4..b97e31d8ec 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -265,6 +265,23 @@ def read_config(self) -> None: assert self._resolver is not None self._config_cache = self._resolver.resolve() + @property + def resolution_summary(self) -> Dict[str, Any]: + """Summary of the configuration resolution process.""" + self._ensure_initialized() + + if self._resolver is None: + return {} + + try: + from snowflake.cli.api.config_ng.telemetry_integration import ( + get_config_telemetry_payload, + ) + + return get_config_telemetry_payload(self._resolver) + except Exception: + return {} + def get_section(self, *path) -> dict: """ Navigate nested dict to get configuration section. @@ -454,10 +471,15 @@ def _get_file_based_connections(self) -> dict: """ from snowflake.cli.api.config import ConnectionConfig from snowflake.cli.api.config_ng.constants import FILE_SOURCE_NAMES + from snowflake.cli.api.config_ng.merge_operations import ( + create_default_connection_from_params, + extract_root_level_connection_params, + merge_params_into_connections, + ) self._ensure_initialized() - connections: Dict[str, Dict[str, Any]] = {} + file_data: Dict[str, Any] = {} assert self._resolver is not None for source in self._resolver.get_sources(): @@ -466,17 +488,48 @@ def _get_file_based_connections(self) -> dict: try: source_data = source.discover() # Returns nested dict - if "connections" in source_data: - for conn_name, conn_config in source_data["connections"].items(): - if isinstance(conn_config, dict): - connections[conn_name] = conn_config + if not source_data: + continue + + connections_section = source_data.get("connections") + if isinstance(connections_section, dict): + if "connections" not in file_data: + 
file_data["connections"] = {} + for conn_name, conn_config in connections_section.items(): + file_data["connections"][conn_name] = conn_config + + for key, value in source_data.items(): + if key == "connections": + continue + file_data[key] = value except Exception: # Silently skip sources that fail to discover pass + general_params, remaining_config = extract_root_level_connection_params( + file_data + ) + raw_connections = remaining_config.get("connections", {}) + + filtered_connections: Dict[str, Dict[str, Any]] = {} + if isinstance(raw_connections, dict): + for conn_name, conn_config in raw_connections.items(): + if isinstance(conn_config, dict): + filtered_connections[conn_name] = conn_config + + if general_params: + if filtered_connections: + filtered_connections = merge_params_into_connections( + filtered_connections, general_params + ) + else: + filtered_connections = create_default_connection_from_params( + general_params + ) + return { name: ConnectionConfig.from_dict(config) - for name, config in connections.items() + for name, config in filtered_connections.items() } def invalidate_cache(self) -> None: diff --git a/tests/test_config_provider_integration.py b/tests/test_config_provider_integration.py index 79f4068b5d..28ef7efda5 100644 --- a/tests/test_config_provider_integration.py +++ b/tests/test_config_provider_integration.py @@ -26,10 +26,12 @@ import os from pathlib import Path from tempfile import TemporaryDirectory +from typing import Any from unittest import mock import pytest from snowflake.cli.api.cli_global_context import fork_cli_context +from snowflake.cli.api.config_ng.core import SourceType, ValueSource from snowflake.cli.api.config_provider import ( ALTERNATIVE_CONFIG_ENV_VAR, AlternativeConfigProvider, @@ -40,6 +42,52 @@ ) +class _StubResolver: + """Minimal resolver stub that only exposes get_sources().""" + + def __init__(self, sources: list[ValueSource]): + self._sources = sources + + def get_sources(self) -> list[ValueSource]: + 
return self._sources + + +class _StaticFileSource(ValueSource): + """Test-only file source that returns static data.""" + + def __init__( + self, + data: dict[str, Any], + source_name: ValueSource.SourceName = "cli_config_toml", + ): + self._data = data + self._source_name = source_name + + @property + def source_name(self) -> ValueSource.SourceName: + return self._source_name + + @property + def source_type(self) -> SourceType: + return SourceType.FILE + + def discover(self, key: str | None = None) -> dict[str, Any]: + return self._data + + def supports_key(self, key: str) -> bool: + return key in self._data + + +def _sync_last_config_override(provider: AlternativeConfigProvider) -> None: + """Mirror logic from production code to avoid re-initialization in tests.""" + from snowflake.cli.api.cli_global_context import get_cli_context + + try: + provider._last_config_override = get_cli_context().config_file_override + except Exception: + provider._last_config_override = None + + class TestProviderSelection: """Tests for provider selection via environment variable.""" @@ -520,3 +568,77 @@ def test_legacy_provider_ignores_include_env_flag(self, monkeypatch): # Should be same connections (legacy doesn't filter) assert set(connections_default.keys()) == set(connections_all.keys()) + + def test_file_connections_include_root_level_defaults(self): + """Root-level file parameters should merge into connection definitions.""" + provider = AlternativeConfigProvider() + provider._resolver = _StubResolver( + [ + _StaticFileSource( + { + "connections": {"dev": {"database": "sample_db"}}, + "account": "acct_from_file", + "user": "user_from_file", + } + ) + ] + ) + provider._initialized = True + _sync_last_config_override(provider) + + connections = provider.get_all_connections(include_env_connections=False) + + assert "dev" in connections + dev_conn = connections["dev"] + assert dev_conn.account == "acct_from_file" + assert dev_conn.user == "user_from_file" + assert 
dev_conn.database == "sample_db" + + def test_file_connections_create_default_from_root_params(self): + """Root-level file params should create a default connection when needed.""" + provider = AlternativeConfigProvider() + provider._resolver = _StubResolver( + [ + _StaticFileSource( + { + "account": "acct_only", + "user": "user_only", + "password": "secret", + } + ) + ] + ) + provider._initialized = True + _sync_last_config_override(provider) + + connections = provider.get_all_connections(include_env_connections=False) + + assert list(connections.keys()) == ["default"] + default_conn = connections["default"] + assert default_conn.account == "acct_only" + assert default_conn.user == "user_only" + assert default_conn.password == "secret" + + def test_file_connections_preserve_unknown_root_keys(self): + """Unknown root-level keys should be preserved in connection _other_settings.""" + provider = AlternativeConfigProvider() + provider._resolver = _StubResolver( + [ + _StaticFileSource( + { + "account": "acct_only", + "user": "user_only", + "custom_option": "custom_value", + } + ) + ] + ) + provider._initialized = True + _sync_last_config_override(provider) + + connections = provider.get_all_connections(include_env_connections=False) + + assert "default" in connections + default_conn = connections["default"] + assert default_conn.account == "acct_only" + assert default_conn._other_settings["custom_option"] == "custom_value" From a18add5fadfbb450a08b560e9d008a45dd955325 Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 24 Nov 2025 11:15:39 +0100 Subject: [PATCH 76/78] SNOW-2306184: config refactor - improve performance for key discovery --- src/snowflake/cli/api/config_ng/sources.py | 229 ++++++++++++++++----- tests/config_ng/test_sources.py | 59 ++++++ 2 files changed, 235 insertions(+), 53 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 2bb453e0c9..343e7cb427 100644 --- 
a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -33,7 +33,7 @@ import os from enum import Enum from pathlib import Path -from typing import Any, Dict, Final, List, Optional +from typing import Any, Dict, Final, List, Optional, Tuple from snowflake.cli.api.config_ng.constants import SNOWFLAKE_HOME_ENV from snowflake.cli.api.config_ng.core import SourceType, ValueSource @@ -45,6 +45,46 @@ log = logging.getLogger(__name__) +def _slice_nested_dict( + data: Dict[str, Any], dotted_key: Optional[str] +) -> Dict[str, Any]: + """ + Return subtree for dotted_key or full data if key not provided. + """ + + if not dotted_key: + return data + + parts = dotted_key.split(".") + current: Any = data + for part in parts: + if not isinstance(current, dict) or part not in current: + return {} + current = current[part] + + subtree: Any = current + for part in reversed(parts): + subtree = {part: subtree} + return subtree + + +def _has_nested_key(data: Dict[str, Any], dotted_key: str) -> bool: + """Check if dotted_key exists inside nested dict.""" + + current: Any = data + for part in dotted_key.split("."): + if not isinstance(current, dict) or part not in current: + return False + current = current[part] + return True + + +NestedConfigData = Dict[str, Any] +CachedConfigValues = Optional[NestedConfigData] +FileSignature = Tuple[str, Optional[float], Optional[int]] +CachedSignature = Optional[Tuple[FileSignature, ...]] + + def _ensure_strict_file_permissions(config_file: Path) -> None: """ Validate that configuration files have strict permissions before reading. 
@@ -103,6 +143,8 @@ def __init__( """ self._content = content self._config_paths = config_paths or self._get_default_paths() + self._cache: CachedConfigValues = None + self._cache_signature: CachedSignature = None @staticmethod def _get_default_paths() -> List[Path]: @@ -142,24 +184,46 @@ def source_name(self) -> "ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.FILE - def discover(self, key: Optional[str] = None) -> Dict[str, Any]: - """ - Two-phase discovery: acquire content → parse. + def _current_signature(self) -> Tuple[FileSignature, ...]: + """Return tuple describing current config files.""" - Phase 1: Get content (from string or by reading and merging files) - Phase 2: Parse content using SnowSQLParser + signature: List[FileSignature] = [] + for config_path in self._config_paths: + try: + stat_result = config_path.stat() + except OSError: + signature.append((config_path.as_posix(), None, None)) + continue + signature.append( + (config_path.as_posix(), stat_result.st_mtime, stat_result.st_size) + ) + return tuple(signature) - Returns: - Nested dict structure: {"connections": {...}, "variables": {...}} + def _load_data(self) -> NestedConfigData: + """ + Load and cache SnowSQL configuration data. """ from snowflake.cli.api.config_ng.parsers import SnowSQLParser if self._content is not None: - content = self._content - else: - content = self._read_and_merge_files() + if self._cache is None: + self._cache = SnowSQLParser.parse(self._content) + return self._cache + + signature = self._current_signature() + if self._cache is not None and self._cache_signature == signature: + return self._cache - return SnowSQLParser.parse(content) + content = self._read_and_merge_files() + self._cache = SnowSQLParser.parse(content) + self._cache_signature = signature + return self._cache + + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: + """ + Discover configuration values, optionally scoped to a dotted key. 
+ """ + return _slice_nested_dict(self._load_data(), key) def _read_and_merge_files(self) -> str: """ @@ -185,7 +249,7 @@ def _read_and_merge_files(self) -> str: return output.getvalue() def supports_key(self, key: str) -> bool: - return key in self.discover() + return _has_nested_key(self._load_data(), key) class CliConfigFile(ValueSource): @@ -216,6 +280,8 @@ def __init__( """ self._content = content self._search_paths = search_paths or self._get_default_paths() + self._cache: CachedConfigValues = None + self._cache_signature: CachedSignature = None @staticmethod def _get_default_paths() -> List[Path]: @@ -262,27 +328,51 @@ def source_name(self) -> "ValueSource.SourceName": def source_type(self) -> SourceType: return SourceType.FILE - def discover(self, key: Optional[str] = None) -> Dict[str, Any]: - """ - Two-phase discovery: acquire content → parse. + def _current_signature(self) -> Tuple[FileSignature, ...]: + """Return tuple describing search path state.""" + + signature: List[FileSignature] = [] + for config_path in self._search_paths: + try: + stat_result = config_path.stat() + except OSError: + signature.append((config_path.as_posix(), None, None)) + continue + signature.append( + (config_path.as_posix(), stat_result.st_mtime, stat_result.st_size) + ) + return tuple(signature) - Phase 1: Get content (from string or by reading first existing file) - Phase 2: Parse content using TOMLParser + def _load_data(self) -> NestedConfigData: + """Load and cache CLI config TOML data.""" - Returns: - Nested dict structure with all TOML sections preserved - """ from snowflake.cli.api.config_ng.parsers import TOMLParser if self._content is not None: - content = self._content - else: - content = self._read_first_file() - + if self._cache is None: + if self._content: + self._cache = TOMLParser.parse(self._content) + else: + self._cache = {} + return self._cache + + signature = self._current_signature() + if self._cache is not None and self._cache_signature == signature: + 
return self._cache + + content = self._read_first_file() if not content: - return {} + self._cache = {} + else: + self._cache = TOMLParser.parse(content) + self._cache_signature = signature + return self._cache - return TOMLParser.parse(content) + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: + """ + Discover configuration values, optionally scoped to a dotted key. + """ + return _slice_nested_dict(self._load_data(), key) def _read_first_file(self) -> str: """ @@ -302,7 +392,7 @@ def _read_first_file(self) -> str: return "" def supports_key(self, key: str) -> bool: - return key in self.discover() + return _has_nested_key(self._load_data(), key) class ConnectionsConfigFile(ValueSource): @@ -338,6 +428,8 @@ def __init__(self, content: Optional[str] = None, file_path: Optional[Path] = No """ self._content = content self._file_path = file_path or self._get_default_path() + self._cache: CachedConfigValues = None + self._cache_signature: CachedSignature = None @staticmethod def _get_default_path() -> Path: @@ -381,7 +473,7 @@ def get_defined_connections(self) -> set[str]: This is used by the resolver to implement replacement behavior. """ try: - data = self.discover() + data = self._load_data() connections_section = data.get("connections", {}) if isinstance(connections_section, dict): return set(connections_section.keys()) @@ -390,38 +482,69 @@ def get_defined_connections(self) -> set[str]: log.debug("Failed to get defined connections: %s", e) return set() - def discover(self, key: Optional[str] = None) -> Dict[str, Any]: - """ - Three-phase discovery: acquire content → parse → normalize. 
+ def _current_signature(self) -> Tuple[FileSignature, ...]: + """Return tuple describing the connections file state.""" - Phase 1: Get content (from string or file) - Phase 2: Parse TOML (generic parser) - Phase 3: Normalize legacy format (connections.toml specific) + try: + stat_result = self._file_path.stat() + except OSError: + return ((self._file_path.as_posix(), None, None),) + return ( + ( + self._file_path.as_posix(), + stat_result.st_mtime, + stat_result.st_size, + ), + ) + + def _load_data(self) -> NestedConfigData: + """Load and cache connections TOML data (normalized).""" - Returns: - Nested dict structure: {"connections": {"conn_name": {...}}} - """ from snowflake.cli.api.config_ng.parsers import TOMLParser - if self._content is not None: - content = self._content - else: - if not self._file_path.exists(): - return {} - _ensure_strict_file_permissions(self._file_path) + def _normalize_content(content: str) -> Dict[str, Any]: try: - content = self._file_path.read_text() - except Exception as e: - log.debug("Failed to read connections.toml: %s", e) + if not content: + parsed: Dict[str, Any] = {} + else: + parsed = TOMLParser.parse(content) + except Exception as exc: + log.debug("Failed to parse connections.toml: %s", exc) return {} + return self._normalize_connections_format(parsed) + + if self._content is not None: + if self._cache is None: + self._cache = _normalize_content(self._content) + return self._cache + + signature = self._current_signature() + if self._cache is not None and self._cache_signature == signature: + return self._cache + if not self._file_path.exists(): + self._cache = {} + self._cache_signature = signature + return self._cache + + _ensure_strict_file_permissions(self._file_path) try: - data = TOMLParser.parse(content) - except Exception as e: - log.debug("Failed to parse connections.toml: %s", e) - return {} + content = self._file_path.read_text() + except Exception as exc: + log.debug("Failed to read connections.toml: %s", exc) + 
self._cache = {} + self._cache_signature = signature + return self._cache + + self._cache = _normalize_content(content) + self._cache_signature = signature + return self._cache - return self._normalize_connections_format(data) + def discover(self, key: Optional[str] = None) -> Dict[str, Any]: + """ + Discover configuration values, optionally scoped to a dotted key. + """ + return _slice_nested_dict(self._load_data(), key) @staticmethod def _normalize_connections_format(data: Dict[str, Any]) -> Dict[str, Any]: @@ -455,7 +578,7 @@ def _normalize_connections_format(data: Dict[str, Any]) -> Dict[str, Any]: return result def supports_key(self, key: str) -> bool: - return key in self.discover() + return _has_nested_key(self._load_data(), key) class SnowSQLEnvironment(ValueSource): diff --git a/tests/config_ng/test_sources.py b/tests/config_ng/test_sources.py index 14fd41a3e1..3521ccefde 100644 --- a/tests/config_ng/test_sources.py +++ b/tests/config_ng/test_sources.py @@ -433,3 +433,62 @@ def test_connections_config_raises_on_insecure_file(self, tmp_path): with pytest.raises(ConfigFileTooWidePermissionsError): source.discover() + + +class TestFileSourceCaching: + @pytest.mark.parametrize( + ("source_cls", "read_method", "sample_content", "key_to_check"), + [ + ( + SnowSQLConfigFile, + "_read_and_merge_files", + """ +[connections.default] +accountname = cached_account +""", + "connections.default.account", + ), + ( + CliConfigFile, + "_read_first_file", + """ +[connections.default] +account = "cli_account" +""", + "connections.default.account", + ), + ], + ) + def test_file_sources_cache_file_reads( + self, monkeypatch, source_cls, read_method, sample_content, key_to_check + ): + call_counter = {"count": 0} + + def fake_reader(self): # type: ignore[override] + call_counter["count"] += 1 + return sample_content + + monkeypatch.setattr(source_cls, read_method, fake_reader) + + source = source_cls() + assert source.supports_key(key_to_check) is True + assert 
source.supports_key(key_to_check) is True + assert call_counter["count"] == 1 + + def test_connections_config_file_caches_parse(self, monkeypatch): + parse_calls = {"count": 0} + + def fake_parse(content): + parse_calls["count"] += 1 + return {"connections": {"shared": {"account": "shared_account"}}} + + monkeypatch.setattr( + "snowflake.cli.api.config_ng.parsers.TOMLParser.parse", + staticmethod(fake_parse), + ) + + source = ConnectionsConfigFile(content="[connections.shared]\naccount = 'x'\n") + + assert source.supports_key("connections.shared.account") is True + assert source.supports_key("connections.shared.account") is True + assert parse_calls["count"] == 1 From 4a4bca502fdefaca4e430d0cb1fba592c0497f3b Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 24 Nov 2025 16:19:04 +0100 Subject: [PATCH 77/78] SNOW-2306184: config refactor - silent exceptions logged as warning with sanitisation --- src/snowflake/cli/api/config.py | 7 +++-- src/snowflake/cli/api/config_ng/resolver.py | 34 ++------------------- src/snowflake/cli/api/config_ng/sources.py | 8 +++-- src/snowflake/cli/api/config_provider.py | 23 +++++++++++--- src/snowflake/cli/api/sanitizers.py | 30 ++++++++++++++++++ 5 files changed, 62 insertions(+), 40 deletions(-) diff --git a/src/snowflake/cli/api/config.py b/src/snowflake/cli/api/config.py index b27ea49dbe..9e9d187ec8 100644 --- a/src/snowflake/cli/api/config.py +++ b/src/snowflake/cli/api/config.py @@ -37,6 +37,7 @@ MissingConfigurationError, UnsupportedConfigSectionTypeError, ) +from snowflake.cli.api.sanitizers import sanitize_source_error from snowflake.cli.api.secure_path import SecurePath from snowflake.cli.api.secure_utils import ( file_permissions_are_strict, @@ -248,8 +249,10 @@ def _config_file(): provider = get_config_provider_singleton() if hasattr(provider, "invalidate_cache"): provider.invalidate_cache() - except Exception: - pass + except Exception as exc: + sanitized_error = sanitize_source_error(exc) + log.error("Failed to 
invalidate configuration cache: %s", sanitized_error) + raise def _read_config_file(): diff --git a/src/snowflake/cli/api/config_ng/resolver.py b/src/snowflake/cli/api/config_ng/resolver.py index 465432bfb1..1f356c392e 100644 --- a/src/snowflake/cli/api/config_ng/resolver.py +++ b/src/snowflake/cli/api/config_ng/resolver.py @@ -33,6 +33,7 @@ ResolutionHistory, SourceType, ) +from snowflake.cli.api.sanitizers import sanitize_source_error if TYPE_CHECKING: from snowflake.cli.api.config_ng.core import ValueSource @@ -40,35 +41,6 @@ log = logging.getLogger(__name__) -def _sanitize_source_error(exc: Exception) -> str: - """ - Produce a logging-safe description of discovery errors. - - Keys and structural metadata (section/key/line) are preserved, but raw - values are never rendered so sensitive data cannot leak through logs. - """ - - safe_parts: List[str] = [exc.__class__.__name__] - attribute_labels = ( - ("section", "section"), - ("option", "key"), - ("key", "key"), - ("lineno", "line"), - ("colno", "column"), - ("pos", "position"), - ) - - for attr_name, label in attribute_labels: - attr_value = getattr(exc, attr_name, None) - if attr_value: - safe_parts.append(f"{label}={attr_value}") - - if len(safe_parts) == 1: - safe_parts.append("details_masked") - - return ", ".join(safe_parts) - - class ResolutionHistoryTracker: """ Tracks the complete resolution process for all configuration keys. 
@@ -607,7 +579,7 @@ def _resolve_file_sources(self, key: Optional[str] = None) -> Dict[str, Any]: result[k] = v except Exception as exc: - sanitized_error = _sanitize_source_error(exc) + sanitized_error = sanitize_source_error(exc) log.warning( "Error from source %s: %s", source.source_name, sanitized_error ) @@ -700,7 +672,7 @@ def _apply_overlay_sources( result = deep_merge(result, general_params) except Exception as exc: - sanitized_error = _sanitize_source_error(exc) + sanitized_error = sanitize_source_error(exc) log.warning( "Error from source %s: %s", source.source_name, sanitized_error ) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 343e7cb427..52e895637c 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -37,7 +37,10 @@ from snowflake.cli.api.config_ng.constants import SNOWFLAKE_HOME_ENV from snowflake.cli.api.config_ng.core import SourceType, ValueSource -from snowflake.cli.api.exceptions import ConfigFileTooWidePermissionsError +from snowflake.cli.api.exceptions import ( + ConfigFileTooWidePermissionsError, + UnsupportedConfigSectionTypeError, +) from snowflake.cli.api.secure_utils import file_permissions_are_strict from snowflake.cli.api.utils.types import try_cast_to_bool from snowflake.connector.compat import IS_WINDOWS @@ -908,7 +911,8 @@ def get_merged_variables(cli_variables: Optional[List[str]] = None) -> Dict[str, provider = get_config_provider_singleton() try: snowsql_vars = provider.get_section(SnowSQLSection.VARIABLES.value) - except Exception: + except UnsupportedConfigSectionTypeError as exc: + log.warning("Failed to load SnowSQL variables: %s", exc) snowsql_vars = {} if cli_variables: diff --git a/src/snowflake/cli/api/config_provider.py b/src/snowflake/cli/api/config_provider.py index b97e31d8ec..f8687f0c64 100644 --- a/src/snowflake/cli/api/config_provider.py +++ b/src/snowflake/cli/api/config_provider.py @@ -14,18 +14,23 
@@ from __future__ import annotations +import logging import os import threading from abc import ABC, abstractmethod from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Final, Optional +from snowflake.cli.api.sanitizers import sanitize_source_error + if TYPE_CHECKING: from snowflake.cli.api.config_ng.resolver import ConfigurationResolver from snowflake.cli.api.config_ng.source_manager import SourceManager ALTERNATIVE_CONFIG_ENV_VAR: Final[str] = "SNOWFLAKE_CLI_CONFIG_V2_ENABLED" +log = logging.getLogger(__name__) + class ConfigProvider(ABC): """ @@ -196,8 +201,12 @@ def _ensure_initialized(self) -> None: self._config_cache.clear() self._last_config_override = current_override override_changed = True - except Exception: - pass + except Exception as exc: + sanitized_error = sanitize_source_error(exc) + log.warning( + "Failed to inspect CLI config override; continuing with cached value: %s", + sanitized_error, + ) if override_changed and self._owns_source_manager: # Discard cached sources so that new config override can take effect @@ -502,9 +511,13 @@ def _get_file_based_connections(self) -> dict: if key == "connections": continue file_data[key] = value - except Exception: - # Silently skip sources that fail to discover - pass + except Exception as exc: + sanitized_error = sanitize_source_error(exc) + log.warning( + "Skipping config source %s due to discovery error: %s", + source.source_name, + sanitized_error, + ) general_params, remaining_config = extract_root_level_connection_params( file_data diff --git a/src/snowflake/cli/api/sanitizers.py b/src/snowflake/cli/api/sanitizers.py index 59040a8d4c..b540a91fc1 100644 --- a/src/snowflake/cli/api/sanitizers.py +++ b/src/snowflake/cli/api/sanitizers.py @@ -15,6 +15,7 @@ from __future__ import annotations import re +from typing import List # 7-bit C1 ANSI sequences _ANSI_ESCAPE = re.compile( @@ -41,3 +42,32 @@ def sanitize_for_terminal(text: str) -> str | None: if text is None: return None return 
_ANSI_ESCAPE.sub("", text) + + +def sanitize_source_error(exc: Exception) -> str: + """ + Produce a logging-safe description of discovery errors. + + Keys and structural metadata (section/key/line) are preserved, but raw + values are never rendered so sensitive data cannot leak through logs. + """ + + safe_parts: List[str] = [exc.__class__.__name__] + attribute_labels = ( + ("section", "section"), + ("option", "key"), + ("key", "key"), + ("lineno", "line"), + ("colno", "column"), + ("pos", "position"), + ) + + for attr_name, label in attribute_labels: + attr_value = getattr(exc, attr_name, None) + if attr_value: + safe_parts.append(f"{label}={attr_value}") + + if len(safe_parts) == 1: + safe_parts.append("details_masked") + + return ", ".join(safe_parts) From 286f7a2d43f51121fe86ba3fad2b71b7dc0f0c7f Mon Sep 17 00:00:00 2001 From: Marcin Raba Date: Mon, 24 Nov 2025 16:58:14 +0100 Subject: [PATCH 78/78] SNOW-2306184: config refactor - silent exceptions logged as warning with sanitisation 2 --- src/snowflake/cli/api/config_ng/sources.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/snowflake/cli/api/config_ng/sources.py b/src/snowflake/cli/api/config_ng/sources.py index 52e895637c..916a8f1698 100644 --- a/src/snowflake/cli/api/config_ng/sources.py +++ b/src/snowflake/cli/api/config_ng/sources.py @@ -39,7 +39,6 @@ from snowflake.cli.api.config_ng.core import SourceType, ValueSource from snowflake.cli.api.exceptions import ( ConfigFileTooWidePermissionsError, - UnsupportedConfigSectionTypeError, ) from snowflake.cli.api.secure_utils import file_permissions_are_strict from snowflake.cli.api.utils.types import try_cast_to_bool @@ -911,7 +910,7 @@ def get_merged_variables(cli_variables: Optional[List[str]] = None) -> Dict[str, provider = get_config_provider_singleton() try: snowsql_vars = provider.get_section(SnowSQLSection.VARIABLES.value) - except UnsupportedConfigSectionTypeError as exc: + except Exception as exc: # noqa: BLE001 - legacy 
providers can raise custom errors log.warning("Failed to load SnowSQL variables: %s", exc) snowsql_vars = {}