diff --git a/.github/workflows/_siteops-deploy.yaml b/.github/workflows/_siteops-deploy.yaml index 83f91a2..300d884 100644 --- a/.github/workflows/_siteops-deploy.yaml +++ b/.github/workflows/_siteops-deploy.yaml @@ -111,58 +111,14 @@ jobs: SITE_OVERRIDES: ${{ secrets.SITE_OVERRIDES }} shell: bash run: | - # Generate sites.local/ override files from SITE_OVERRIDES JSON secret. - # Supports nested paths using dot notation at any depth: - # { - # "munich-dev": { - # "subscription": "...", - # "resourceGroup": "...", - # "parameters.clusterName": "actual-cluster-name", - # "parameters.dataflowIdentity.clientId": "..." - # } - # } - if [[ -z "$SITE_OVERRIDES" ]]; then echo "No site overrides configured, using committed sites" exit 0 fi - python3 - <<'GENERATE_OVERLAYS' - import json, os, re, sys, yaml - - overrides = json.loads(os.environ["SITE_OVERRIDES"]) - workspace = os.environ["INPUT_WORKSPACE"] - site_name_re = re.compile(r"^[a-zA-Z0-9_-]+$") - - sites_local = os.path.join(workspace, "sites.local") - os.makedirs(sites_local, exist_ok=True) - - for site_name, values in overrides.items(): - if not site_name_re.match(site_name): - print(f"::error::Invalid site name in SITE_OVERRIDES: {site_name}") - sys.exit(1) - - # Expand dot-notation keys into nested dict - nested = {} - for key, value in values.items(): - parts = key.split(".") - current = nested - for part in parts[:-1]: - current = current.setdefault(part, {}) - current[parts[-1]] = value - - output_file = os.path.join(sites_local, f"{site_name}.yaml") - with open(output_file, "w") as f: - yaml.safe_dump(nested, f, default_flow_style=False) - - print(f"✓ Generated override for: {site_name}") - - print(f"✓ Generated {len(overrides)} site override(s)") - GENERATE_OVERLAYS + echo "$SITE_OVERRIDES" | python3 scripts/generate-site-overrides.py "$INPUT_WORKSPACE" - # Register each override value with GitHub's log masking so - # subscription IDs, resource groups, etc. 
appear as *** in all - # subsequent step output (logs, summaries, artifacts). + # Mask override values in GitHub Actions logs echo "$SITE_OVERRIDES" | jq -r '.. | strings' | while IFS= read -r val; do [[ -n "$val" ]] && echo "::add-mask::${val}" done diff --git a/.github/workflows/integration-test.yaml b/.github/workflows/integration-test.yaml new file mode 100644 index 0000000..f929ccf --- /dev/null +++ b/.github/workflows/integration-test.yaml @@ -0,0 +1,103 @@ +# Integration tests: deploy manifests against real Azure and assert outputs +# On-demand only via workflow_dispatch + +name: Integration Tests + +on: + workflow_dispatch: + inputs: + manifest: + description: "Manifest to test" + type: choice + options: + - all + - aio-install + - secretsync + - opc-ua-solution + default: all + skip-cleanup: + description: "Skip resource cleanup (for debugging)" + type: boolean + default: false + selector: + description: "Site selector override (default: manifest selector)" + type: string + default: "" + environment: + description: "Target environment" + type: string + default: "dev" + +env: + PYTHONUNBUFFERED: "1" + +jobs: + integration-test: + name: Integration Tests + runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + + permissions: + id-token: write + contents: read + + steps: + - name: Checkout repository + uses: actions/checkout@v6 + with: + persist-credentials: false + + - name: Setup Site Ops + uses: ./.github/actions/setup-siteops + with: + install-dev: true + + - name: Azure Login (OIDC) + uses: azure/login@a457da9ea143d694b1b9c7c869ebb04ebe844ef5 # v2 + with: + client-id: ${{ secrets.AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.AZURE_TENANT_ID }} + subscription-id: ${{ secrets.AZURE_SUBSCRIPTION_ID }} + + - name: Select test scope + id: scope + run: | + MANIFEST="${{ inputs.manifest }}" + if [[ "$MANIFEST" == "all" ]]; then + echo "pytest_args=tests/integration/" >> $GITHUB_OUTPUT + else + echo 
"pytest_args=tests/integration/test_${MANIFEST//-/_}_manifest.py" >> $GITHUB_OUTPUT + fi + + - name: Mask secret values + env: + SITE_OVERRIDES: ${{ secrets.SITE_OVERRIDES }} + run: | + if [[ -n "$SITE_OVERRIDES" ]]; then + echo "$SITE_OVERRIDES" | jq -r '.. | strings' | while IFS= read -r val; do + [[ -n "$val" ]] && echo "::add-mask::${val}" + done + fi + + - name: Run integration tests + env: + SITE_OVERRIDES: ${{ secrets.SITE_OVERRIDES }} + INTEGRATION_SKIP_CLEANUP: ${{ inputs.skip-cleanup }} + INTEGRATION_SELECTOR: ${{ inputs.selector }} + run: > + pytest ${{ steps.scope.outputs.pytest_args }} + -v --tb=long + -m integration + --junitxml=integration-results.xml + + - name: Publish test results + uses: actions/upload-artifact@v4 + if: always() + with: + name: integration-test-results + path: integration-results.xml + + - name: Azure Logout + if: always() + run: az logout + continue-on-error: true diff --git a/.pipelines/integration-test.yaml b/.pipelines/integration-test.yaml new file mode 100644 index 0000000..d09fbe4 --- /dev/null +++ b/.pipelines/integration-test.yaml @@ -0,0 +1,115 @@ +# Integration tests: deploy manifests against real Azure and assert outputs +# On-demand only via manual trigger +# Equivalent to .github/workflows/integration-test.yaml + +trigger: none + +pr: none + +parameters: + - name: manifest + displayName: "Manifest to test" + type: string + default: all + values: + - all + - aio-install + - secretsync + - opc-ua-solution + + - name: skipCleanup + displayName: "Skip resource cleanup (for debugging)" + type: boolean + default: false + + - name: environment + displayName: Target environment + type: string + default: dev + + - name: selector + displayName: "Site selector override (default: manifest selector)" + type: string + default: "" + + - name: serviceConnections + displayName: Service connection per environment + type: object + default: + dev: azure-siteops + staging: azure-siteops + prod: azure-siteops + + - name: secretGroups + 
displayName: Variable group per environment + type: object + default: + dev: siteops-secrets + staging: siteops-secrets + prod: siteops-secrets + +variables: + - group: ${{ parameters.secretGroups[parameters.environment] }} + - name: PYTHONUNBUFFERED + value: '1' + +pool: + vmImage: ubuntu-latest + +jobs: + - deployment: integration_test + displayName: Integration Tests + environment: ${{ parameters.environment }} + + strategy: + runOnce: + deploy: + steps: + - checkout: self + fetchDepth: 1 + persistCredentials: false + + - template: templates/setup-siteops.yaml + parameters: + installDev: true + + - script: | + if [[ -n "$SITE_OVERRIDES" ]]; then + echo "$SITE_OVERRIDES" | jq -r '.. | strings' | while IFS= read -r val; do + [[ -n "$val" ]] && echo "##vso[task.setvariable variable=masked;issecret=true]${val}" + done + fi + displayName: Mask secret values + env: + SITE_OVERRIDES: $(SITE_OVERRIDES) + + - task: AzureCLI@2 + displayName: Run integration tests + inputs: + azureSubscription: ${{ parameters.serviceConnections[parameters.environment] }} + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + MANIFEST="${{ parameters.manifest }}" + if [[ "$MANIFEST" == "all" ]]; then + PYTEST_ARGS="tests/integration/" + else + PYTEST_ARGS="tests/integration/test_${MANIFEST//-/_}_manifest.py" + fi + + pytest $PYTEST_ARGS \ + -v --tb=long \ + -m integration \ + --junitxml=$(Build.ArtifactStagingDirectory)/integration-results.xml + env: + SITE_OVERRIDES: $(SITE_OVERRIDES) + INTEGRATION_SKIP_CLEANUP: ${{ parameters.skipCleanup }} + INTEGRATION_SELECTOR: ${{ parameters.selector }} + + - task: PublishTestResults@2 + displayName: Publish test results + condition: succeededOrFailed() + inputs: + testResultsFormat: JUnit + testResultsFiles: "$(Build.ArtifactStagingDirectory)/integration-results.xml" + testRunTitle: Integration Tests diff --git a/.pipelines/templates/siteops-deploy.yaml b/.pipelines/templates/siteops-deploy.yaml index c417bee..384cf43 100644 --- 
a/.pipelines/templates/siteops-deploy.yaml +++ b/.pipelines/templates/siteops-deploy.yaml @@ -75,58 +75,14 @@ stages: SELECTOR: ${{ parameters.selector }} - script: | - # Generate sites.local/ override files from SITE_OVERRIDES JSON secret. - # Supports nested paths using dot notation at any depth: - # { - # "munich-dev": { - # "subscription": "...", - # "resourceGroup": "...", - # "parameters.clusterName": "actual-cluster-name", - # "parameters.dataflowIdentity.clientId": "..." - # } - # } - if [[ -z "$SITE_OVERRIDES" ]]; then echo "No site overrides configured, using committed sites" exit 0 fi - python3 - <<'GENERATE_OVERLAYS' - import json, os, re, sys, yaml - - overrides = json.loads(os.environ["SITE_OVERRIDES"]) - workspace = os.environ["WORKSPACE"] - site_name_re = re.compile(r"^[a-zA-Z0-9_-]+$") - - sites_local = os.path.join(workspace, "sites.local") - os.makedirs(sites_local, exist_ok=True) - - for site_name, values in overrides.items(): - if not site_name_re.match(site_name): - print(f"##vso[task.logissue type=error]Invalid site name in SITE_OVERRIDES: {site_name}") - sys.exit(1) - - # Expand dot-notation keys into nested dict - nested = {} - for key, value in values.items(): - parts = key.split(".") - current = nested - for part in parts[:-1]: - current = current.setdefault(part, {}) - current[parts[-1]] = value - - output_file = os.path.join(sites_local, f"{site_name}.yaml") - with open(output_file, "w") as f: - yaml.safe_dump(nested, f, default_flow_style=False) - - print(f"✓ Generated override for: {site_name}") - - print(f"✓ Generated {len(overrides)} site override(s)") - GENERATE_OVERLAYS + echo "$SITE_OVERRIDES" | python3 scripts/generate-site-overrides.py "$WORKSPACE" - # Mask each override value in ADO logs. - # ADO masking is additive — once a value is registered as secret, - # it stays masked for the remainder of the job. + # Mask override values in ADO logs echo "$SITE_OVERRIDES" | jq -r '.. 
| strings' | while IFS= read -r val; do [[ -n "$val" ]] && echo "##vso[task.setvariable variable=masked;issecret=true]${val}" done diff --git a/pyproject.toml b/pyproject.toml index b752df3..ed779e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,9 @@ testpaths = ["tests"] python_files = ["test_*.py"] python_functions = ["test_*"] addopts = "-v --tb=short" +markers = [ + "integration: requires Azure credentials and real resources (deselect with '-m not integration')", +] filterwarnings = [ "ignore::DeprecationWarning", ] diff --git a/scripts/generate-site-overrides.py b/scripts/generate-site-overrides.py new file mode 100644 index 0000000..be36f11 --- /dev/null +++ b/scripts/generate-site-overrides.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python3 +"""Generate sites.local/ overlay files from a SITE_OVERRIDES JSON string. + +Expands dot-notation keys into nested YAML structures: + {"munich-dev": {"parameters.clusterName": "arc-01"}} + becomes sites.local/munich-dev.yaml: + parameters: + clusterName: arc-01 + +Usage: + # From environment variable + python scripts/generate-site-overrides.py --from-env SITE_OVERRIDES + + # From stdin + echo "$SITE_OVERRIDES" | python scripts/generate-site-overrides.py + + # From file + python scripts/generate-site-overrides.py --file overrides.json + +Returns exit code 0 on success (or if input is empty), 1 on error. +""" + +import argparse +import json +import os +import re +import sys +from pathlib import Path + +import yaml + +SITE_NAME_PATTERN = re.compile(r"^[a-zA-Z0-9_-]+$") + + +def expand_dot_notation(flat: dict) -> dict: + """Expand dot-notation keys into a nested dictionary. 
+ + Example: + {"parameters.broker.memoryProfile": "Low", "subscription": "abc"} + → {"parameters": {"broker": {"memoryProfile": "Low"}}, "subscription": "abc"} + """ + nested: dict = {} + for key, value in flat.items(): + parts = key.split(".") + current = nested + for part in parts[:-1]: + current = current.setdefault(part, {}) + current[parts[-1]] = value + return nested + + +def generate_overlays(overrides: dict, sites_local: Path) -> list[Path]: + """Generate sites.local/ YAML overlay files from an overrides dict. + + Args: + overrides: Dict mapping site names to their override key/value pairs. + sites_local: Path to the sites.local/ directory. + + Returns: + List of generated file paths. + + Raises: + ValueError: If a site name contains invalid characters. + """ + sites_local.mkdir(parents=True, exist_ok=True) + generated = [] + + for site_name, values in overrides.items(): + if not SITE_NAME_PATTERN.match(site_name): + raise ValueError( + f"Invalid site name: '{site_name}' (must match {SITE_NAME_PATTERN.pattern})" + ) + + output_path = sites_local / f"{site_name}.yaml" + if output_path.exists(): + continue + + expanded = expand_dot_notation(values) + # NOTE: output_path was computed above; pre-existing overlays are skipped, never overwritten + output_path.write_text(yaml.safe_dump(expanded, default_flow_style=False)) + generated.append(output_path) + + return generated + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Generate sites.local/ overlays from SITE_OVERRIDES JSON" + ) + parser.add_argument("workspace", help="Workspace directory path") + parser.add_argument("--from-env", metavar="VAR", help="Read JSON from environment variable") + parser.add_argument("--file", metavar="PATH", help="Read JSON from file") + args = parser.parse_args() + + if args.from_env: + raw = os.environ.get(args.from_env, "") + elif args.file: + raw = Path(args.file).read_text(encoding="utf-8") + elif not sys.stdin.isatty(): + raw = sys.stdin.read() + else: + print("No input provided. 
Use --from-env, --file, or pipe JSON to stdin.", file=sys.stderr) + sys.exit(1) + + if not raw.strip(): + print("No site overrides configured, using committed sites") + sys.exit(0) + + try: + overrides = json.loads(raw) + except json.JSONDecodeError as e: + print(f"Error: Invalid JSON in site overrides: {e}", file=sys.stderr) + sys.exit(1) + + sites_local = Path(args.workspace) / "sites.local" + + try: + generated = generate_overlays(overrides, sites_local) + except ValueError as e: + print(f"Error: {e}", file=sys.stderr) + sys.exit(1) + + for path in generated: + print(f" {path.name}") + print(f"Generated {len(generated)} site override(s)") + + +if __name__ == "__main__": + main() diff --git a/siteops/cli.py b/siteops/cli.py index 589f411..6bb524e 100644 --- a/siteops/cli.py +++ b/siteops/cli.py @@ -19,7 +19,7 @@ from siteops.orchestrator import Orchestrator -def setup_logging(verbose: bool = False): +def setup_logging(verbose: bool = False) -> None: """Configure logging based on verbosity level.""" level = logging.DEBUG if verbose else logging.INFO logging.basicConfig( @@ -37,7 +37,7 @@ def resolve_manifest_path(manifest: Path, workspace: Path) -> Path: return workspace / manifest -def cmd_deploy(args, orchestrator: Orchestrator) -> int: +def cmd_deploy(args: argparse.Namespace, orchestrator: Orchestrator) -> int: """Execute deployment.""" manifest_path = resolve_manifest_path(args.manifest, args.workspace) @@ -79,7 +79,7 @@ def cmd_deploy(args, orchestrator: Orchestrator) -> int: return 0 -def cmd_validate(args, orchestrator: Orchestrator) -> int: +def cmd_validate(args: argparse.Namespace, orchestrator: Orchestrator) -> int: """Validate manifest and optionally show deployment plan.""" manifest_path = resolve_manifest_path(args.manifest, args.workspace) @@ -148,7 +148,7 @@ def _print_value(value: Any, indent: int = 6) -> None: print(f"{prefix}{value}") -def cmd_sites(args, orchestrator: Orchestrator) -> int: +def cmd_sites(args: argparse.Namespace, orchestrator: 
Orchestrator) -> int: """List available sites in the workspace.""" all_sites = orchestrator.load_all_sites() @@ -204,7 +204,7 @@ def cmd_sites(args, orchestrator: Orchestrator) -> int: return 0 -def main(): +def main() -> None: """Main entry point for the Site Ops CLI.""" parser = argparse.ArgumentParser( prog="siteops", diff --git a/siteops/executor.py b/siteops/executor.py index 5e7a32f..fdd85b1 100644 --- a/siteops/executor.py +++ b/siteops/executor.py @@ -492,8 +492,8 @@ def _arc_proxy( logger.debug(f"Error during proxy cleanup: {e}") try: proxy_process.kill() - except Exception: - pass + except Exception as e: + logger.debug(f"Failed to kill proxy process: {e}") # Release the allocated port slot if allocated_port is not None: diff --git a/siteops/models.py b/siteops/models.py index f2bac81..3cdf742 100644 --- a/siteops/models.py +++ b/siteops/models.py @@ -371,7 +371,7 @@ class DeploymentStep: scope: str = "resourceGroup" when: str | None = None - def __post_init__(self): + def __post_init__(self) -> None: if self.scope not in VALID_SCOPES: raise ValueError(f"Invalid scope '{self.scope}'. Must be one of: {VALID_SCOPES}") @@ -431,7 +431,7 @@ class KubectlStep: files: list[str] = field(default_factory=list) when: str | None = None - def __post_init__(self): + def __post_init__(self) -> None: if self.operation not in KUBECTL_OPERATIONS: raise ValueError( f"Invalid kubectl operation '{self.operation}'. 
" f"Supported: {', '.join(sorted(KUBECTL_OPERATIONS))}" diff --git a/siteops/orchestrator.py b/siteops/orchestrator.py index b91f75b..a8f4e1d 100644 --- a/siteops/orchestrator.py +++ b/siteops/orchestrator.py @@ -100,7 +100,7 @@ def _resolve_output_path(obj: Any, path: str) -> Any: _print_lock = threading.Lock() -def _thread_safe_print(*args, **kwargs): +def _thread_safe_print(*args: Any, **kwargs: Any) -> None: """Print with lock to avoid interleaved output from multiple threads.""" with _print_lock: print(*args, **kwargs) @@ -817,7 +817,7 @@ def resolve_parameters( def _check_unresolved_templates(self, params: dict[str, Any], site_name: str) -> None: """Warn if any {{ ... }} templates weren't resolved.""" - def check_value(v, path=""): + def check_value(v: Any, path: str = "") -> None: if isinstance(v, str) and "{{" in v and "}}" in v: logger.warning(f"Unresolved template in {path}: {v} (site: {site_name})") elif isinstance(v, dict): @@ -1717,6 +1717,13 @@ def validate(self, manifest_path: Path, selector: str | None = None) -> list[str # Build step name lookup for output reference validation all_step_names = {step.name for step in manifest.steps} + # Check for duplicate step names + seen_names: set[str] = set() + for step in manifest.steps: + if step.name in seen_names: + errors.append(f"Duplicate step name: '{step.name}'") + seen_names.add(step.name) + for step_index, step in enumerate(manifest.steps): # Steps that execute before this one (valid sources for output references) prior_step_names = {s.name for s in manifest.steps[:step_index]} diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 0000000..3099d50 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,141 @@ +"""Fixtures for integration tests. + +Integration tests deploy real manifests against Azure and assert outputs. 
+The test framework is site-agnostic: it deploys to whatever sites match +the manifest's selector (or a user-provided override), just like production. + +Configuration is provided via: + - Local: sites.local/ overlay files (hand-written YAML, one per site) + - CI: SITE_OVERRIDES env var (JSON, auto-generates sites.local/ overlays) + +If no sites match the selector, integration tests are skipped gracefully. +""" + +import os +import subprocess +import sys +from pathlib import Path + +import pytest + +from siteops.orchestrator import Orchestrator + +WORKSPACE_PATH = Path(__file__).parent.parent.parent / "workspaces" / "iot-operations" +SCRIPT_PATH = Path(__file__).parent.parent.parent / "scripts" / "generate-site-overrides.py" + + +def _generate_overlays_from_site_overrides() -> bool: + """Generate sites.local/ overlays by calling the shared script. + + Returns True if overlays were generated. + """ + raw = os.environ.get("SITE_OVERRIDES", "") + if not raw.strip(): + return False + + result = subprocess.run( + [sys.executable, str(SCRIPT_PATH), str(WORKSPACE_PATH)], + input=raw, + capture_output=True, + text=True, + ) + if result.returncode != 0: + print(f"generate-site-overrides.py failed: {result.stderr}", file=sys.stderr) + return False + + return True + + +_pre_existing_overlays: set[str] = set() +_generated_overlays = False + + +def pytest_collection_modifyitems(config, items): + """Generate overlays from SITE_OVERRIDES and skip if no config available.""" + global _generated_overlays, _pre_existing_overlays + + # Snapshot existing overlay files before generation + sites_local = WORKSPACE_PATH / "sites.local" + if sites_local.is_dir(): + _pre_existing_overlays = {f.name for f in sites_local.glob("*.yaml")} + + _generated_overlays = _generate_overlays_from_site_overrides() + + has_config = _generated_overlays or ( + sites_local.is_dir() and any(sites_local.glob("*.yaml")) + ) + + if not has_config: + skip = pytest.mark.skip( + reason="Integration tests require 
sites.local/ overlays " + "or SITE_OVERRIDES env var" + ) + for item in items: + if "integration" in item.keywords: + item.add_marker(skip) + + +def pytest_sessionfinish(session, exitstatus): + """Clean up generated overlays unless skip-cleanup is set.""" + skip_cleanup = os.environ.get("INTEGRATION_SKIP_CLEANUP", "").lower() in ("true", "1", "yes") + if _generated_overlays and not skip_cleanup: + sites_local = WORKSPACE_PATH / "sites.local" + if sites_local.is_dir(): + for f in sites_local.glob("*.yaml"): + if f.name not in _pre_existing_overlays: + f.unlink(missing_ok=True) + + +@pytest.fixture(scope="session") +def workspace() -> Path: + """Path to the IoT Operations workspace.""" + assert WORKSPACE_PATH.is_dir(), f"Workspace not found: {WORKSPACE_PATH}" + return WORKSPACE_PATH + + +@pytest.fixture(scope="session") +def selector() -> str | None: + """Site selector from INTEGRATION_SELECTOR env var, or None for manifest default.""" + return os.environ.get("INTEGRATION_SELECTOR") or None + + +@pytest.fixture(scope="session") +def orchestrator(workspace: Path) -> Orchestrator: + """Orchestrator configured for the real workspace.""" + return Orchestrator(workspace) + + +@pytest.fixture(scope="session") +def aio_install_result(orchestrator: Orchestrator, selector: str | None) -> dict: + """Deploy aio-install.yaml once, shared by all dependent tests.""" + result = orchestrator.deploy( + manifest_path=WORKSPACE_PATH / "manifests" / "aio-install.yaml", + selector=selector, + ) + assert result["summary"]["failed"] == 0, ( + f"aio-install deployment failed: {result}" + ) + return result + + +@pytest.fixture(scope="session") +def secretsync_result( + orchestrator: Orchestrator, selector: str | None, aio_install_result: dict +) -> dict: + """Deploy secretsync.yaml after AIO is installed.""" + return orchestrator.deploy( + manifest_path=WORKSPACE_PATH / "manifests" / "secretsync.yaml", + selector=selector, + ) + + +@pytest.fixture(scope="session") +def 
opc_ua_solution_result( + orchestrator: Orchestrator, selector: str | None, aio_install_result: dict +) -> dict: + """Deploy opc-ua-solution.yaml after AIO is installed.""" + return orchestrator.deploy( + manifest_path=WORKSPACE_PATH / "manifests" / "opc-ua-solution.yaml", + selector=selector, + ) + diff --git a/tests/integration/helpers/__init__.py b/tests/integration/helpers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/helpers/assertions.py b/tests/integration/helpers/assertions.py new file mode 100644 index 0000000..1d2c4d2 --- /dev/null +++ b/tests/integration/helpers/assertions.py @@ -0,0 +1,72 @@ +"""Assertion helpers for integration tests.""" + +from typing import Any + + +def find_step(result: dict[str, Any], site_name: str, step_name: str) -> dict[str, Any]: + """Find a step result by site and step name. + + Args: + result: Full deployment result from orchestrator.deploy() + site_name: Name of the site + step_name: Name of the step + + Returns: + Step result dict with keys: step, status, outputs, error, reason + + Raises: + KeyError: If site not found in results + ValueError: If step not found for the site + """ + site_result = result["sites"][site_name] + for step in site_result["steps"]: + if step["step"] == step_name: + return step + available = [s["step"] for s in site_result["steps"]] + raise ValueError(f"Step '{step_name}' not found for site '{site_name}'. 
Available: {available}") + + +def assert_step_succeeded(result: dict[str, Any], site_name: str, step_name: str) -> dict[str, Any]: + """Assert a step succeeded and return its result for further assertions.""" + step = find_step(result, site_name, step_name) + assert step["status"] == "success", ( + f"Step '{step_name}' did not succeed for site '{site_name}': " + f"status={step['status']}, error={step.get('error')}" + ) + return step + + +def assert_step_skipped(result: dict[str, Any], site_name: str, step_name: str) -> dict[str, Any]: + """Assert a step was skipped and return its result.""" + step = find_step(result, site_name, step_name) + assert step["status"] == "skipped", ( + f"Step '{step_name}' was not skipped for site '{site_name}': status={step['status']}" + ) + return step + + +def assert_output_exists(step_result: dict[str, Any], output_name: str) -> Any: + """Assert an output exists in a step result and return its value. + + Handles both raw values and Azure ARM wrapped format {"value": X, "type": "..."}. + """ + outputs = step_result.get("outputs", {}) + assert output_name in outputs, ( + f"Output '{output_name}' not found in step '{step_result['step']}'. 
" + f"Available: {sorted(outputs.keys())}" + ) + output = outputs[output_name] + if isinstance(output, dict) and "value" in output: + return output["value"] + return output + + +def assert_output_starts_with( + step_result: dict[str, Any], output_name: str, prefix: str +) -> str: + """Assert an output value starts with the given prefix.""" + value = assert_output_exists(step_result, output_name) + assert isinstance(value, str) and value.startswith(prefix), ( + f"Output '{output_name}' expected to start with '{prefix}', got: {value}" + ) + return value diff --git a/tests/integration/test_aio_install_manifest.py b/tests/integration/test_aio_install_manifest.py new file mode 100644 index 0000000..4befa42 --- /dev/null +++ b/tests/integration/test_aio_install_manifest.py @@ -0,0 +1,73 @@ +"""Integration tests for the aio-install.yaml manifest.""" + +import pytest + +from tests.integration.helpers.assertions import ( + assert_output_exists, + assert_step_succeeded, + find_step, +) +from tests.integration.conftest import WORKSPACE_PATH + +pytestmark = [pytest.mark.integration] + + +class TestAioInstallDeployment: + """Validate that aio-install.yaml deploys successfully.""" + + def test_no_failures(self, aio_install_result): + assert aio_install_result["summary"]["failed"] == 0 + + def test_all_sites_succeeded(self, aio_install_result): + for name in aio_install_result["sites"]: + site = aio_install_result["sites"][name] + assert site["status"] == "success", f"Site '{name}' failed: {site.get('error')}" + + def test_schema_registry_outputs(self, aio_install_result): + for name in aio_install_result["sites"]: + step = assert_step_succeeded(aio_install_result, name, "schema-registry") + assert_output_exists(step, "schemaRegistry") + + def test_adr_ns_outputs(self, aio_install_result): + for name in aio_install_result["sites"]: + step = assert_step_succeeded(aio_install_result, name, "adr-ns") + assert_output_exists(step, "adrNamespace") + + def 
test_aio_enablement_outputs(self, aio_install_result): + for name in aio_install_result["sites"]: + step = assert_step_succeeded(aio_install_result, name, "aio-enablement") + assert_output_exists(step, "clExtensionIds") + + def test_aio_instance_outputs(self, aio_install_result): + for name in aio_install_result["sites"]: + step = assert_step_succeeded(aio_install_result, name, "aio-instance") + assert_output_exists(step, "aio") + assert_output_exists(step, "customLocation") + assert_output_exists(step, "aioExtension") + + def test_schema_registry_role_succeeds(self, aio_install_result): + for name in aio_install_result["sites"]: + assert_step_succeeded(aio_install_result, name, "schema-registry-role") + + +class TestAioInstallConditionalSteps: + """Validate that conditional steps are gated correctly.""" + + def test_global_edge_site_skipped_for_rg_sites(self, aio_install_result): + """RG-level sites should skip the subscription-scoped edge site step.""" + for name in aio_install_result["sites"]: + step = find_step(aio_install_result, name, "global-edge-site") + assert step["status"] == "skipped", ( + f"Site '{name}': global-edge-site should be skipped for RG-level sites" + ) + + +class TestAioInstallIdempotency: + """Validate that re-deploying produces the same results.""" + + def test_redeploy_succeeds(self, orchestrator, selector, aio_install_result): + result2 = orchestrator.deploy( + manifest_path=WORKSPACE_PATH / "manifests" / "aio-install.yaml", + selector=selector, + ) + assert result2["summary"]["failed"] == 0 diff --git a/tests/integration/test_opc_ua_solution_manifest.py b/tests/integration/test_opc_ua_solution_manifest.py new file mode 100644 index 0000000..f0557a9 --- /dev/null +++ b/tests/integration/test_opc_ua_solution_manifest.py @@ -0,0 +1,48 @@ +"""Integration tests for the opc-ua-solution.yaml manifest.""" + +import pytest + +from tests.integration.helpers.assertions import ( + assert_step_succeeded, + find_step, +) +from 
tests.integration.conftest import WORKSPACE_PATH + +pytestmark = [pytest.mark.integration] + + +class TestOpcUaSolutionDeployment: + """Validate that opc-ua-solution.yaml deploys successfully.""" + + def test_no_failures(self, opc_ua_solution_result): + assert opc_ua_solution_result["summary"]["failed"] == 0 + + def test_all_sites_succeeded(self, opc_ua_solution_result): + for name in opc_ua_solution_result["sites"]: + site = opc_ua_solution_result["sites"][name] + assert site["status"] == "success", f"Site '{name}' failed: {site.get('error')}" + + def test_opc_ua_solution_step_succeeds(self, opc_ua_solution_result): + for name in opc_ua_solution_result["sites"]: + assert_step_succeeded(opc_ua_solution_result, name, "opc-ua-solution") + + +class TestOpcUaSolutionConditionalSteps: + """Validate that conditional steps are gated correctly.""" + + def test_opc_plc_simulator_conditional(self, opc_ua_solution_result): + """OPC PLC simulator should respect includeOpcPlcSimulator deploy option.""" + for name in opc_ua_solution_result["sites"]: + step = find_step(opc_ua_solution_result, name, "opc-plc-simulator") + assert step["status"] in ("success", "skipped") + + +class TestOpcUaSolutionIdempotency: + """Validate that re-deploying produces the same results.""" + + def test_redeploy_succeeds(self, orchestrator, selector, opc_ua_solution_result): + result2 = orchestrator.deploy( + manifest_path=WORKSPACE_PATH / "manifests" / "opc-ua-solution.yaml", + selector=selector, + ) + assert result2["summary"]["failed"] == 0 diff --git a/tests/integration/test_secretsync_manifest.py b/tests/integration/test_secretsync_manifest.py new file mode 100644 index 0000000..821547e --- /dev/null +++ b/tests/integration/test_secretsync_manifest.py @@ -0,0 +1,104 @@ +"""Integration tests for the secretsync.yaml manifest.""" + +import pytest + +from tests.integration.helpers.assertions import ( + assert_output_exists, + assert_output_starts_with, + assert_step_succeeded, + find_step, +) +from 
tests.integration.conftest import WORKSPACE_PATH + +pytestmark = [pytest.mark.integration] + + +class TestSecretSyncDeployment: + """Validate that secretsync.yaml deploys successfully.""" + + def test_no_failures(self, secretsync_result): + assert secretsync_result["summary"]["failed"] == 0 + + def test_all_sites_succeeded(self, secretsync_result): + for name in secretsync_result["sites"]: + site = secretsync_result["sites"][name] + assert site["status"] == "success", f"Site '{name}' failed: {site.get('error')}" + assert site["steps_completed"] == 2 + + +class TestSecretSyncResolveAio: + """Validate resolve-aio step outputs across all sites.""" + + def test_resolve_aio_succeeds(self, secretsync_result): + for name in secretsync_result["sites"]: + assert_step_succeeded(secretsync_result, name, "resolve-aio") + + def test_infrastructure_outputs(self, secretsync_result): + for name in secretsync_result["sites"]: + step = assert_step_succeeded(secretsync_result, name, "resolve-aio") + assert_output_exists(step, "customLocationName") + assert_output_exists(step, "customLocationNamespace") + assert_output_exists(step, "connectedClusterName") + assert_output_starts_with(step, "customLocationId", "/subscriptions/") + + def test_oidc_issuer_url(self, secretsync_result): + for name in secretsync_result["sites"]: + step = assert_step_succeeded(secretsync_result, name, "resolve-aio") + assert_output_starts_with(step, "oidcIssuerUrl", "https://") + + def test_instance_properties_forwarded(self, secretsync_result): + for name in secretsync_result["sites"]: + step = assert_step_succeeded(secretsync_result, name, "resolve-aio") + assert_output_exists(step, "instanceLocation") + assert_output_starts_with(step, "schemaRegistryResourceId", "/subscriptions/") + assert_output_exists(step, "identityType") + + +class TestSecretSyncEnablement: + """Validate secretsync step outputs across all sites.""" + + def test_secretsync_succeeds(self, secretsync_result): + for name in 
secretsync_result["sites"]: + assert_step_succeeded(secretsync_result, name, "secretsync") + + def test_spc_created(self, secretsync_result): + for name in secretsync_result["sites"]: + step = assert_step_succeeded(secretsync_result, name, "secretsync") + assert_output_starts_with(step, "spcResourceId", "/subscriptions/") + assert_output_exists(step, "spcResourceName") + + def test_managed_identity_created(self, secretsync_result): + for name in secretsync_result["sites"]: + step = assert_step_succeeded(secretsync_result, name, "secretsync") + assert_output_exists(step, "managedIdentityPrincipalId") + assert_output_exists(step, "managedIdentityClientId") + assert_output_starts_with(step, "managedIdentityResourceId", "/subscriptions/") + + def test_key_vault_created(self, secretsync_result): + for name in secretsync_result["sites"]: + step = assert_step_succeeded(secretsync_result, name, "secretsync") + assert_output_exists(step, "keyVaultName") + assert_output_starts_with(step, "keyVaultResourceId", "/subscriptions/") + + def test_federated_credential_created(self, secretsync_result): + for name in secretsync_result["sites"]: + step = assert_step_succeeded(secretsync_result, name, "secretsync") + assert_output_exists(step, "federatedCredentialName") + + +class TestSecretSyncIdempotency: + """Validate that re-deploying produces consistent results.""" + + def test_redeploy_succeeds_with_same_outputs(self, orchestrator, selector, secretsync_result): + result2 = orchestrator.deploy( + manifest_path=WORKSPACE_PATH / "manifests" / "secretsync.yaml", + selector=selector, + ) + assert result2["summary"]["failed"] == 0 + + for name in secretsync_result["sites"]: + step1 = find_step(secretsync_result, name, "secretsync") + step2 = find_step(result2, name, "secretsync") + spc1 = assert_output_exists(step1, "spcResourceId") + spc2 = assert_output_exists(step2, "spcResourceId") + assert spc1 == spc2, f"Site '{name}': SPC resource ID changed on redeploy" diff --git 
a/tests/test_executor.py b/tests/test_executor.py index f6c886e..c24b510 100644 --- a/tests/test_executor.py +++ b/tests/test_executor.py @@ -486,6 +486,10 @@ def test_deploy_resource_group_malformed_json_output(self, tmp_workspace, sample assert result.success is True assert result.outputs == {} + assert result.error is None + assert result.step_name == "step-1" + assert result.site_name == "site-1" + assert result.deployment_name == "test-deploy" def test_deploy_resource_group_dry_run(self, tmp_workspace, sample_bicep_template): executor = AzCliExecutor(workspace=tmp_workspace, dry_run=True) @@ -504,6 +508,61 @@ def test_deploy_resource_group_dry_run(self, tmp_workspace, sample_bicep_templat assert result.success is True mock_run.assert_not_called() + def test_deploy_resource_group_plain_text_stdout(self, tmp_workspace, sample_bicep_template, monkeypatch): + """Test that plain non-JSON stdout with success returncode doesn't crash.""" + executor = AzCliExecutor(workspace=tmp_workspace) + monkeypatch.setattr(executor, "_az_path", "/usr/bin/az") + + mock_result = subprocess.CompletedProcess( + args=[], + returncode=0, + stdout="not json at all", + stderr="", + ) + + with patch("subprocess.run", return_value=mock_result): + result = executor.deploy_resource_group( + subscription="sub-123", + resource_group="rg-test", + template_path=sample_bicep_template, + parameters={}, + deployment_name="test-deploy", + step_name="step-1", + site_name="site-1", + ) + + assert result.success is True + assert result.outputs == {} + + def test_deploy_resource_group_truncated_json_stdout(self, tmp_workspace, sample_bicep_template, monkeypatch): + """Test that truncated JSON stdout with success returncode doesn't crash.""" + executor = AzCliExecutor(workspace=tmp_workspace) + monkeypatch.setattr(executor, "_az_path", "/usr/bin/az") + + mock_result = subprocess.CompletedProcess( + args=[], + returncode=0, + stdout='{"properties": {"outputs":', + stderr="", + ) + + with 
patch("subprocess.run", return_value=mock_result): + result = executor.deploy_resource_group( + subscription="sub-123", + resource_group="rg-test", + template_path=sample_bicep_template, + parameters={}, + deployment_name="test-deploy", + step_name="step-1", + site_name="site-1", + ) + + assert result.success is True + assert result.outputs == {} + assert result.error is None + assert result.step_name == "step-1" + assert result.site_name == "site-1" + class TestDeploySubscription: """Tests for subscription-scoped deployments.""" diff --git a/tests/test_orchestrator_core.py b/tests/test_orchestrator_core.py index 0747cdf..ad428e7 100644 --- a/tests/test_orchestrator_core.py +++ b/tests/test_orchestrator_core.py @@ -238,6 +238,78 @@ def test_overlay_adds_resource_group(self, tmp_workspace): assert site.is_subscription_level is False assert site.resource_group == "rg-from-overlay" + def test_properties_deep_merge_preserves_sibling_keys(self, tmp_workspace): + """Overlay overriding one nested key should preserve sibling keys from base.""" + base_site = { + "name": "sibling-test", + "subscription": "sub", + "location": "eastus", + "properties": { + "deployOptions": {"enableSecretSync": True, "includeSolution": False}, + }, + } + (tmp_workspace / "sites" / "sibling-test.yaml").write_text(yaml.dump(base_site)) + + (tmp_workspace / "sites.local").mkdir() + local_override = { + "properties": {"deployOptions": {"enableSecretSync": False}}, + } + (tmp_workspace / "sites.local" / "sibling-test.yaml").write_text(yaml.dump(local_override)) + + orchestrator = Orchestrator(tmp_workspace) + site = orchestrator.load_site("sibling-test") + + assert site.properties["deployOptions"]["enableSecretSync"] is False # Overridden + assert site.properties["deployOptions"]["includeSolution"] is False # Preserved from base + + def test_properties_deep_merge_overlay_adds_new_nested_key(self, tmp_workspace): + """Overlay adding a new nested key should merge with existing base keys.""" + 
base_site = { + "name": "add-key-test", + "subscription": "sub", + "location": "eastus", + "properties": { + "deployOptions": {"includeSolution": True}, + }, + } + (tmp_workspace / "sites" / "add-key-test.yaml").write_text(yaml.dump(base_site)) + + (tmp_workspace / "sites.local").mkdir() + local_override = { + "properties": {"deployOptions": {"enableSecretSync": True}}, + } + (tmp_workspace / "sites.local" / "add-key-test.yaml").write_text(yaml.dump(local_override)) + + orchestrator = Orchestrator(tmp_workspace) + site = orchestrator.load_site("add-key-test") + + assert site.properties["deployOptions"]["includeSolution"] is True # Preserved from base + assert site.properties["deployOptions"]["enableSecretSync"] is True # Added by overlay + + def test_parameters_deep_merge_preserves_sibling_keys(self, tmp_workspace): + """Overlay overriding one nested parameter key should preserve siblings from base.""" + base_site = { + "name": "params-merge-test", + "subscription": "sub", + "location": "eastus", + "parameters": { + "brokerConfig": {"memoryProfile": "Medium", "replicas": 3}, + }, + } + (tmp_workspace / "sites" / "params-merge-test.yaml").write_text(yaml.dump(base_site)) + + (tmp_workspace / "sites.local").mkdir() + local_override = { + "parameters": {"brokerConfig": {"memoryProfile": "Low"}}, + } + (tmp_workspace / "sites.local" / "params-merge-test.yaml").write_text(yaml.dump(local_override)) + + orchestrator = Orchestrator(tmp_workspace) + site = orchestrator.load_site("params-merge-test") + + assert site.parameters["brokerConfig"]["memoryProfile"] == "Low" # Overridden + assert site.parameters["brokerConfig"]["replicas"] == 3 # Preserved from base + class TestResolveSites: """Tests for site resolution from manifests.""" @@ -841,4 +913,55 @@ def test_deployment_step_label(self, tmp_workspace): ) label = orchestrator._get_step_type_label(step) - assert label == "subscription" \ No newline at end of file + assert label == "subscription" + + +class 
TestAllStepsSkipped: + """Tests for deployments where all steps are skipped.""" + + def test_all_conditional_steps_skipped_succeeds(self, tmp_workspace, sample_bicep_template): + """Deployment should succeed with steps_completed=0 when all steps are skipped.""" + # Create site without the neverTrue property + site_data = { + "name": "test-site", + "subscription": "00000000-0000-0000-0000-000000000000", + "resourceGroup": "rg-test", + "location": "eastus", + } + (tmp_workspace / "sites" / "test-site.yaml").write_text(yaml.dump(site_data)) + + # Create manifest where every step has a condition that won't be met + manifest_data = { + "name": "all-skip-manifest", + "sites": ["test-site"], + "steps": [ + { + "name": "step1", + "template": "templates/test.bicep", + "when": "{{ site.properties.deployOptions.neverTrue }}", + }, + { + "name": "step2", + "template": "templates/test.bicep", + "when": "{{ site.properties.deployOptions.neverTrue }}", + }, + ], + } + manifest_path = tmp_workspace / "manifests" / "all-skip.yaml" + with open(manifest_path, "w", encoding="utf-8") as f: + yaml.dump(manifest_data, f) + + orchestrator = Orchestrator(tmp_workspace) + + with patch.object(orchestrator.executor, "deploy_resource_group") as mock_deploy: + result = orchestrator.deploy(manifest_path) + + # Executor should never be called since all steps are skipped + mock_deploy.assert_not_called() + + # Deployment should succeed + site_result = result["sites"]["test-site"] + assert site_result["status"] == "success" + assert site_result["steps_completed"] == 0 + assert site_result["steps_skipped"] == 2 + assert all(s["status"] == "skipped" for s in site_result["steps"]) \ No newline at end of file diff --git a/tests/test_orchestrator_inheritance.py b/tests/test_orchestrator_inheritance.py index 4266b0a..efac89e 100644 --- a/tests/test_orchestrator_inheritance.py +++ b/tests/test_orchestrator_inheritance.py @@ -355,6 +355,76 @@ def test_deep_merge_properties_with_inheritance(self, 
tmp_workspace): assert loaded.properties["baseOnly"] == "from-base" # Preserved from base assert loaded.properties["siteOnly"] == "from-site" # Added by site + def test_inheritance_preserves_sibling_deploy_options(self, tmp_workspace): + """Child overriding one deployOptions key should preserve siblings from parent.""" + shared_dir = tmp_workspace / "shared" + shared_dir.mkdir() + + parent_template = { + "kind": "SiteTemplate", + "subscription": "inherited-sub", + "properties": { + "deployOptions": {"enableSecretSync": False, "includeSolution": True}, + }, + } + (shared_dir / "parent.yaml").write_text(yaml.dump(parent_template)) + + site = { + "inherits": "../shared/parent.yaml", + "name": "deploy-opts-test", + "location": "eastus", + "properties": { + "deployOptions": {"enableSecretSync": True}, + }, + } + (tmp_workspace / "sites" / "deploy-opts-test.yaml").write_text(yaml.dump(site)) + + orchestrator = Orchestrator(tmp_workspace) + loaded = orchestrator.load_site("deploy-opts-test") + + assert loaded.properties["deployOptions"]["enableSecretSync"] is True # Overridden + assert loaded.properties["deployOptions"]["includeSolution"] is True # Preserved from parent + + def test_three_level_inheritance_preserves_deep_properties(self, tmp_workspace): + """Properties from all three levels should be preserved through deep merge.""" + shared_dir = tmp_workspace / "shared" + shared_dir.mkdir() + + grandparent = { + "kind": "SiteTemplate", + "subscription": "org-sub", + "properties": { + "deployOptions": {"enableSecretSync": False}, + }, + } + (shared_dir / "grandparent.yaml").write_text(yaml.dump(grandparent)) + + parent = { + "kind": "SiteTemplate", + "inherits": "grandparent.yaml", + "properties": { + "deployOptions": {"includeSolution": True}, + }, + } + (shared_dir / "parent.yaml").write_text(yaml.dump(parent)) + + site = { + "inherits": "../shared/parent.yaml", + "name": "three-level-test", + "location": "eastus", + "properties": { + "deployOptions": 
{"enableOpcPlcSimulator": True}, + }, + } + (tmp_workspace / "sites" / "three-level-test.yaml").write_text(yaml.dump(site)) + + orchestrator = Orchestrator(tmp_workspace) + loaded = orchestrator.load_site("three-level-test") + + assert loaded.properties["deployOptions"]["enableSecretSync"] is False # From grandparent + assert loaded.properties["deployOptions"]["includeSolution"] is True # From parent + assert loaded.properties["deployOptions"]["enableOpcPlcSimulator"] is True # From site + class TestSiteTemplateExclusion: """Tests for SiteTemplate handling in site discovery and loading.""" diff --git a/tests/test_orchestrator_parameters.py b/tests/test_orchestrator_parameters.py index b8aeb95..6d111d5 100644 --- a/tests/test_orchestrator_parameters.py +++ b/tests/test_orchestrator_parameters.py @@ -121,6 +121,41 @@ def test_resolve_complex_output_type(self, complete_workspace): assert result == ["id-1", "id-2", "id-3"] + def test_deep_nested_output_three_plus_levels(self, complete_workspace): + """Test resolving output nested 3+ levels deep.""" + orchestrator = Orchestrator(complete_workspace) + step_outputs = { + "deploy": { + "config": { + "value": {"nested": {"deep": {"value": "found"}}}, + "type": "Object", + }, + }, + } + + value = "{{ steps.deploy.outputs.config.nested.deep.value }}" + result = orchestrator._resolve_step_outputs(value, step_outputs) + + assert result == "found" + + def test_output_with_missing_mid_path(self, complete_workspace): + """Test resolving output when an intermediate key is missing.""" + orchestrator = Orchestrator(complete_workspace) + step_outputs = { + "deploy": { + "config": { + "value": {"a": "b"}, + "type": "Object", + }, + }, + } + + value = "{{ steps.deploy.outputs.config.nonexistent.subfield }}" + result = orchestrator._resolve_step_outputs(value, step_outputs) + + # Missing path should leave the template unresolved + assert result == value + class TestConditionEvaluation: """Tests for when condition evaluation.""" @@ 
-2319,4 +2354,67 @@ def test_not_equals_operator_on_properties(self, tmp_workspace): assert result is True result = orchestrator._evaluate_condition("{{ site.properties.env != 'staging' }}", site) + assert result is False + + def test_enable_secret_sync_truthy_true(self, tmp_workspace): + """Test truthy evaluation of enableSecretSync set to True.""" + orchestrator = Orchestrator(tmp_workspace) + site = Site( + name="test", + subscription="sub", + resource_group="rg", + location="eastus", + properties={"deployOptions": {"enableSecretSync": True}}, + ) + + result = orchestrator._evaluate_condition( + "{{ site.properties.deployOptions.enableSecretSync }}", site + ) + assert result is True + + def test_enable_secret_sync_truthy_false(self, tmp_workspace): + """Test truthy evaluation of enableSecretSync set to False.""" + orchestrator = Orchestrator(tmp_workspace) + site = Site( + name="test", + subscription="sub", + resource_group="rg", + location="eastus", + properties={"deployOptions": {"enableSecretSync": False}}, + ) + + result = orchestrator._evaluate_condition( + "{{ site.properties.deployOptions.enableSecretSync }}", site + ) + assert result is False + + def test_missing_intermediate_property_path_returns_falsy(self, tmp_workspace): + """Test that missing intermediate key 'deployOptions' returns False.""" + orchestrator = Orchestrator(tmp_workspace) + site = Site( + name="test", + subscription="sub", + resource_group="rg", + location="eastus", + properties={}, + ) + + result = orchestrator._evaluate_condition( + "{{ site.properties.deployOptions.enableSecretSync }}", site + ) + assert result is False + + def test_string_false_treated_as_falsy(self, tmp_workspace): + """Test that string 'false' is treated as falsy in truthy context.""" + orchestrator = Orchestrator(tmp_workspace) + site = Site( + name="test", + subscription="sub", + resource_group="rg", + location="eastus", + properties={"flag": "false"}, + ) + + result = orchestrator._evaluate_condition("{{ 
site.properties.flag }}", site) + # The string "false" is treated as falsy (case-insensitive check) assert result is False \ No newline at end of file diff --git a/tests/test_orchestrator_validation.py b/tests/test_orchestrator_validation.py index d161fc7..13bccba 100644 --- a/tests/test_orchestrator_validation.py +++ b/tests/test_orchestrator_validation.py @@ -505,6 +505,33 @@ def test_validate_rg_missing_for_rg_scoped_step(self, tmp_workspace): # This validates the exemption logic works correctly. assert not any("missing 'resourceGroup'" in e for e in errors) + def test_validate_duplicate_step_names(self, complete_workspace): + """Test behavior when manifest has duplicate step names.""" + orchestrator = Orchestrator(complete_workspace) + + manifest_data = { + "name": "dup-steps", + "sites": ["test-site"], + "steps": [ + { + "name": "deploy-infra", + "template": "templates/test.bicep", + }, + { + "name": "deploy-infra", + "template": "templates/test.bicep", + }, + ], + } + manifest_path = complete_workspace / "manifests" / "dup.yaml" + with open(manifest_path, "w", encoding="utf-8") as f: + yaml.dump(manifest_data, f) + + errors = orchestrator.validate(manifest_path) + dup_errors = [e for e in errors if "duplicate" in e.lower()] + assert len(dup_errors) == 1 + assert "deploy-infra" in dup_errors[0] + class TestKubectlValidation: """Tests for kubectl step validation.""" diff --git a/tests/workspace/__init__.py b/tests/workspace/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/workspace/conftest.py b/tests/workspace/conftest.py new file mode 100644 index 0000000..297179c --- /dev/null +++ b/tests/workspace/conftest.py @@ -0,0 +1,27 @@ +"""Fixtures for workspace content tests. + +These tests validate the actual committed workspace content (manifests, +parameters, sites, templates) is internally consistent. They use the real +workspaces/iot-operations/ directory, not synthetic fixtures. 
+""" + +import pytest +from pathlib import Path + +from siteops.orchestrator import Orchestrator + + +WORKSPACE_PATH = Path(__file__).parent.parent.parent / "workspaces" / "iot-operations" + + +@pytest.fixture(scope="module") +def workspace() -> Path: + """Path to the IoT Operations workspace.""" + assert WORKSPACE_PATH.is_dir(), f"Workspace not found: {WORKSPACE_PATH}" + return WORKSPACE_PATH + + +@pytest.fixture(scope="module") +def orchestrator(workspace: Path) -> Orchestrator: + """Orchestrator configured for the real workspace.""" + return Orchestrator(workspace) diff --git a/tests/workspace/test_manifest_validation.py b/tests/workspace/test_manifest_validation.py new file mode 100644 index 0000000..8009cf6 --- /dev/null +++ b/tests/workspace/test_manifest_validation.py @@ -0,0 +1,45 @@ +"""Tests that all workspace manifests pass validation.""" + +from pathlib import Path + +from siteops.orchestrator import Orchestrator + + +class TestManifestValidation: + """Every manifest in the workspace should validate without errors.""" + + def _get_manifest_files(self, workspace: Path) -> list[Path]: + manifests_dir = workspace / "manifests" + return sorted(manifests_dir.glob("*.yaml")) + sorted(manifests_dir.glob("*.yml")) + + def test_all_manifests_discovered(self, workspace): + """Sanity check: workspace has manifests to validate.""" + manifests = self._get_manifest_files(workspace) + assert len(manifests) >= 1, "No manifests found in workspace" + + def test_aio_install_validates(self, workspace, orchestrator): + """aio-install.yaml should validate with no errors.""" + errors = orchestrator.validate(workspace / "manifests" / "aio-install.yaml") + assert errors == [], f"aio-install.yaml validation errors: {errors}" + + def test_secretsync_validates(self, workspace, orchestrator): + """secretsync.yaml should validate with no errors.""" + errors = orchestrator.validate(workspace / "manifests" / "secretsync.yaml") + assert errors == [], f"secretsync.yaml validation 
errors: {errors}" + + def test_opc_ua_solution_validates(self, workspace, orchestrator): + """opc-ua-solution.yaml should validate with no errors.""" + errors = orchestrator.validate(workspace / "manifests" / "opc-ua-solution.yaml") + assert errors == [], f"opc-ua-solution.yaml validation errors: {errors}" + + def test_no_duplicate_step_names_in_any_manifest(self, workspace, orchestrator): + """No manifest should have duplicate step names.""" + from siteops.models import Manifest + + for manifest_path in self._get_manifest_files(workspace): + manifest = Manifest.from_file(manifest_path) + step_names = [s.name for s in manifest.steps] + duplicates = [n for n in step_names if step_names.count(n) > 1] + assert duplicates == [], ( + f"{manifest_path.name} has duplicate step names: {set(duplicates)}" + ) diff --git a/tests/workspace/test_parameter_chaining.py b/tests/workspace/test_parameter_chaining.py new file mode 100644 index 0000000..cfa4902 --- /dev/null +++ b/tests/workspace/test_parameter_chaining.py @@ -0,0 +1,154 @@ +"""Tests that parameter chaining files reference valid step outputs.""" + +import re +from pathlib import Path + +import yaml + + +# Pattern to extract step references: {{ steps.<step_name>.outputs.<output_path>
}} +STEP_OUTPUT_PATTERN = re.compile(r"\{\{\s*steps\.([^.]+)\.outputs\.(\S+?)\s*\}\}") + + +class TestParameterChaining: + """Chaining parameter files should reference steps and outputs that exist.""" + + def _get_chaining_refs(self, param_file: Path) -> list[tuple[str, str, str]]: + """Extract (step_name, output_path, raw_template) from a parameter file.""" + with open(param_file, "r", encoding="utf-8") as f: + content = f.read() + + refs = [] + for match in STEP_OUTPUT_PATTERN.finditer(content): + step_name = match.group(1) + output_path = match.group(2) + refs.append((step_name, output_path, match.group(0))) + return refs + + def _get_manifest_step_names(self, manifest_path: Path) -> set[str]: + """Get all step names from a manifest.""" + from siteops.models import Manifest + manifest = Manifest.from_file(manifest_path) + return {s.name for s in manifest.steps} + + def test_secretsync_chaining_refs_valid_steps(self, workspace): + """secretsync-chaining.yaml should only reference steps that exist in manifests.""" + chaining_file = workspace / "parameters" / "secretsync-chaining.yaml" + refs = self._get_chaining_refs(chaining_file) + assert len(refs) > 0, "No step output references found in secretsync-chaining.yaml" + + # Get step names from both manifests that use this chaining file + aio_steps = self._get_manifest_step_names(workspace / "manifests" / "aio-install.yaml") + secretsync_steps = self._get_manifest_step_names(workspace / "manifests" / "secretsync.yaml") + all_valid_steps = aio_steps | secretsync_steps + + for step_name, output_path, raw in refs: + assert step_name in all_valid_steps, ( + f"secretsync-chaining.yaml references unknown step '{step_name}': {raw}" + ) + + def test_secretsync_chaining_refs_valid_outputs(self, workspace): + """Every output referenced in secretsync-chaining.yaml should exist in resolve-aio.bicep.""" + chaining_file = workspace / "parameters" / "secretsync-chaining.yaml" + refs = self._get_chaining_refs(chaining_file) + + # 
Parse output names from resolve-aio.bicep + resolve_aio = workspace / "templates" / "iot-ops" / "common" / "resolve-aio.bicep" + bicep_content = resolve_aio.read_text(encoding="utf-8") + output_names = set(re.findall(r"^output\s+(\w+)\s+", bicep_content, re.MULTILINE)) + assert len(output_names) > 0, "No outputs found in resolve-aio.bicep" + + for step_name, output_path, raw in refs: + if step_name != "resolve-aio": + continue + # The top-level output name is the first segment of the path + top_level_output = output_path.split(".")[0] + assert top_level_output in output_names, ( + f"secretsync-chaining.yaml references unknown output " + f"'{top_level_output}' from resolve-aio: {raw}\n" + f"Available outputs: {sorted(output_names)}" + ) + + def test_chaining_yaml_refs_in_aio_install(self, workspace): + """chaining.yaml should only reference steps that exist in aio-install.yaml.""" + chaining_file = workspace / "parameters" / "chaining.yaml" + refs = self._get_chaining_refs(chaining_file) + + if not refs: + return + + aio_steps = self._get_manifest_step_names(workspace / "manifests" / "aio-install.yaml") + + for step_name, output_path, raw in refs: + assert step_name in aio_steps, ( + f"chaining.yaml references unknown step '{step_name}': {raw}" + ) + + def test_post_instance_yaml_refs_in_aio_install(self, workspace): + """post-instance.yaml should only reference steps that exist in aio-install.yaml.""" + chaining_file = workspace / "parameters" / "post-instance.yaml" + refs = self._get_chaining_refs(chaining_file) + + if not refs: + return + + aio_steps = self._get_manifest_step_names(workspace / "manifests" / "aio-install.yaml") + + for step_name, output_path, raw in refs: + assert step_name in aio_steps, ( + f"post-instance.yaml references unknown step '{step_name}': {raw}" + ) + + +class TestConditionalStepCoverage: + """Every when: condition should reference a property that exists in base-site.yaml.""" + + def _get_conditions_from_manifest(self, manifest_path: 
Path) -> list[tuple[str, str]]: + """Extract (step_name, condition) pairs from a manifest.""" + from siteops.models import Manifest + manifest = Manifest.from_file(manifest_path) + conditions = [] + for step in manifest.steps: + if step.when: + conditions.append((step.name, step.when)) + return conditions + + def _get_base_site_property_paths(self, workspace: Path) -> set[str]: + """Get all dot-separated property paths defined in base-site.yaml.""" + base_path = workspace / "sites" / "base-site.yaml" + with open(base_path, "r", encoding="utf-8") as f: + data = yaml.safe_load(f) + + paths = set() + properties = data.get("properties", {}) + + def collect_paths(d: dict, prefix: str = ""): + for k, v in d.items(): + full = f"{prefix}.{k}" if prefix else k + paths.add(full) + if isinstance(v, dict): + collect_paths(v, full) + + collect_paths(properties) + return paths + + def test_all_when_conditions_reference_known_properties(self, workspace): + """Every when: condition property path should exist in base-site.yaml.""" + known_paths = self._get_base_site_property_paths(workspace) + prop_pattern = re.compile(r"site\.properties\.([\w.]+)") + + manifests_dir = workspace / "manifests" + for manifest_file in sorted(manifests_dir.glob("*.yaml")): + conditions = self._get_conditions_from_manifest(manifest_file) + + for step_name, condition in conditions: + match = prop_pattern.search(condition) + if not match: + continue + + prop_path = match.group(1) + assert prop_path in known_paths, ( + f"{manifest_file.name} step '{step_name}' references unknown property " + f"'site.properties.{prop_path}' in when condition.\n" + f"Known property paths: {sorted(known_paths)}" + ) diff --git a/tests/workspace/test_site_configuration.py b/tests/workspace/test_site_configuration.py new file mode 100644 index 0000000..28da2ba --- /dev/null +++ b/tests/workspace/test_site_configuration.py @@ -0,0 +1,93 @@ +"""Tests that site inheritance resolves correctly and consistently.""" + +from pathlib 
import Path + +import yaml + +from siteops.orchestrator import Orchestrator + + +# All deployOptions defined in base-site.yaml +EXPECTED_DEPLOY_OPTIONS = { + "includeGlobalSite", + "includeEdgeSite", + "includeSolution", + "includeOpcPlcSimulator", + "enableSecretSync", +} + + +class TestSiteInheritanceResolution: + """Every site should load cleanly with complete inherited configuration.""" + + def _get_site_names(self, workspace: Path) -> list[str]: + """Get all Site (not SiteTemplate) names from the workspace.""" + sites_dir = workspace / "sites" + names = [] + for f in sorted(sites_dir.glob("*.yaml")): + with open(f, "r", encoding="utf-8") as fh: + data = yaml.safe_load(fh) + if data and data.get("kind") != "SiteTemplate": + names.append(data.get("name", f.stem)) + return names + + def test_all_sites_load(self, workspace, orchestrator): + """Every Site file should load without errors.""" + site_names = self._get_site_names(workspace) + assert len(site_names) >= 1, "No sites found" + + for name in site_names: + site = orchestrator.load_site(name) + assert site.name == name + assert site.subscription, f"{name}: missing subscription" + assert site.location, f"{name}: missing location" + + def test_all_sites_have_complete_deploy_options(self, workspace, orchestrator): + """Every site should inherit all deployOptions from base-site.yaml.""" + site_names = self._get_site_names(workspace) + + for name in site_names: + site = orchestrator.load_site(name) + deploy_options = site.properties.get("deployOptions", {}) + actual_keys = set(deploy_options.keys()) + missing = EXPECTED_DEPLOY_OPTIONS - actual_keys + assert missing == set(), ( + f"{name}: missing deployOptions keys after inheritance: {missing}" + ) + + def test_base_site_defines_all_deploy_options(self, workspace): + """base-site.yaml should define every expected deployOptions key.""" + base_path = workspace / "sites" / "base-site.yaml" + with open(base_path, "r", encoding="utf-8") as f: + data = yaml.safe_load(f) 
+ + deploy_options = data.get("properties", {}).get("deployOptions", {}) + actual_keys = set(deploy_options.keys()) + missing = EXPECTED_DEPLOY_OPTIONS - actual_keys + assert missing == set(), ( + f"base-site.yaml missing deployOptions keys: {missing}" + ) + + def test_shared_templates_inherit_base(self, workspace): + """All shared SiteTemplates should inherit from base-site.yaml.""" + shared_dir = workspace / "sites" / "shared" + if not shared_dir.is_dir(): + return + + for f in sorted(shared_dir.glob("*.yaml")): + with open(f, "r", encoding="utf-8") as fh: + data = yaml.safe_load(fh) + inherits = data.get("inherits", "") + assert "base-site" in inherits, ( + f"shared/{f.name} does not inherit from base-site.yaml: inherits={inherits}" + ) + + def test_no_site_has_placeholder_subscription(self, workspace, orchestrator): + """Sites should not have obviously placeholder subscription IDs.""" + site_names = self._get_site_names(workspace) + + for name in site_names: + site = orchestrator.load_site(name) + assert site.subscription != "", f"{name}: empty subscription" + # Allow the 00000000 placeholder since committed sites use it + # (real values come from sites.local/ overlays)