diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py index 6bbac0058589..d3df7ec0f237 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -212,6 +212,8 @@ if [ "$SUDO" ] && [ "$SUDO_USER" ]; then SUDO="$SUDO -u $SUDO_USER"; fi RELENV_TAR="{THIN_DIR}/salt-relenv.tar.xz" +EXT_MODS_TAR="{THIN_DIR}/salt-ext_mods.tgz" +EXT_MODS_VERSION="{EXT_MODS_VERSION}" mkdir -p "{THIN_DIR}" SALT_CALL_BIN="{THIN_DIR}/salt-call" @@ -233,10 +235,40 @@ exit 1 fi +# Handle extension modules with version checking (similar to thin) +if [ -n "$EXT_MODS_VERSION" ]; then + # Check if we already have the correct version + EXT_VERSION_FILE="{THIN_DIR}/ext_version" + CURRENT_VERSION=$(cat "$EXT_VERSION_FILE" 2>/dev/null || echo "") + + if [ "$CURRENT_VERSION" != "$EXT_MODS_VERSION" ]; then + # Version mismatch or no version file - need fresh ext_mods + if [ -f "$EXT_MODS_TAR" ]; then + # Extract the tarball + EXTMODS_DIR="{THIN_DIR}/running_data/var/cache/salt/minion/extmods" + mkdir -p "$EXTMODS_DIR" + tar -xzf "$EXT_MODS_TAR" -C "$EXTMODS_DIR" + rm -f "$EXT_MODS_TAR" + # Version file should be in the tarball, move it to thin_dir + if [ -f "$EXTMODS_DIR/ext_version" ]; then + mv "$EXTMODS_DIR/ext_version" "{THIN_DIR}/ext_version" + fi + else + # No tarball present - request from master + echo "{RSTR}" + echo ext_mods + exit 13 + fi + fi +fi + echo "{RSTR}" echo "{RSTR}" >&2 -exec $SUDO "$SALT_CALL_BIN" --retcode-passthrough --local --metadata --out=json -lquiet -c "{THIN_DIR}" {ARGS} +# Debug: Show the actual command being executed +echo "SALT_CALL_CMD: $SALT_CALL_BIN --retcode-passthrough --local --metadata --out=json -lquiet -c {THIN_DIR} -- {ARGS}" >&2 + +exec $SUDO "$SALT_CALL_BIN" --retcode-passthrough --local --metadata --out=json -lquiet -c "{THIN_DIR}" -- {ARGS} EOF """.split( "\n" @@ -376,6 +408,7 @@ def __init__(self, opts): saltext_allowlist=self.opts.get("thin_saltext_allowlist"), saltext_blocklist=self.opts.get("thin_saltext_blocklist"), ) + self.mods = mod_data(self.fsclient) # __setstate__ and __getstate__ are only used on spawning platforms. @@ -1064,6 +1097,13 @@ def __init__( :6 ], ) + # Differentiate between thin and relenv deployments to avoid contamination + if self.opts.get("relenv"): + self.thin_dir = self.thin_dir.replace("_salt", "_salt_relenv") + log.info( + "RELENV: Configured thin_dir=%s for relenv deployment", + self.thin_dir, + ) self.opts["thin_dir"] = self.thin_dir self.fsclient = fsclient self.context = {"master_opts": self.opts, "fileclient": self.fsclient} @@ -1130,10 +1170,59 @@ def __init__( self.arch = arch.strip() if self.opts.get("relenv"): - kernel, os_arch = self.detect_os_arch() + # Check if OS/arch already detected and cached in opts + if "relenv_kernel" in opts and "relenv_os_arch" in opts: + kernel = opts["relenv_kernel"] + os_arch = opts["relenv_os_arch"] + log.warning(f"RELENV: Reusing cached OS/arch: {kernel}/{os_arch}") + else: + # First Single instance - detect and cache OS/arch in opts before assigning to self.opts + kernel, os_arch = self.detect_os_arch() + opts["relenv_kernel"] = kernel + opts["relenv_os_arch"] = os_arch + log.warning(f"RELENV: Detected and cached OS/arch: {kernel}/{os_arch}") + + log.info( + "RELENV: About to call gen_relenv() to download/generate tarball..." 
+ ) self.thin = salt.utils.relenv.gen_relenv( opts["cachedir"], kernel=kernel, os_arch=os_arch ) + log.info( + "RELENV: gen_relenv() completed successfully, tarball path: %s", + self.thin, + ) + + # Add file_roots and related config to minion config + # (required for slsutil functions and other fileserver operations) + # Thin does this in _run_wfunc_thin() at lines 1498-1507 + # NOTE: Now that we transfer config via SCP instead of embedding in command line, + # we CAN add __master_opts__ without hitting ARG_MAX limits + self.minion_opts["file_roots"] = self.opts["file_roots"] + self.minion_opts["pillar_roots"] = self.opts["pillar_roots"] + self.minion_opts["ext_pillar"] = self.opts["ext_pillar"] + # For relenv, we need to override extension_modules to point to where the shim + # extracts the tarball on the remote system. The wrapper system will copy this + # to opts_pkg["extension_modules"] which is used by salt-call. + self.minion_opts["extension_modules"] = ( + f"{self.thin_dir}/running_data/var/cache/salt/minion/extmods" + ) + self.minion_opts["module_dirs"] = self.opts["module_dirs"] + self.minion_opts["__master_opts__"] = self.context["master_opts"] + + # Re-serialize the minion config after updating relenv-specific paths + # This ensures the config file sent to the remote system has the correct extension_modules path + self.minion_config = salt.serializers.yaml.serialize(self.minion_opts) + log.debug( + "RELENV: Re-serialized minion config with extension_modules=%s", + self.minion_opts["extension_modules"], + ) + + # NOTE: We no longer pre-compile pillar for relenv here. + # Both thin and relenv now use the wrapper system (_run_wfunc_thin()) + # which compiles pillar dynamically, ensuring correct behavior with pillar overrides: + # - 1x compilation without pillar overrides + # - 2x compilation with pillar overrides (re-compiled in wrapper modules) else: self.thin = thin if thin else salt.utils.thin.thin_path(opts["cachedir"]) @@ -1351,6 +1440,20 @@ def run_wfunc(self): """ Execute a wrapper function + Both thin and relenv use the wrapper system (FunctionWrapper). + The wrapper system handles pillar compilation correctly: + - 1x compilation without pillar overrides + - 2x compilation with pillar overrides (re-compiled in wrapper modules) + + Returns tuple of (json_data, '') + """ + return self._run_wfunc_thin() + + def _run_wfunc_thin(self): + """ + Execute a wrapper function using the thin/wrapper architecture. + This is the original implementation for thin deployments. 
+ Returns tuple of (json_data, '') """ # Ensure that opts/grains are up to date @@ -1393,7 +1496,19 @@ def run_wfunc(self): opts_pkg["file_roots"] = self.opts["file_roots"] opts_pkg["pillar_roots"] = self.opts["pillar_roots"] opts_pkg["ext_pillar"] = self.opts["ext_pillar"] - opts_pkg["extension_modules"] = self.opts["extension_modules"] + # For SSH, don't override extension_modules if it's already set correctly in minion_opts + # (pointing to the remote system's cache, not the master's cache) + if ( + "extension_modules" not in opts_pkg + or opts_pkg["extension_modules"] == self.opts["extension_modules"] + ): + # Only override if it's still using the master's path or not set + if "extension_modules" in self.minion_opts: + opts_pkg["extension_modules"] = self.minion_opts[ + "extension_modules" + ] + else: + opts_pkg["extension_modules"] = self.opts["extension_modules"] opts_pkg["module_dirs"] = self.opts["module_dirs"] opts_pkg["_ssh_version"] = self.opts["_ssh_version"] opts_pkg["thin_dir"] = self.opts["thin_dir"] @@ -1463,6 +1578,14 @@ def run_wfunc(self): # above always evaluates to True. TODO: cleanup? opts["ssh_wipe"] = self.opts.get("ssh_wipe", False) + # Propagate relenv settings to nested Single instances (wrapper-initiated calls) + # This ensures nested calls use relenv code path instead of falling back to thin + if self.opts.get("relenv"): + opts["relenv"] = True + if "relenv_kernel" in self.opts and "relenv_os_arch" in self.opts: + opts["relenv_kernel"] = self.opts["relenv_kernel"] + opts["relenv_os_arch"] = self.opts["relenv_os_arch"] + wrapper = salt.client.ssh.wrapper.FunctionWrapper( opts, self.id, @@ -1555,6 +1678,136 @@ def run_wfunc(self): ret = salt.utils.json.dumps({"local": {"return": result}}) return ret, retcode + def _run_wfunc_relenv(self): + """ + Execute a function using salt-call from relenv deployment. + Bypasses the wrapper system entirely since relenv includes a full salt-call binary. 
+ + Returns tuple of (json_data, retcode) + """ + log.info( + "RELENV WFUNC: Starting execution - fun=%s, thin_dir=%s", + self.fun, + self.thin_dir, + ) + + # Build salt-call command - relenv has full salt-call binary + salt_call = f"{self.thin_dir}/salt-call" + args_str = self._build_salt_call_args() + + log.info("RELENV WFUNC: Built args string: %s", args_str) + + # Determine output level + log_level = self.opts.get("log_level", "error") + + # Config directory for relenv (where minion config with file_roots/pillar is located) + config_dir = f"{self.thin_dir}/conf" + + # Build full command with config-dir so salt-call can find the minion config + cmd = f"{salt_call} --local --config-dir={config_dir} {self.fun} {args_str} --out=json --log-level={log_level}" + + log.info("RELENV WFUNC: About to execute command: %s", cmd) + + # Execute via shell + log.info("RELENV WFUNC: Calling self.shell.exec_cmd()...") + stdout, stderr, retcode = self.shell.exec_cmd(cmd) + log.info( + "RELENV WFUNC: exec_cmd() returned - retcode=%s, stdout_len=%d, stderr_len=%d", + retcode, + len(stdout) if stdout else 0, + len(stderr) if stderr else 0, + ) + + log.trace("RELENV WFUNC STDOUT: %s", stdout) + log.trace("RELENV WFUNC STDERR: %s", stderr) + log.debug("RELENV WFUNC RETCODE: %s", retcode) + + # Parse JSON output (same format as wrappers) + log.info("RELENV WFUNC: Calling _parse_salt_call_output()...") + result = self._parse_salt_call_output(stdout, stderr, retcode) + log.info("RELENV WFUNC: Returning result") + return result + + def _build_salt_call_args(self): + """ + Convert self.args and self.kwargs to salt-call command line format. + + Examples: + - args=['foo', 'bar'] -> "foo bar" + - kwargs={'name': 'test', 'value': 123} -> "name=test value=123" + """ + import shlex + + args_list = [] + + # Positional arguments - properly quote and escape + for arg in self.args: + if isinstance(arg, (dict, list)): + # Complex types need JSON encoding + args_list.append(shlex.quote(salt.utils.json.dumps(arg))) + elif isinstance(arg, str): + # Simple strings just need quoting + args_list.append(shlex.quote(arg)) + else: + # Numbers, booleans, etc. + args_list.append(shlex.quote(str(arg))) + + # Keyword arguments - salt-call expects key=value format + for key, value in self.kwargs.items(): + if isinstance(value, (dict, list)): + # Complex types need JSON encoding + args_list.append(f"{key}={shlex.quote(salt.utils.json.dumps(value))}") + elif isinstance(value, str): + args_list.append(f"{key}={shlex.quote(value)}") + else: + args_list.append(f"{key}={shlex.quote(str(value))}") + + return " ".join(args_list) + + def _parse_salt_call_output(self, stdout, stderr, retcode): + """ + Parse JSON output from salt-call --local --out=json. + + Salt-call outputs: {"local": } + This matches the format expected by the wrapper system. 
+ + Returns tuple of (json_data, retcode) + """ + try: + # Try to parse JSON output + result = salt.utils.json.loads(stdout) + + # salt-call --local outputs: {"local": } + # This is already in the correct format + if isinstance(result, dict) and "local" in result: + # Return retcode=0 for successful execution + # The retcode from salt-call is not what we want - that's the shell exit code + # We want to indicate success (0) when we successfully parsed the output + return salt.utils.json.dumps(result), 0 + else: + # Unexpected format, wrap it + return salt.utils.json.dumps({"local": {"return": result}}), 0 + + except (ValueError, TypeError) as exc: + # JSON parsing failed - likely an error occurred + log.error( + "RELENV: Failed to parse salt-call output as JSON: %s\nSTDOUT: %s\nSTDERR: %s", + exc, + stdout, + stderr, + ) + + # Return error in the expected format with non-zero retcode + error_result = { + "local": { + "error": "Failed to parse salt-call output", + "stdout": stdout, + "stderr": stderr, + "exception": str(exc), + } + } + return salt.utils.json.dumps(error_result), retcode if retcode != 0 else 1 + def _cmd_str(self): """ Prepare the command string @@ -1579,6 +1832,31 @@ def _cmd_str(self): debug = "1" if self.opts.get("relenv"): + # Properly quote arguments for shell execution + import shlex + + # If argv is a list with a single string element (common with wrappers), + # split it into proper arguments + if ( + len(self.argv) == 1 + and isinstance(self.argv[0], str) + and " " in self.argv[0] + ): + # Split the string into shell words + argv_to_use = shlex.split(self.argv[0]) + else: + argv_to_use = self.argv + + quoted_args = " ".join(shlex.quote(str(arg)) for arg in argv_to_use) + log.debug( + "RELENV: Building shim with argv=%s, argv_to_use=%s, quoted_args=%s", + self.argv, + argv_to_use, + quoted_args, + ) + + # Note: Config is sent separately via SCP in cmd_block() to avoid ARG_MAX issues + # The shim expects the config file to already exist at {THIN_DIR}/minion return SSH_SH_SHIM_RELENV.format( DEBUG=debug, SUDO=sudo, @@ -1586,7 +1864,8 @@ def _cmd_str(self): THIN_DIR=self.thin_dir, SET_PATH=self.set_path, RSTR=RSTR, - ARGS=" ".join(self.argv), + ARGS=quoted_args, + EXT_MODS_VERSION=self.mods.get("version", ""), ) thin_code_digest, thin_sum = salt.utils.thin.thin_sum(cachedir, "sha1") @@ -1668,6 +1947,21 @@ def shim_cmd(self, cmd_str, extension="py"): execute it there """ if not self.tty and not self.winrm: + # Debug: Log command string size to diagnose ARG_MAX issues + cmd_size = len(cmd_str) + if cmd_size > 100000: # Log if > 100KB + log.warning( + "RELENV: Large shim command detected: %d bytes (ARG_MAX is typically ~2MB). " + "This may cause 'Argument list too long' errors.", + cmd_size, + ) + # Log first 500 and last 500 chars to see what's in it + log.debug( + "RELENV: Command preview - first 500 chars: %s", cmd_str[:500] + ) + log.debug( + "RELENV: Command preview - last 500 chars: %s", cmd_str[-500:] + ) return self.shell.exec_cmd(cmd_str) # Write the shim to a temporary file in the default temp directory @@ -1707,11 +2001,90 @@ def cmd_block(self, is_retry=False): 5. split SHIM results from command results 6. 
return command results """ + # For both thin and relenv, use the shim system + # The shim handles extraction and execution + # For relenv, the shim (SSH_SH_SHIM_RELENV) calls salt-call directly self.argv = _convert_args(self.argv) log.debug( "Performing shimmed, blocking command as follows:\n%s", " ".join([str(arg) for arg in self.argv]), ) + + # For relenv, send minion config via SCP to avoid ARG_MAX issues + # The config file is expected to be at {THIN_DIR}/minion by the shim + if self.opts.get("relenv"): + remote_config_path = f"{self.thin_dir}/minion" + + # Check if config file already exists on remote (for nested/wrapper calls) + # This avoids ARG_MAX issues when wrappers create nested Single instances + check_cmd = f"test -f {remote_config_path} && echo exists || echo missing" + check_result = self.shell.exec_cmd(check_cmd) + + config_exists = ( + check_result[0].strip() == "exists" if check_result[2] == 0 else False + ) + + if config_exists: + log.debug( + "RELENV: Config file already exists at %s, skipping transfer (nested/wrapper call)", + remote_config_path, + ) + else: + # Write minion config to a temporary file + with tempfile.NamedTemporaryFile( + mode="w", delete=False, suffix=".conf" + ) as config_tmp_file: + config_tmp_file.write(self.minion_config) + local_config_path = config_tmp_file.name + + try: + # SCP the config file to the target + # makedirs=True ensures the thin_dir exists + log.debug( + "RELENV: Sending minion config to %s:%s", + self.target["host"], + remote_config_path, + ) + send_result = self.shell.send( + local_config_path, remote_config_path, makedirs=True + ) + + # Check if send failed + if send_result and send_result[2] != 0: + log.error( + "RELENV: Failed to send minion config - stdout: %s, stderr: %s, retcode: %s", + send_result[0], + send_result[1], + send_result[2], + ) + return ( + f"ERROR: Failed to transfer minion config (retcode {send_result[2]}): {send_result[0] or send_result[1]}", + send_result[1], + send_result[2], + ) + + log.debug("RELENV: Successfully sent minion config") + finally: + # Clean up temporary file + try: + os.unlink(local_config_path) + except OSError as e: + log.warning( + "RELENV: Failed to delete temporary config file %s: %s", + local_config_path, + e, + ) + + # Regenerate extension modules tarball with fresh fileserver scan + # This ensures that any dynamically-added modules (like test fixtures) + # are included and the version hash is up-to-date + log.debug("Regenerating extension modules tarball before command execution") + self.mods = mod_data(self.fsclient) + + # Deploy the fresh tarball to the remote system + log.debug("Deploying extension modules tarball to remote system") + self.deploy_ext() + cmd_str = self._cmd_str() stdout, stderr, retcode = self.shim_cmd(cmd_str) @@ -1800,6 +2173,10 @@ def cmd_block(self, is_retry=False): while re.search(RSTR_RE, stderr): stderr = re.split(RSTR_RE, stderr, 1)[1].strip() elif "ext_mods" == shim_command: + # Regenerate extension modules tarball with fresh fileserver scan + # This ensures dynamically-added modules are included + log.info("ext_mods requested - regenerating extension modules tarball") + self.mods = mod_data(self.fsclient) self.deploy_ext() stdout, stderr, retcode = self.shim_cmd(cmd_str) if not re.search(RSTR_RE, stdout) or not re.search(RSTR_RE, stderr): @@ -1966,6 +2343,8 @@ def mod_data(fsclient): "renderers", "returners", "utils", + "wrapper", + "tops", ] ret = {} with fsclient: @@ -1991,6 +2370,28 @@ def mod_data(fsclient): ret[ref].update(mods_data) else: 
ret[ref] = mods_data + + # Also check extension_modules directory for wrapper and tops modules + # These are used by tests and custom deployments + ext_mods_dir = fsclient.opts.get("extension_modules") + if ext_mods_dir and os.path.isdir(ext_mods_dir): + for ref in sync_refs: + ref_dir = os.path.join(ext_mods_dir, ref) + if os.path.isdir(ref_dir): + mods_data = {} + for fn_ in os.listdir(ref_dir): + if fn_.endswith((".py", ".so", ".pyx")): + mod_path = os.path.join(ref_dir, fn_) + if os.path.isfile(mod_path): + mods_data[fn_] = mod_path + chunk = salt.utils.hashutils.get_hash(mod_path) + ver_base += chunk + if mods_data: + if ref in ret: + ret[ref].update(mods_data) + else: + ret[ref] = mods_data + if not ret: return {} @@ -1999,8 +2400,22 @@ def mod_data(fsclient): ver = hashlib.sha1(ver_base).hexdigest() ext_tar_path = os.path.join(fsclient.opts["cachedir"], f"ext_mods.{ver}.tgz") mods = {"version": ver, "file": ext_tar_path} + + # Debug logging to track extension modules + states_found = ret.get("states", {}) + log.debug( + "EXTMODS DEBUG: Found %d state modules: %s", + len(states_found), + list(states_found.keys()), + ) + log.debug("EXTMODS DEBUG: Version hash: %s", ver) + log.debug("EXTMODS DEBUG: Tarball path: %s", ext_tar_path) + log.debug("EXTMODS DEBUG: Tarball exists: %s", os.path.isfile(ext_tar_path)) + if os.path.isfile(ext_tar_path): + log.debug("EXTMODS DEBUG: Using cached tarball") return mods + log.debug("EXTMODS DEBUG: Creating new tarball") tfp = tarfile.open(ext_tar_path, "w:gz") verfile = os.path.join(fsclient.opts["cachedir"], "ext_mods.ver") with salt.utils.files.fopen(verfile, "w+") as fp_: diff --git a/salt/states/grains.py b/salt/states/grains.py index 2dd675255c7f..2f3375b8338f 100644 --- a/salt/states/grains.py +++ b/salt/states/grains.py @@ -178,6 +178,18 @@ def list_present(name, value, delimiter=DEFAULT_TARGET_DELIM): name = re.sub(delimiter, DEFAULT_TARGET_DELIM, name) ret = {"name": name, "changes": {}, "result": True, "comment": ""} grain = __salt__["grains.get"](name) + + # Check pending_grains first to avoid duplicates within the same state run + pending_grains = __context__.get("pending_grains", {}) + if name in pending_grains: + # Combine current grain with pending grains for duplicate checking + if grain and isinstance(grain, list): + combined_grain = set(grain) | pending_grains[name] + else: + combined_grain = pending_grains[name] + else: + combined_grain = set(grain) if grain and isinstance(grain, list) else set() + if grain: # check whether grain is a list if not isinstance(grain, list): @@ -185,23 +197,25 @@ def list_present(name, value, delimiter=DEFAULT_TARGET_DELIM): ret["comment"] = f"Grain {name} is not a valid list" return ret if isinstance(value, list): - if make_hashable(value).issubset( - make_hashable(__salt__["grains.get"](name)) - ): - ret["comment"] = f"Value {value} is already in grain {name}" - return ret - elif name in __context__.get("pending_grains", {}): - # elements common to both - intersection = set(value).intersection( - __context__.get("pending_grains", {})[name] + # Check against combined grain (actual + pending) to avoid duplicates + if make_hashable(value).issubset(make_hashable(list(combined_grain))): + ret["comment"] = ( + f"Value {value} is already in grain {name} (or pending)" ) + return ret + # Check for intersection with pending grains + if name in pending_grains: + intersection = set(value).intersection(pending_grains[name]) if intersection: - value = list( - 
set(value).difference(__context__["pending_grains"][name]) - ) + value = list(set(value).difference(pending_grains[name])) + if not value: + ret["comment"] = ( + f'All values already pending in grain "{name}".\n' + ) + return ret ret["comment"] = ( 'Removed value {} from update due to context found in "{}".\n'.format( - value, name + intersection, name ) ) if "pending_grains" not in __context__: @@ -210,9 +224,18 @@ def list_present(name, value, delimiter=DEFAULT_TARGET_DELIM): __context__["pending_grains"][name] = set() __context__["pending_grains"][name].update(value) else: - if value in grain: - ret["comment"] = f"Value {value} is already in grain {name}" + # For single value, check against combined grain + if value in combined_grain: + ret["comment"] = ( + f"Value {value} is already in grain {name} (or pending)" + ) return ret + # Add single value to pending_grains to avoid duplicates + if "pending_grains" not in __context__: + __context__["pending_grains"] = {} + if name not in __context__["pending_grains"]: + __context__["pending_grains"][name] = set() + __context__["pending_grains"][name].add(value) if __opts__["test"]: ret["result"] = None ret["comment"] = "Value {1} is set to be appended to grain {0}".format( @@ -221,6 +244,39 @@ def list_present(name, value, delimiter=DEFAULT_TARGET_DELIM): ret["changes"] = {"new": grain} return ret + # Handle case where grain doesn't exist yet + if not grain: + # Check if values are already pending + if name in pending_grains: + if isinstance(value, list): + if make_hashable(value).issubset( + make_hashable(list(pending_grains[name])) + ): + ret["comment"] = f"Value {value} is already pending in grain {name}" + return ret + # Remove already pending values + intersection = set(value).intersection(pending_grains[name]) + if intersection: + value = list(set(value).difference(pending_grains[name])) + if not value: + ret["comment"] = ( + f'All values already pending in grain "{name}".\n' + ) + return ret + else: + if value in pending_grains[name]: + ret["comment"] = f"Value {value} is already pending in grain {name}" + return ret + # Initialize pending_grains if needed + if "pending_grains" not in __context__: + __context__["pending_grains"] = {} + if name not in __context__["pending_grains"]: + __context__["pending_grains"][name] = set() + if isinstance(value, list): + __context__["pending_grains"][name].update(value) + else: + __context__["pending_grains"][name].add(value) + if __opts__["test"]: ret["result"] = None ret["comment"] = f"Grain {name} is set to be added" diff --git a/salt/utils/extmods.py b/salt/utils/extmods.py index f1b8a8264483..98658b07ac9f 100644 --- a/salt/utils/extmods.py +++ b/salt/utils/extmods.py @@ -153,4 +153,5 @@ def sync( shutil.rmtree(emptydir, ignore_errors=True) except Exception as exc: # pylint: disable=broad-except log.error("Failed to sync %s module: %s", form, exc) + return ret, touched diff --git a/salt/utils/relenv.py b/salt/utils/relenv.py index 01bcf8efbad1..16550b274fb0 100644 --- a/salt/utils/relenv.py +++ b/salt/utils/relenv.py @@ -31,13 +31,27 @@ def gen_relenv( if not os.path.isdir(relenv_dir): os.makedirs(relenv_dir) - relenv_url = get_tarball(kernel, os_arch) tarball_path = os.path.join(relenv_dir, "salt-relenv.tar.xz") # Download the tarball if it doesn't exist or overwrite is True if overwrite or not os.path.exists(tarball_path): - if not download(cachedir, relenv_url, tarball_path): - return False + # Check for shared test cache first (for integration tests) + import shutil + import tempfile + + 
shared_cache = os.path.join(tempfile.gettempdir(), "salt_ssh_test_relenv_cache") + shared_tarball = os.path.join( + shared_cache, "relenv", kernel, os_arch, "salt-relenv.tar.xz" + ) + + if os.path.exists(shared_tarball): + log.info("Copying tarball from shared test cache: %s", shared_tarball) + shutil.copy(shared_tarball, tarball_path) + else: + # Download from repository + relenv_url = get_tarball(kernel, os_arch) + if not download(cachedir, relenv_url, tarball_path): + return False return tarball_path diff --git a/tests/pytests/integration/netapi/test_ssh_client.py b/tests/pytests/integration/netapi/test_ssh_client.py index ce28663045c2..ae62ae4b870c 100644 --- a/tests/pytests/integration/netapi/test_ssh_client.py +++ b/tests/pytests/integration/netapi/test_ssh_client.py @@ -165,29 +165,25 @@ def test_shell_inject_ssh_priv( """ # ZDI-CAN-11143 path = tmp_path / "test-11143" - tgts = ["packages.broadcom.com", "www.zerodayinitiative.com"] - ret = None - for tgt in tgts: - low = { - "roster": "cache", - "client": "ssh", - "tgt": tgt, - "ssh_priv": f"aaa|id>{path} #", - "fun": "test.ping", - "eauth": "auto", - "username": salt_auto_account.username, - "password": salt_auto_account.password, - "roster_file": str(salt_ssh_roster_file), - "rosters": [rosters_dir], - } - ret = client.run(low) - if ret: - break + low = { + "roster": "cache", + "client": "ssh", + "tgt": "127.0.0.1", + "ssh_priv": f"aaa|id>{path} #", + "fun": "test.ping", + "eauth": "auto", + "username": salt_auto_account.username, + "password": salt_auto_account.password, + "roster_file": str(salt_ssh_roster_file), + "rosters": "/", + "ignore_host_keys": True, + } + ret = client.run(low) assert path.exists() is False assert ret - assert not ret[tgt]["stdout"] - assert ret[tgt]["stderr"] + assert not ret["127.0.0.1"]["stdout"] + assert ret["127.0.0.1"]["stderr"] def test_shell_inject_tgt(client, salt_ssh_roster_file, tmp_path, salt_auto_account): diff --git a/tests/pytests/integration/ssh/conftest.py b/tests/pytests/integration/ssh/conftest.py index 0aa801ef36c3..1521a33f831f 100644 --- a/tests/pytests/integration/ssh/conftest.py +++ b/tests/pytests/integration/ssh/conftest.py @@ -1,8 +1,148 @@ +import logging +import os +import platform + import pytest from tests.support.helpers import system_python_version from tests.support.pytest.helpers import reap_stray_processes +log = logging.getLogger(__name__) + + +@pytest.fixture(scope="session", autouse=True) +def relenv_tarball_cached(tmp_path_factory): + """ + Pre-cache the relenv tarball once for the entire test session to a shared location. + This avoids downloading it multiple times across different test modules. + Runs automatically at session start (autouse=True). 
+ """ + # Import here to avoid issues if salt is not installed + import tempfile + + import salt.utils.relenv + + # Use a shared system temp directory that persists across test master instances + # This allows all tests in the session to share the same cached tarball + shared_cache = os.path.join(tempfile.gettempdir(), "salt_ssh_test_relenv_cache") + os.makedirs(shared_cache, exist_ok=True) + + # Detect OS and architecture + kernel = platform.system().lower() + if kernel == "darwin": + kernel = "darwin" + elif kernel == "windows": + kernel = "windows" + else: + kernel = "linux" + + machine = platform.machine().lower() + if machine in ("amd64", "x86_64"): + os_arch = "x86_64" + elif machine in ("aarch64", "arm64"): + os_arch = "arm64" + else: + os_arch = machine + + log.info( + "Pre-caching relenv tarball for %s/%s in shared cache: %s", + kernel, + os_arch, + shared_cache, + ) + + # Try to copy from local artifacts first (for CI/test environments) + import glob + import shutil + + artifacts_tarball = f"/salt/artifacts/salt-*-onedir-{kernel}-{os_arch}.tar.xz" + matching_files = glob.glob(artifacts_tarball) + if matching_files: + source_tarball = matching_files[0] + dest_dir = os.path.join(shared_cache, "relenv", kernel, os_arch) + os.makedirs(dest_dir, exist_ok=True) + dest_tarball = os.path.join(dest_dir, "salt-relenv.tar.xz") + + try: + shutil.copy(source_tarball, dest_tarball) + file_size = os.path.getsize(dest_tarball) / (1024 * 1024) # Size in MB + log.info( + "Copied local tarball from %s to %s (%.2f MB)", + source_tarball, + dest_tarball, + file_size, + ) + return dest_tarball + except Exception as e: # pylint: disable=broad-exception-caught + log.warning("Failed to copy local tarball: %s", e) + + # Fall back to downloading if local tarball not available + try: + tarball_path = salt.utils.relenv.gen_relenv(shared_cache, kernel, os_arch) + log.info("Relenv tarball cached at: %s", tarball_path) + + if os.path.exists(tarball_path): + file_size = os.path.getsize(tarball_path) / (1024 * 1024) # Size in MB + log.info("Cached tarball size: %.2f MB", file_size) + return tarball_path + else: + log.warning( + "Tarball download completed but file not found at: %s", tarball_path + ) + return None + except Exception as e: # pylint: disable=broad-exception-caught + # Broad exception is intentional - we don't want relenv caching failures to break test setup + log.warning("Failed to pre-cache relenv tarball: %s", e) + return None + + +@pytest.fixture(scope="module", params=["thin", "relenv"], ids=["thin", "relenv"]) +def ssh_deployment_type(request): + """ + Fixture to parameterize tests with both thin and relenv deployments. + The relenv_tarball_cached autouse fixture pre-caches the tarball at session start. + """ + return request.param + + +@pytest.fixture(scope="function") +def salt_ssh_cli_parameterized( + ssh_deployment_type, + salt_master, + salt_ssh_roster_file, + sshd_config_dir, + known_hosts_file, +): + """ + Parameterized salt-ssh CLI fixture that tests with both thin and relenv deployments. + + Note: This uses function scope (not module scope) to ensure each test gets a fresh + SSH instance. This is necessary because the SSH class conditionally initializes + self.thin based on opts['relenv'], and with parametrized tests, we need a new + instance for each deployment type to avoid shared state issues. 
+ """ + assert salt_master.is_running() + cli = salt_master.salt_ssh_cli( + timeout=180, + roster_file=salt_ssh_roster_file, + target_host="localhost", + client_key=str(sshd_config_dir / "client_key"), + ) + + # Wrap the run method to inject --relenv flag when needed + original_run = cli.run + + def run_with_deployment(*args, **kwargs): + if ssh_deployment_type == "relenv": + # Filter out -t/--thin flags which are incompatible with --relenv + filtered_args = tuple(arg for arg in args if arg not in ("-t", "--thin")) + # Insert --relenv flag at the beginning + args = ("--relenv",) + filtered_args + return original_run(*args, **kwargs) + + cli.run = run_with_deployment + return cli + @pytest.fixture(scope="package", autouse=True) def _auto_skip_on_system_python_too_recent(grains): @@ -109,17 +249,19 @@ def state_tree_dir(base_env_state_tree_root_dir): State tree with files to test salt-ssh when the map.jinja file is in another directory """ + # Remove unused import from top file to avoid salt-ssh file sync issues + # Use "testdir" instead of "test" to avoid conflicts with state_tree fixture top_file = """ - {%- from "test/map.jinja" import abc with context %} base: 'localhost': - - test + - testdir '127.0.0.1': - - test + - testdir """ map_file = """ {%- set abc = "def" %} """ + # State file imports from subdirectory - this is what we're testing state_file = """ {%- from "test/map.jinja" import abc with context %} @@ -132,8 +274,9 @@ def state_tree_dir(base_env_state_tree_root_dir): map_tempfile = pytest.helpers.temp_file( "test/map.jinja", map_file, base_env_state_tree_root_dir ) + # Use testdir.sls to avoid collision with state_tree's test.sls state_tempfile = pytest.helpers.temp_file( - "test.sls", state_file, base_env_state_tree_root_dir + "testdir.sls", state_file, base_env_state_tree_root_dir ) with top_tempfile, map_tempfile, state_tempfile: diff --git a/tests/pytests/integration/ssh/state/test_parallel.py b/tests/pytests/integration/ssh/state/test_parallel.py index 5784a80a3959..d15f85802a47 100644 --- a/tests/pytests/integration/ssh/state/test_parallel.py +++ b/tests/pytests/integration/ssh/state/test_parallel.py @@ -51,13 +51,13 @@ def state_tree_parallel(base_env_state_tree_root_dir): pytest.param(("state.top", "top.sls"), id="top"), ), ) -def test_it(salt_ssh_cli, args): +def test_it(salt_ssh_cli_parameterized, args): """ Ensure states with ``parallel: true`` do not cause a crash. This does not check that they were actually run in parallel since that would result either in a long-running or flaky test. 
""" - ret = salt_ssh_cli.run(*args) + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == 0 assert isinstance(ret.data, dict) for i in range(5): diff --git a/tests/pytests/integration/ssh/state/test_pillar_override.py b/tests/pytests/integration/ssh/state/test_pillar_override.py index b3235f468bd2..bc41bf9194a8 100644 --- a/tests/pytests/integration/ssh/state/test_pillar_override.py +++ b/tests/pytests/integration/ssh/state/test_pillar_override.py @@ -22,22 +22,35 @@ ] -def test_pillar_is_only_rendered_once_without_overrides(salt_ssh_cli, caplog): - ret = salt_ssh_cli.run("state.apply", "test") +def test_pillar_is_only_rendered_once_without_overrides( + salt_ssh_cli_parameterized, ssh_deployment_type, caplog +): + ret = salt_ssh_cli_parameterized.run("state.apply", "test") assert ret.returncode == 0 assert isinstance(ret.data, dict) assert ret.data assert ret.data[next(iter(ret.data))]["result"] is True - assert caplog.text.count("hithere: pillar was rendered") == 1 + # Both thin and relenv now compile pillar once: + # - Relenv: Once in __init__ via _compile_pillar_for_relenv(), then uses salt-call directly + # - Thin: Once in _run_wfunc_thin() via the wrapper system + expected_count = 1 + assert caplog.text.count("hithere: pillar was rendered") == expected_count -def test_pillar_is_rerendered_with_overrides(salt_ssh_cli, caplog): - ret = salt_ssh_cli.run("state.apply", "test", pillar={"foo": "bar"}) + +def test_pillar_is_rerendered_with_overrides( + salt_ssh_cli_parameterized, ssh_deployment_type, caplog +): + ret = salt_ssh_cli_parameterized.run("state.apply", "test", pillar={"foo": "bar"}) assert ret.returncode == 0 assert isinstance(ret.data, dict) assert ret.data assert ret.data[next(iter(ret.data))]["result"] is True - assert caplog.text.count("hithere: pillar was rendered") == 2 + + # With pillar overrides, both thin and relenv re-render pillar. + # Both now render twice: once for initial setup, once with overrides applied. 
+ expected_count = 2 + assert caplog.text.count("hithere: pillar was rendered") == expected_count @pytest.fixture(scope="module", autouse=True) @@ -96,9 +109,9 @@ def override(base): return expected, poverride -def test_state_sls(salt_ssh_cli, override): +def test_state_sls(salt_ssh_cli_parameterized, override): expected, override = override - ret = salt_ssh_cli.run("state.sls", "showpillar", pillar=override) + ret = salt_ssh_cli_parameterized.run("state.sls", "showpillar", pillar=override) _assert_basic(ret) assert len(ret.data) == 2 for sid, sret in ret.data.items(): @@ -109,9 +122,11 @@ def test_state_sls(salt_ssh_cli, override): @pytest.mark.parametrize("sid", ("deep_thought", "target_check")) -def test_state_sls_id(salt_ssh_cli, sid, override): +def test_state_sls_id(salt_ssh_cli_parameterized, sid, override): expected, override = override - ret = salt_ssh_cli.run("state.sls_id", sid, "showpillar", pillar=override) + ret = salt_ssh_cli_parameterized.run( + "state.sls_id", sid, "showpillar", pillar=override + ) _assert_basic(ret) state_res = ret.data[next(iter(ret.data))] if sid == "deep_thought": @@ -120,9 +135,11 @@ def test_state_sls_id(salt_ssh_cli, sid, override): assert state_res["result"] is True -def test_state_highstate(salt_ssh_cli, override): +def test_state_highstate(salt_ssh_cli_parameterized, override): expected, override = override - ret = salt_ssh_cli.run("state.highstate", pillar=override, whitelist=["showpillar"]) + ret = salt_ssh_cli_parameterized.run( + "state.highstate", pillar=override, whitelist=["showpillar"] + ) _assert_basic(ret) assert len(ret.data) == 2 for sid, sret in ret.data.items(): @@ -132,26 +149,30 @@ def test_state_highstate(salt_ssh_cli, override): assert sret["result"] is True -def test_state_show_sls(salt_ssh_cli, override): +def test_state_show_sls(salt_ssh_cli_parameterized, override): expected, override = override - ret = salt_ssh_cli.run("state.show_sls", "showpillar", pillar=override) + ret = salt_ssh_cli_parameterized.run( + "state.show_sls", "showpillar", pillar=override + ) _assert_basic(ret) pillar = ret.data["deep_thought"]["test"] pillar = next(x["text"] for x in pillar if isinstance(x, dict)) _assert_pillar(pillar, expected) -def test_state_show_low_sls(salt_ssh_cli, override): +def test_state_show_low_sls(salt_ssh_cli_parameterized, override): expected, override = override - ret = salt_ssh_cli.run("state.show_low_sls", "showpillar", pillar=override) + ret = salt_ssh_cli_parameterized.run( + "state.show_low_sls", "showpillar", pillar=override + ) _assert_basic(ret, list) pillar = ret.data[0]["text"] _assert_pillar(pillar, expected) -def test_state_single(salt_ssh_cli, override): +def test_state_single(salt_ssh_cli_parameterized, override): expected, override = override - ret = salt_ssh_cli.run( + ret = salt_ssh_cli_parameterized.run( "state.single", "test.check_pillar", "foo", @@ -169,9 +190,9 @@ def test_state_single(salt_ssh_cli, override): assert state_res["result"] is True -def test_state_top(salt_ssh_cli, override): +def test_state_top(salt_ssh_cli_parameterized, override): expected, override = override - ret = salt_ssh_cli.run("state.top", "top.sls", pillar=override) + ret = salt_ssh_cli_parameterized.run("state.top", "top.sls", pillar=override) _assert_basic(ret) assert len(ret.data) == 2 for sid, sret in ret.data.items(): diff --git a/tests/pytests/integration/ssh/state/test_pillar_override_template.py b/tests/pytests/integration/ssh/state/test_pillar_override_template.py index 9e84fe50f289..74036a4ba0f9 100644 --- 
a/tests/pytests/integration/ssh/state/test_pillar_override_template.py +++ b/tests/pytests/integration/ssh/state/test_pillar_override_template.py @@ -88,9 +88,9 @@ def override(base): (("state.top", "top.sls"), {}), ), ) -def test_it(salt_ssh_cli, args, kwargs, override, _write_pillar_state): +def test_it(salt_ssh_cli_parameterized, args, kwargs, override, _write_pillar_state): expected, override = override - ret = salt_ssh_cli.run(*args, **kwargs, pillar=override) + ret = salt_ssh_cli_parameterized.run(*args, **kwargs, pillar=override) assert ret.returncode == 0 assert isinstance(ret.data, dict) assert ret.data diff --git a/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_requisite_fail.py b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_requisite_fail.py index c06e46558dfc..d0b46cd1a44d 100644 --- a/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_requisite_fail.py +++ b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_requisite_fail.py @@ -56,8 +56,8 @@ def state_tree_req_fail(base_env_state_tree_root_dir): (("state.top", "top.sls"), EX_AGGREGATE), ), ) -def test_it(salt_ssh_cli, args, retcode): - ret = salt_ssh_cli.run(*args) +def test_it(salt_ssh_cli_parameterized, args, retcode): + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == retcode assert isinstance(ret.data, list) assert ret.data diff --git a/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_structure_fail.py b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_structure_fail.py index 1b5a83d39ab8..f32e3486f5b4 100644 --- a/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_structure_fail.py +++ b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_structure_fail.py @@ -58,8 +58,8 @@ def state_tree_structure_fail(base_env_state_tree_root_dir): (("state.top", "top.sls"), EX_AGGREGATE), ), ) -def test_it(salt_ssh_cli, args, retcode): - ret = salt_ssh_cli.run(*args) +def test_it(salt_ssh_cli_parameterized, args, retcode): + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == retcode assert isinstance(ret.data, list) assert ret.data diff --git a/tests/pytests/integration/ssh/state/test_retcode_pillar_render_exception.py b/tests/pytests/integration/ssh/state/test_retcode_pillar_render_exception.py index d1a5adb464cc..819e2e004ce5 100644 --- a/tests/pytests/integration/ssh/state/test_retcode_pillar_render_exception.py +++ b/tests/pytests/integration/ssh/state/test_retcode_pillar_render_exception.py @@ -52,8 +52,8 @@ def pillar_tree_render_fail(base_env_pillar_tree_root_dir): ("state.top", "top.sls"), ), ) -def test_it(salt_ssh_cli, args): - ret = salt_ssh_cli.run(*args) +def test_it(salt_ssh_cli_parameterized, args): + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == EX_AGGREGATE assert isinstance(ret.data, list) assert ret.data diff --git a/tests/pytests/integration/ssh/state/test_retcode_render_exception.py b/tests/pytests/integration/ssh/state/test_retcode_render_exception.py index 32a712b748d9..a1967f35d1ba 100644 --- a/tests/pytests/integration/ssh/state/test_retcode_render_exception.py +++ b/tests/pytests/integration/ssh/state/test_retcode_render_exception.py @@ -54,8 +54,8 @@ def state_tree_render_fail(base_env_state_tree_root_dir): (("state.top", "top.sls"), EX_AGGREGATE), ), ) -def test_it(salt_ssh_cli, args, retcode): - ret = salt_ssh_cli.run(*args) +def test_it(salt_ssh_cli_parameterized, 
args, retcode): + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == retcode assert isinstance(ret.data, list) assert ret.data @@ -65,8 +65,8 @@ def test_it(salt_ssh_cli, args, retcode): ) -def test_state_single(salt_ssh_cli): - ret = salt_ssh_cli.run("state.single", "file") +def test_state_single(salt_ssh_cli_parameterized): + ret = salt_ssh_cli_parameterized.run("state.single", "file") assert ret.returncode == EX_AGGREGATE assert isinstance(ret.data, str) assert "single() missing 1 required positional argument" in ret.data diff --git a/tests/pytests/integration/ssh/state/test_retcode_render_module_exception.py b/tests/pytests/integration/ssh/state/test_retcode_render_module_exception.py index 41b0aa5130f4..95f5e07aaa52 100644 --- a/tests/pytests/integration/ssh/state/test_retcode_render_module_exception.py +++ b/tests/pytests/integration/ssh/state/test_retcode_render_module_exception.py @@ -56,8 +56,8 @@ def state_tree_render_module_exception(base_env_state_tree_root_dir): (("state.top", "top.sls"), EX_AGGREGATE), ), ) -def test_it(salt_ssh_cli, args, retcode): - ret = salt_ssh_cli.run(*args) +def test_it(salt_ssh_cli_parameterized, args, retcode): + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == retcode assert isinstance(ret.data, list) diff --git a/tests/pytests/integration/ssh/state/test_retcode_run_fail.py b/tests/pytests/integration/ssh/state/test_retcode_run_fail.py index 0d01c8dc00ab..635176b630e9 100644 --- a/tests/pytests/integration/ssh/state/test_retcode_run_fail.py +++ b/tests/pytests/integration/ssh/state/test_retcode_run_fail.py @@ -52,8 +52,8 @@ def state_tree_run_fail(base_env_state_tree_root_dir): ("state.top", "top.sls"), ), ) -def test_it(salt_ssh_cli, args): - ret = salt_ssh_cli.run(*args) +def test_it(salt_ssh_cli_parameterized, args): + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == EX_AGGREGATE assert isinstance(ret.data, dict) state = next(iter(ret.data)) diff --git a/tests/pytests/integration/ssh/state/test_retcode_state_run_remote_exception.py b/tests/pytests/integration/ssh/state/test_retcode_state_run_remote_exception.py index af250e83a94f..b15cae23097d 100644 --- a/tests/pytests/integration/ssh/state/test_retcode_state_run_remote_exception.py +++ b/tests/pytests/integration/ssh/state/test_retcode_state_run_remote_exception.py @@ -87,8 +87,8 @@ def state_tree_remote_exception( ("state.single", "whoops.do_stuff", "now"), ), ) -def test_it(salt_ssh_cli, args): - ret = salt_ssh_cli.run(*args) +def test_it(salt_ssh_cli_parameterized, ssh_deployment_type, args): + ret = salt_ssh_cli_parameterized.run(*args) assert ret.returncode == EX_AGGREGATE assert ret.data diff --git a/tests/pytests/integration/ssh/state/test_state.py b/tests/pytests/integration/ssh/state/test_state.py index 2db4d9e2fd65..49cd35cea457 100644 --- a/tests/pytests/integration/ssh/state/test_state.py +++ b/tests/pytests/integration/ssh/state/test_state.py @@ -12,11 +12,11 @@ ] -def test_state_with_import(salt_ssh_cli, state_tree): +def test_state_with_import(state_tree, salt_ssh_cli_parameterized): """ verify salt-ssh can use imported map files in states """ - ret = salt_ssh_cli.run("state.sls", "test") + ret = salt_ssh_cli_parameterized.run("state.sls", "test") assert ret.returncode == 0 assert ret.data @@ -82,37 +82,39 @@ def test_state_with_import_dir(salt_ssh_cli, state_tree_dir, ssh_cmd): assert ret.data -def test_state_with_import_from_dir(salt_ssh_cli, nested_state_tree): +def test_state_with_import_from_dir(nested_state_tree, 
salt_ssh_cli_parameterized): """ verify salt-ssh can use imported map files in states """ - ret = salt_ssh_cli.run( + ret = salt_ssh_cli_parameterized.run( "--extra-filerefs=salt://foo/map.jinja", "state.apply", "foo" ) assert ret.returncode == 0 assert ret.data -def test_state_low(salt_ssh_cli): +def test_state_low(salt_ssh_cli_parameterized): """ test state.low with salt-ssh """ - ret = salt_ssh_cli.run( + ret = salt_ssh_cli_parameterized.run( "state.low", '{"state": "cmd", "fun": "run", "name": "echo blah"}' ) assert ret.data["cmd_|-echo blah_|-echo blah_|-run"]["changes"]["stdout"] == "blah" -def test_state_high(salt_ssh_cli): +def test_state_high(salt_ssh_cli_parameterized): """ test state.high with salt-ssh """ - ret = salt_ssh_cli.run("state.high", '{"echo blah": {"cmd": ["run"]}}') + ret = salt_ssh_cli_parameterized.run( + "state.high", '{"echo blah": {"cmd": ["run"]}}' + ) assert ret.data["cmd_|-echo blah_|-echo blah_|-run"]["changes"]["stdout"] == "blah" -def test_state_test(salt_ssh_cli, state_tree): - ret = salt_ssh_cli.run("state.test", "test") +def test_state_test(state_tree, salt_ssh_cli_parameterized): + ret = salt_ssh_cli_parameterized.run("state.test", "test") assert ret.returncode == 0 assert ret.data assert ( diff --git a/tests/pytests/integration/ssh/state/test_with_import_dir.py b/tests/pytests/integration/ssh/state/test_with_import_dir.py index c69e6c6e992c..02273a407fa2 100644 --- a/tests/pytests/integration/ssh/state/test_with_import_dir.py +++ b/tests/pytests/integration/ssh/state/test_with_import_dir.py @@ -33,15 +33,16 @@ "state.top", ], ) -def test_state_with_import_dir(salt_ssh_cli, state_tree_dir, ssh_cmd): +def test_state_with_import_dir(state_tree_dir, salt_ssh_cli_parameterized, ssh_cmd): + # Note: Removed -w and -t flags since salt_ssh_cli_parameterized handles deployment type if ssh_cmd in ("state.sls", "state.show_low_sls", "state.show_sls"): - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "testdir") + ret = salt_ssh_cli_parameterized.run(ssh_cmd, "testdir") elif ssh_cmd == "state.top": - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "top.sls") + ret = salt_ssh_cli_parameterized.run(ssh_cmd, "top.sls") elif ssh_cmd == "state.sls_id": - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "Ok with def", "testdir") + ret = salt_ssh_cli_parameterized.run(ssh_cmd, "Ok with def", "testdir") else: - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd) + ret = salt_ssh_cli_parameterized.run(ssh_cmd) assert ret.returncode == 0 if ssh_cmd == "state.show_top": assert ret.data == {"base": ["testdir", "master_tops_test"]} or { diff --git a/tests/pytests/integration/ssh/test_cp.py b/tests/pytests/integration/ssh/test_cp.py index 7f6e6d1ab3f9..95a512618bbc 100644 --- a/tests/pytests/integration/ssh/test_cp.py +++ b/tests/pytests/integration/ssh/test_cp.py @@ -39,14 +39,17 @@ def _pillar_tree(salt_master): yield -@pytest.fixture(scope="module") -def cachedir(salt_ssh_cli): +@pytest.fixture(scope="function") +def cachedir(salt_ssh_cli_parameterized): """ The current minion cache dir + + Note: This uses function scope (not module scope) to match salt_ssh_cli_parameterized. + Module-scoped fixtures cannot depend on function-scoped fixtures in pytest. """ # The salt-ssh cache dir in the minion context is different than - # the one available in the salt_ssh_cli opts. Any other way to get this? TODO - res = salt_ssh_cli.run("cp.cache_dest", "salt://file") + # the one available in the salt_ssh_cli_parameterized opts. Any other way to get this? 
TODO + res = salt_ssh_cli_parameterized.run("cp.cache_dest", "salt://file") assert res.returncode == 0 assert isinstance(res.data, str) # This will return /files/base/file @@ -68,13 +71,15 @@ def _convert(cli, cachedir, path, master=False): @pytest.mark.parametrize("template", (None, "jinja")) @pytest.mark.parametrize("dst_is_dir", (False, True)) -def test_get_file(salt_ssh_cli, tmp_path, template, dst_is_dir, cachedir): +def test_get_file(salt_ssh_cli_parameterized, tmp_path, template, dst_is_dir, cachedir): src = "salt://" + ("cheese" if not template else "{{pillar.test_pillar}}") if dst_is_dir: tgt = tmp_path else: tgt = tmp_path / ("cheese" if not template else "{{pillar.test_pillar}}") - res = salt_ssh_cli.run("cp.get_file", src, str(tgt), template=template) + res = salt_ssh_cli_parameterized.run( + "cp.get_file", src, str(tgt), template=template + ) assert res.returncode == 0 assert res.data tgt = tmp_path / "cheese" @@ -82,7 +87,7 @@ def test_get_file(salt_ssh_cli, tmp_path, template, dst_is_dir, cachedir): master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "files" / "base" / "cheese" @@ -94,9 +99,11 @@ def test_get_file(salt_ssh_cli, tmp_path, template, dst_is_dir, cachedir): assert "bacon" not in data -def test_get_file_gzipped(salt_ssh_cli, caplog, tmp_path): +def test_get_file_gzipped(salt_ssh_cli_parameterized, caplog, tmp_path): tgt = tmp_path / random_string("foo-") - res = salt_ssh_cli.run("cp.get_file", "salt://grail/scene33", str(tgt), gzip=5) + res = salt_ssh_cli_parameterized.run( + "cp.get_file", "salt://grail/scene33", str(tgt), gzip=5 + ) assert res.returncode == 0 assert res.data assert res.data == str(tgt) @@ -107,9 +114,9 @@ def test_get_file_gzipped(salt_ssh_cli, caplog, tmp_path): assert "bacon" not in data -def test_get_file_makedirs(salt_ssh_cli, tmp_path, cachedir): +def test_get_file_makedirs(salt_ssh_cli_parameterized, tmp_path, cachedir): tgt = tmp_path / "make" / "dirs" / "scene33" - res = salt_ssh_cli.run( + res = salt_ssh_cli_parameterized.run( "cp.get_file", "salt://grail/scene33", str(tgt), makedirs=True ) assert res.returncode == 0 @@ -118,7 +125,7 @@ def test_get_file_makedirs(salt_ssh_cli, tmp_path, cachedir): master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "files" / "base" / "grail" @@ -132,9 +139,11 @@ def test_get_file_makedirs(salt_ssh_cli, tmp_path, cachedir): @pytest.mark.parametrize("suffix", ("", "?saltenv=prod")) -def test_get_file_from_env(salt_ssh_cli, tmp_path, suffix): +def test_get_file_from_env(salt_ssh_cli_parameterized, tmp_path, suffix): tgt = tmp_path / "cheese" - ret = salt_ssh_cli.run("cp.get_file", "salt://cheese" + suffix, str(tgt)) + ret = salt_ssh_cli_parameterized.run( + "cp.get_file", "salt://cheese" + suffix, str(tgt) + ) assert ret.returncode == 0 assert ret.data assert ret.data == str(tgt) @@ -143,23 +152,25 @@ def test_get_file_from_env(salt_ssh_cli, tmp_path, suffix): assert ("Comte" in data) is bool(suffix) -def test_get_file_nonexistent_source(salt_ssh_cli): - res = salt_ssh_cli.run("cp.get_file", "salt://grail/nonexistent_scene", "") +def test_get_file_nonexistent_source(salt_ssh_cli_parameterized): + res = salt_ssh_cli_parameterized.run( + "cp.get_file", "salt://grail/nonexistent_scene", "" + ) assert res.returncode == 0 # not a fan of this assert res.data == "" -def test_envs(salt_ssh_cli): - ret = salt_ssh_cli.run("cp.envs") +def test_envs(salt_ssh_cli_parameterized): + 
ret = salt_ssh_cli_parameterized.run("cp.envs") assert ret.returncode == 0 assert ret.data assert isinstance(ret.data, list) assert sorted(ret.data) == sorted(["base", "prod"]) -def test_get_template(salt_ssh_cli, tmp_path, cachedir): +def test_get_template(salt_ssh_cli_parameterized, tmp_path, cachedir): tgt = tmp_path / "scene33" - res = salt_ssh_cli.run( + res = salt_ssh_cli_parameterized.run( "cp.get_template", "salt://grail/scene33", str(tgt), spam="bacon" ) assert res.returncode == 0 @@ -168,7 +179,7 @@ def test_get_template(salt_ssh_cli, tmp_path, cachedir): master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "extrn_files" / "base" / "grail" @@ -181,21 +192,23 @@ def test_get_template(salt_ssh_cli, tmp_path, cachedir): assert "spam" not in data -def test_get_template_dest_empty(salt_ssh_cli, cachedir): - res = salt_ssh_cli.run("cp.get_template", "salt://grail/scene33", "", spam="bacon") +def test_get_template_dest_empty(salt_ssh_cli_parameterized, cachedir): + res = salt_ssh_cli_parameterized.run( + "cp.get_template", "salt://grail/scene33", "", spam="bacon" + ) assert res.returncode == 0 assert res.data assert isinstance(res.data, str) master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "extrn_files" / "base" / "grail" / "scene33" ) - tgt = _convert(salt_ssh_cli, cachedir, master_path) + tgt = _convert(salt_ssh_cli_parameterized, cachedir, master_path) assert res.data == str(tgt) for file in (tgt, master_path): assert file.exists() @@ -204,8 +217,10 @@ def test_get_template_dest_empty(salt_ssh_cli, cachedir): assert "spam" not in data -def test_get_template_nonexistent_source(salt_ssh_cli, tmp_path): - res = salt_ssh_cli.run("cp.get_template", "salt://grail/nonexistent_scene", "") +def test_get_template_nonexistent_source(salt_ssh_cli_parameterized, tmp_path): + res = salt_ssh_cli_parameterized.run( + "cp.get_template", "salt://grail/nonexistent_scene", "" + ) assert res.returncode == 0 # not a fan of this assert res.data == "" # The regular module only logs "unable to fetch" with get_url @@ -213,9 +228,9 @@ def test_get_template_nonexistent_source(salt_ssh_cli, tmp_path): @pytest.mark.parametrize("template", (None, "jinja")) @pytest.mark.parametrize("suffix", ("", "/")) -def test_get_dir(salt_ssh_cli, tmp_path, template, suffix, cachedir): +def test_get_dir(salt_ssh_cli_parameterized, tmp_path, template, suffix, cachedir): tgt = tmp_path / ("many" if not template else "{{pillar.alot}}") - res = salt_ssh_cli.run( + res = salt_ssh_cli_parameterized.run( "cp.get_dir", "salt://" + ("grail" if not template else "{{pillar.script}}"), str(tgt) + suffix, @@ -226,7 +241,11 @@ def test_get_dir(salt_ssh_cli, tmp_path, template, suffix, cachedir): assert isinstance(res.data, list) tgt = tmp_path / "many" master_path = ( - cachedir / "salt-ssh" / salt_ssh_cli.get_minion_tgt() / "files" / "base" + cachedir + / "salt-ssh" + / salt_ssh_cli_parameterized.get_minion_tgt() + / "files" + / "base" ) for path in (tgt, master_path): assert path.exists() @@ -249,9 +268,9 @@ def test_get_dir(salt_ssh_cli, tmp_path, template, suffix, cachedir): assert files == filelist -def test_get_dir_gzipped(salt_ssh_cli, caplog, tmp_path): +def test_get_dir_gzipped(salt_ssh_cli_parameterized, caplog, tmp_path): tgt = tmp_path / "many" - res = salt_ssh_cli.run("cp.get_dir", "salt://grail", tgt, gzip=5) + res = salt_ssh_cli_parameterized.run("cp.get_dir", "salt://grail", tgt, gzip=5) 
assert "The gzip argument to cp.get_dir in salt-ssh is unsupported" in caplog.text assert res.returncode == 0 assert res.data @@ -264,21 +283,23 @@ def test_get_dir_gzipped(salt_ssh_cli, caplog, tmp_path): assert "scene" in os.listdir(tgt / "grail" / "36") -def test_get_dir_nonexistent_source(salt_ssh_cli, caplog): - res = salt_ssh_cli.run("cp.get_dir", "salt://grail/non/ex/is/tent", "") +def test_get_dir_nonexistent_source(salt_ssh_cli_parameterized, caplog): + res = salt_ssh_cli_parameterized.run( + "cp.get_dir", "salt://grail/non/ex/is/tent", "" + ) assert res.returncode == 0 # not a fan of this assert isinstance(res.data, list) assert not res.data @pytest.mark.parametrize("dst_is_dir", (False, True)) -def test_get_url(salt_ssh_cli, tmp_path, dst_is_dir, cachedir): +def test_get_url(salt_ssh_cli_parameterized, tmp_path, dst_is_dir, cachedir): src = "salt://grail/scene33" if dst_is_dir: tgt = tmp_path else: tgt = tmp_path / "scene33" - res = salt_ssh_cli.run("cp.get_url", src, str(tgt)) + res = salt_ssh_cli_parameterized.run("cp.get_url", src, str(tgt)) assert res.returncode == 0 assert res.data tgt = tmp_path / "scene33" @@ -286,7 +307,7 @@ def test_get_url(salt_ssh_cli, tmp_path, dst_is_dir, cachedir): master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "files" / "base" / "grail" @@ -299,9 +320,9 @@ def test_get_url(salt_ssh_cli, tmp_path, dst_is_dir, cachedir): assert "bacon" not in data -def test_get_url_makedirs(salt_ssh_cli, tmp_path, cachedir): +def test_get_url_makedirs(salt_ssh_cli_parameterized, tmp_path, cachedir): tgt = tmp_path / "make" / "dirs" / "scene33" - res = salt_ssh_cli.run( + res = salt_ssh_cli_parameterized.run( "cp.get_url", "salt://grail/scene33", str(tgt), makedirs=True ) assert res.returncode == 0 @@ -310,7 +331,7 @@ def test_get_url_makedirs(salt_ssh_cli, tmp_path, cachedir): master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "files" / "base" / "grail" @@ -323,24 +344,24 @@ def test_get_url_makedirs(salt_ssh_cli, tmp_path, cachedir): assert "bacon" not in data -def test_get_url_dest_empty(salt_ssh_cli, cachedir): +def test_get_url_dest_empty(salt_ssh_cli_parameterized, cachedir): """ salt:// source and destination omitted, should still cache the file """ - res = salt_ssh_cli.run("cp.get_url", "salt://grail/scene33") + res = salt_ssh_cli_parameterized.run("cp.get_url", "salt://grail/scene33") assert res.returncode == 0 assert res.data assert isinstance(res.data, str) master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "files" / "base" / "grail" / "scene33" ) - tgt = _convert(salt_ssh_cli, cachedir, master_path) + tgt = _convert(salt_ssh_cli_parameterized, cachedir, master_path) assert res.data == str(tgt) for file in (tgt, master_path): assert file.exists() @@ -349,11 +370,11 @@ def test_get_url_dest_empty(salt_ssh_cli, cachedir): assert "bacon" not in data -def test_get_url_no_dest(salt_ssh_cli): +def test_get_url_no_dest(salt_ssh_cli_parameterized): """ salt:// source given and destination set as None, should return the data """ - res = salt_ssh_cli.run("cp.get_url", "salt://grail/scene33", None) + res = salt_ssh_cli_parameterized.run("cp.get_url", "salt://grail/scene33", None) assert res.returncode == 0 assert res.data assert isinstance(res.data, str) @@ -361,8 +382,10 @@ def test_get_url_no_dest(salt_ssh_cli): assert "bacon" not in res.data -def 
test_get_url_nonexistent_source(salt_ssh_cli, caplog): - res = salt_ssh_cli.run("cp.get_url", "salt://grail/nonexistent_scene", None) +def test_get_url_nonexistent_source(salt_ssh_cli_parameterized, caplog): + res = salt_ssh_cli_parameterized.run( + "cp.get_url", "salt://grail/nonexistent_scene", None + ) assert res.returncode == 0 # not a fan of this assert res.data is False assert ( @@ -371,16 +394,18 @@ def test_get_url_nonexistent_source(salt_ssh_cli, caplog): ) -def test_get_url_https(salt_ssh_cli, tmp_path, cachedir): +def test_get_url_https(salt_ssh_cli_parameterized, tmp_path, cachedir): tgt = tmp_path / "index.html" - res = salt_ssh_cli.run("cp.get_url", "https://saltproject.io/index.html", tgt) + res = salt_ssh_cli_parameterized.run( + "cp.get_url", "https://saltproject.io/index.html", tgt + ) assert res.returncode == 0 assert res.data assert res.data == str(tgt) master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "extrn_files" / "base" / "saltproject.io" @@ -392,23 +417,25 @@ def test_get_url_https(salt_ssh_cli, tmp_path, cachedir): assert "Salt Project" in data -def test_get_url_https_dest_empty(salt_ssh_cli, tmp_path, cachedir): +def test_get_url_https_dest_empty(salt_ssh_cli_parameterized, tmp_path, cachedir): """ https:// source given and destination omitted, should still cache the file """ - res = salt_ssh_cli.run("cp.get_url", "https://saltproject.io/index.html") + res = salt_ssh_cli_parameterized.run( + "cp.get_url", "https://saltproject.io/index.html" + ) assert res.returncode == 0 assert res.data master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "extrn_files" / "base" / "saltproject.io" / "index.html" ) - tgt = _convert(salt_ssh_cli, cachedir, master_path) + tgt = _convert(salt_ssh_cli_parameterized, cachedir, master_path) assert res.data == str(tgt) for path in (tgt, master_path): assert path.exists() @@ -416,7 +443,7 @@ def test_get_url_https_dest_empty(salt_ssh_cli, tmp_path, cachedir): assert "Salt Project" in data -def test_get_url_https_no_dest(salt_ssh_cli): +def test_get_url_https_no_dest(salt_ssh_cli_parameterized): """ https:// source given and destination set as None, should return the data """ @@ -424,7 +451,9 @@ def test_get_url_https_no_dest(salt_ssh_cli): start = time.time() sleep = 5 while time.time() - start <= timeout: - res = salt_ssh_cli.run("cp.get_url", "https://saltproject.io/index.html", None) + res = salt_ssh_cli_parameterized.run( + "cp.get_url", "https://saltproject.io/index.html", None + ) if isinstance(res.data, str) and res.data.find("HTTP 599") == -1: break time.sleep(sleep) @@ -444,22 +473,22 @@ def test_get_url_https_no_dest(salt_ssh_cli): (Path("_foo", "bar", "baz"), False), ), ) -def test_get_url_file(salt_ssh_cli, path, expected, scheme): +def test_get_url_file(salt_ssh_cli_parameterized, path, expected, scheme): """ Ensure the file:// scheme is not supported """ - res = salt_ssh_cli.run("cp.get_url", scheme + str(path)) + res = salt_ssh_cli_parameterized.run("cp.get_url", scheme + str(path)) assert res.returncode == 0 assert res.data is False -def test_get_url_file_contents(salt_ssh_cli, tmp_path, caplog): +def test_get_url_file_contents(salt_ssh_cli_parameterized, tmp_path, caplog): """ A file:// source is not supported since it would need to fetch a file from the minion onto the master to be consistent """ src = Path(RUNTIME_VARS.FILES) / "file" / "base" / "file.big" - res = 
salt_ssh_cli.run("cp.get_url", "file://" + str(src), None) + res = salt_ssh_cli_parameterized.run("cp.get_url", "file://" + str(src), None) assert res.returncode == 0 assert res.data is False assert ( @@ -467,9 +496,10 @@ def test_get_url_file_contents(salt_ssh_cli, tmp_path, caplog): ) -def test_get_url_ftp(salt_ssh_cli, tmp_path, cachedir): +@pytest.mark.timeout(300) # FTP can be slow, allow 5 minutes +def test_get_url_ftp(salt_ssh_cli_parameterized, tmp_path, cachedir): tgt = tmp_path / "README.TXT" - res = salt_ssh_cli.run( + res = salt_ssh_cli_parameterized.run( "cp.get_url", "ftp://ftp.freebsd.org/pub/FreeBSD/releases/amd64/README.TXT", tgt ) assert res.returncode == 0 @@ -478,7 +508,7 @@ def test_get_url_ftp(salt_ssh_cli, tmp_path, cachedir): master_path = ( cachedir / "salt-ssh" - / salt_ssh_cli.get_minion_tgt() + / salt_ssh_cli_parameterized.get_minion_tgt() / "extrn_files" / "base" / "ftp.freebsd.org" @@ -494,46 +524,46 @@ def test_get_url_ftp(salt_ssh_cli, tmp_path, cachedir): assert "The official FreeBSD" in data -def test_get_file_str_salt(salt_ssh_cli, cachedir): +def test_get_file_str_salt(salt_ssh_cli_parameterized, cachedir): src = "salt://grail/scene33" - res = salt_ssh_cli.run("cp.get_file_str", src) + res = salt_ssh_cli_parameterized.run("cp.get_file_str", src) assert res.returncode == 0 assert res.data assert isinstance(res.data, str) assert "KNIGHT: They're nervous, sire." in res.data tgt = cachedir / "files" / "base" / "grail" / "scene33" - master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True) + master_path = _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True) for path in (tgt, master_path): assert path.exists() text = path.read_text(encoding="utf-8") assert "KNIGHT: They're nervous, sire." in text -def test_get_file_str_nonexistent_source(salt_ssh_cli, caplog): +def test_get_file_str_nonexistent_source(salt_ssh_cli_parameterized, caplog): src = "salt://grail/nonexistent_scene" - res = salt_ssh_cli.run("cp.get_file_str", src) + res = salt_ssh_cli_parameterized.run("cp.get_file_str", src) assert res.returncode == 0 # yup... 
     assert res.data is False
 
 
-def test_get_file_str_https(salt_ssh_cli, cachedir):
+def test_get_file_str_https(salt_ssh_cli_parameterized, cachedir):
     src = "https://saltproject.io/index.html"
-    res = salt_ssh_cli.run("cp.get_file_str", src)
+    res = salt_ssh_cli_parameterized.run("cp.get_file_str", src)
     assert res.returncode == 0
     assert res.data
     assert isinstance(res.data, str)
     assert "Salt Project" in res.data
     tgt = cachedir / "extrn_files" / "base" / "saltproject.io" / "index.html"
-    master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True)
+    master_path = _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True)
     for path in (tgt, master_path):
         assert path.exists()
         text = path.read_text(encoding="utf-8")
         assert "Salt Project" in text
 
 
-def test_get_file_str_local(salt_ssh_cli, cachedir, caplog):
+def test_get_file_str_local(salt_ssh_cli_parameterized, cachedir, caplog):
     src = Path(RUNTIME_VARS.FILES) / "file" / "base" / "cheese"
-    res = salt_ssh_cli.run("cp.get_file_str", "file://" + str(src))
+    res = salt_ssh_cli_parameterized.run("cp.get_file_str", "file://" + str(src))
     assert res.returncode == 0
     assert isinstance(res.data, str)
     assert "Gromit" in res.data
@@ -544,8 +574,8 @@
 
 
 @pytest.mark.parametrize("suffix", ("", "?saltenv=prod"))
-def test_cache_file(salt_ssh_cli, suffix, cachedir):
-    res = salt_ssh_cli.run("cp.cache_file", "salt://cheese" + suffix)
+def test_cache_file(salt_ssh_cli_parameterized, suffix, cachedir):
+    res = salt_ssh_cli_parameterized.run("cp.cache_file", "salt://cheese" + suffix)
     assert res.returncode == 0
     assert res.data
     tgt = (
@@ -554,7 +584,7 @@
         / ("base" if "saltenv" not in suffix else suffix.split("=")[1])
         / "cheese"
     )
-    master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True)
+    master_path = _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True)
     for file in (tgt, master_path):
         data = file.read_text(encoding="utf-8")
         assert "Gromit" in data
@@ -562,12 +592,12 @@
 
 
 @pytest.fixture
-def _cache_twice(salt_master, request, salt_ssh_cli, cachedir):
+def _cache_twice(salt_master, request, salt_ssh_cli_parameterized, cachedir):
     # ensure the cache is clean
     tgt = cachedir / "extrn_files" / "base" / "saltproject.io" / "index.html"
     tgt.unlink(missing_ok=True)
-    master_tgt = _convert(salt_ssh_cli, cachedir, tgt, master=True)
+    master_tgt = _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True)
     master_tgt.unlink(missing_ok=True)
 
     # create a template that will cause a file to get cached twice
@@ -598,13 +628,15 @@
     yield f"salt://{name}"
 
 
-def test_cache_file_context_cache(salt_ssh_cli, cachedir, _cache_twice):
-    res = salt_ssh_cli.run("slsutil.renderer", _cache_twice, default_renderer="jinja")
+def test_cache_file_context_cache(salt_ssh_cli_parameterized, cachedir, _cache_twice):
+    res = salt_ssh_cli_parameterized.run(
+        "slsutil.renderer", _cache_twice, default_renderer="jinja"
+    )
     assert res.returncode == 0
     tgt = res.data.strip()
     assert tgt
     tgt = Path(tgt)
-    for file in (tgt, _convert(salt_ssh_cli, cachedir, tgt, master=True)):
+    for file in (tgt, _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True)):
         assert tgt.exists()
     # If both files were present, they should not be re-fetched
     assert "wasmodifiedhahaha" in tgt.read_text(encoding="utf-8")
@@ -612,21 +644,25 @@ def test_cache_file_context_cache(salt_ssh_cli, cachedir, _cache_twice):
 
 @pytest.mark.parametrize("_cache_twice", ("master", "minion"), indirect=True)
 def test_cache_file_context_cache_requires_both_caches(
-    salt_ssh_cli, cachedir, _cache_twice
+    salt_ssh_cli_parameterized, cachedir, _cache_twice
 ):
-    res = salt_ssh_cli.run("slsutil.renderer", _cache_twice, default_renderer="jinja")
+    res = salt_ssh_cli_parameterized.run(
+        "slsutil.renderer", _cache_twice, default_renderer="jinja"
+    )
     assert res.returncode == 0
     tgt = res.data.strip()
    assert tgt
     tgt = Path(tgt)
-    for file in (tgt, _convert(salt_ssh_cli, cachedir, tgt, master=True)):
+    for file in (tgt, _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True)):
         assert tgt.exists()
     # If one of the files was removed, it should be re-fetched
     assert "wasmodifiedhahaha" not in tgt.read_text(encoding="utf-8")
 
 
-def test_cache_file_nonexistent_source(salt_ssh_cli):
-    res = salt_ssh_cli.run("cp.get_template", "salt://grail/nonexistent_scene", "")
+def test_cache_file_nonexistent_source(salt_ssh_cli_parameterized):
+    res = salt_ssh_cli_parameterized.run(
+        "cp.get_template", "salt://grail/nonexistent_scene", ""
+    )
     assert res.returncode == 0  # not a fan of this
     assert res.data == ""
     # The regular module only logs "unable to fetch" with get_url
@@ -639,8 +675,8 @@
         "salt://grail/scene33,salt://grail/36/scene",
     ),
 )
-def test_cache_files(salt_ssh_cli, files):
-    res = salt_ssh_cli.run("cp.cache_files", files)
+def test_cache_files(salt_ssh_cli_parameterized, files):
+    res = salt_ssh_cli_parameterized.run("cp.cache_files", files)
     assert res.returncode == 0
     assert res.data
     assert isinstance(res.data, list)
@@ -653,13 +689,13 @@
         assert "bacon" not in data
 
 
-def test_cache_dir(salt_ssh_cli, cachedir):
-    res = salt_ssh_cli.run("cp.cache_dir", "salt://grail")
+def test_cache_dir(salt_ssh_cli_parameterized, cachedir):
+    res = salt_ssh_cli_parameterized.run("cp.cache_dir", "salt://grail")
     assert res.returncode == 0
     assert res.data
     assert isinstance(res.data, list)
     tgt = cachedir / "files" / "base" / "grail"
-    master_path = _convert(salt_ssh_cli, cachedir, tgt, master=True)
+    master_path = _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True)
     for path in (tgt, master_path):
         assert path.exists()
         assert "36" in os.listdir(path)
@@ -673,14 +709,16 @@
     assert files == filelist
 
 
-def test_cache_dir_nonexistent_source(salt_ssh_cli, caplog):
-    res = salt_ssh_cli.run("cp.cache_dir", "salt://grail/non/ex/is/tent", "")
+def test_cache_dir_nonexistent_source(salt_ssh_cli_parameterized, caplog):
+    res = salt_ssh_cli_parameterized.run(
+        "cp.cache_dir", "salt://grail/non/ex/is/tent", ""
+    )
     assert res.returncode == 0  # not a fan of this
     assert isinstance(res.data, list)
     assert not res.data
 
 
-def test_list_states(salt_master, salt_ssh_cli, tmp_path):
+def test_list_states(salt_master, salt_ssh_cli_parameterized, tmp_path):
     top_sls = """
     base:
       '*':
@@ -696,7 +734,7 @@
     with salt_master.state_tree.base.temp_file(
         "top.sls", top_sls
     ), salt_master.state_tree.base.temp_file("core.sls", core_state):
-        res = salt_ssh_cli.run(
+        res = salt_ssh_cli_parameterized.run(
             "cp.list_states",
         )
     assert res.returncode == 0
@@ -707,8 +745,8 @@
     assert "cheese" not in res.data
 
 
-def test_list_master(salt_ssh_cli):
-    res = salt_ssh_cli.run("cp.list_master")
+def test_list_master(salt_ssh_cli_parameterized):
+    res = salt_ssh_cli_parameterized.run("cp.list_master")
     assert res.returncode == 0
     assert res.data
     assert isinstance(res.data, list)
@@ -724,8 +762,8 @@
     assert "test_deep/a" not in res.data
 
 
-def test_list_master_dirs(salt_ssh_cli):
-    res = salt_ssh_cli.run("cp.list_master_dirs")
+def test_list_master_dirs(salt_ssh_cli_parameterized):
+    res = salt_ssh_cli_parameterized.run("cp.list_master_dirs")
     assert res.returncode == 0
     assert res.data
     assert isinstance(res.data, list)
@@ -740,31 +778,38 @@
         assert path not in res.data
 
 
-def test_list_master_symlinks(salt_ssh_cli, salt_master):
-    if salt_ssh_cli.config.get("fileserver_ignoresymlinks", False):
+def test_list_master_symlinks(salt_ssh_cli_parameterized, salt_master):
+    if salt_ssh_cli_parameterized.config.get("fileserver_ignoresymlinks", False):
         pytest.skip("Fileserver is configured to ignore symlinks")
     with salt_master.state_tree.base.temp_file(random_string("foo-"), "") as tgt:
-        sym = tgt.parent / "test_list_master_symlinks"
-        sym.symlink_to(tgt)
-        res = salt_ssh_cli.run("cp.list_master_symlinks")
-        assert res.returncode == 0
-        assert res.data
-        assert isinstance(res.data, dict)
-        assert res.data
-        assert sym.name in res.data
-        assert res.data[sym.name] == str(tgt)
+        sym = tgt.parent / random_string("test_list_master_symlinks-")
+        try:
+            sym.symlink_to(tgt)
+            res = salt_ssh_cli_parameterized.run("cp.list_master_symlinks")
+            assert res.returncode == 0
+            assert res.data
+            assert isinstance(res.data, dict)
+            assert res.data
+            assert sym.name in res.data
+            assert res.data[sym.name] == str(tgt)
+        finally:
+            # Clean up the symlink to avoid conflicts with parametrized tests
+            if sym.exists() or sym.is_symlink():
+                sym.unlink()
 
 
 @pytest.fixture(params=(False, "cached", "render_cached"))
-def _is_cached(salt_ssh_cli, suffix, request, cachedir):
+def _is_cached(salt_ssh_cli_parameterized, suffix, request, cachedir):
     remove = ["files", "extrn_files"]
     if request.param == "cached":
-        ret = salt_ssh_cli.run("cp.cache_file", "salt://grail/scene33" + suffix)
+        ret = salt_ssh_cli_parameterized.run(
+            "cp.cache_file", "salt://grail/scene33" + suffix
+        )
        assert ret.returncode == 0
         assert ret.data
         remove.remove("files")
     elif request.param == "render_cached":
-        ret = salt_ssh_cli.run(
+        ret = salt_ssh_cli_parameterized.run(
             "cp.get_template", "salt://grail/scene33" + suffix, "", spam="bacon"
         )
         assert ret.returncode == 0
@@ -773,13 +818,13 @@
     for basedir in remove:
         tgt = cachedir / basedir / "base" / "grail" / "scene33"
         tgt.unlink(missing_ok=True)
-        master_tgt = _convert(salt_ssh_cli, cachedir, tgt, master=True)
+        master_tgt = _convert(salt_ssh_cli_parameterized, cachedir, tgt, master=True)
         master_tgt.unlink(missing_ok=True)
     return request.param
 
 
 @pytest.mark.parametrize("suffix", ("", "?saltenv=base"))
-def test_is_cached(salt_ssh_cli, cachedir, _is_cached, suffix):
+def test_is_cached(salt_ssh_cli_parameterized, cachedir, _is_cached, suffix):
     """
     is_cached should find both cached files from the fileserver as well as cached rendered templates
     """
@@ -788,42 +833,46 @@
         tgt = cachedir / "extrn_files" / "base" / "grail" / "scene33"
     else:
         tgt = cachedir / "files" / "base" / "grail" / "scene33"
-    res = salt_ssh_cli.run("cp.is_cached", "salt://grail/scene33" + suffix)
+    res = salt_ssh_cli_parameterized.run(
+        "cp.is_cached", "salt://grail/scene33" + suffix
+    )
     assert res.returncode == 0
     assert (res.data == str(tgt)) is bool(_is_cached)
     assert (res.data != "") is bool(_is_cached)
 
 
-def test_is_cached_nonexistent(salt_ssh_cli):
-    res2 = salt_ssh_cli.run("cp.is_cached", "salt://fasldkgj/poicxzbn")
+def test_is_cached_nonexistent(salt_ssh_cli_parameterized):
+    res2 = salt_ssh_cli_parameterized.run("cp.is_cached", "salt://fasldkgj/poicxzbn")
     assert res2.returncode == 0
     assert res2.data == ""
 
 
 @pytest.mark.parametrize("suffix", ("", "?saltenv=base"))
-def test_hash_file(salt_ssh_cli, cachedir, suffix):
-    res = salt_ssh_cli.run("cp.hash_file", "salt://grail/scene33" + suffix)
+def test_hash_file(salt_ssh_cli_parameterized, cachedir, suffix):
+    res = salt_ssh_cli_parameterized.run(
+        "cp.hash_file", "salt://grail/scene33" + suffix
+    )
     assert res.returncode == 0
     assert res.data
     sha256_hash = res.data["hsum"]
-    res = salt_ssh_cli.run("cp.cache_file", "salt://grail/scene33")
+    res = salt_ssh_cli_parameterized.run("cp.cache_file", "salt://grail/scene33")
     assert res.returncode == 0
     assert res.data
-    master_path = _convert(salt_ssh_cli, cachedir, res.data, master=True)
+    master_path = _convert(salt_ssh_cli_parameterized, cachedir, res.data, master=True)
     assert master_path.exists()
     data = master_path.read_bytes()
     digest = hashlib.sha256(data).hexdigest()
     assert digest == sha256_hash
 
 
-def test_hash_file_local(salt_ssh_cli, caplog):
+def test_hash_file_local(salt_ssh_cli_parameterized, caplog):
     """
     Ensure that local files are run through ``salt-call`` on the target.
     We have to trust that this would otherwise fail because the tests
     run against localhost.
     """
     path = Path(RUNTIME_VARS.FILES) / "file" / "base" / "cheese"
-    res = salt_ssh_cli.run("cp.hash_file", str(path))
+    res = salt_ssh_cli_parameterized.run("cp.hash_file", str(path))
     assert res.returncode == 0
     # This would be logged if SSHCpClient was used instead of
     # performing a shimmed salt-call command
@@ -863,10 +912,10 @@ def state_tree_jinjaimport(tmp_path, salt_master):
 
 
 def test_cp_cache_file_as_workaround_for_missing_map_file(
-    salt_ssh_cli, state_tree_jinjaimport, tmp_path
+    salt_ssh_cli_parameterized, state_tree_jinjaimport, tmp_path
 ):
     tgt = tmp_path / "config.conf"
-    ret = salt_ssh_cli.run("state.sls", state_tree_jinjaimport)
+    ret = salt_ssh_cli_parameterized.run("state.sls", state_tree_jinjaimport)
     assert ret.returncode == 0
     assert isinstance(ret.data, dict)
     assert ret.data
diff --git a/tests/pytests/integration/ssh/test_grains.py b/tests/pytests/integration/ssh/test_grains.py
index 667698531ca0..7b8e856a3e47 100644
--- a/tests/pytests/integration/ssh/test_grains.py
+++ b/tests/pytests/integration/ssh/test_grains.py
@@ -13,9 +13,9 @@
 ]
 
 
-@pytest.fixture(scope="module")
-def grains_filter_by_lookup(salt_ssh_cli):
-    ret = salt_ssh_cli.run("grains.get", "os")
+@pytest.fixture(scope="function")
+def grains_filter_by_lookup(salt_ssh_cli_parameterized):
+    ret = salt_ssh_cli_parameterized.run("grains.get", "os")
     assert ret.returncode == 0
     assert ret.data
     os = ret.data
@@ -38,6 +38,9 @@ def grains_filter_by_lookup(salt_ssh_cli):
 
 @pytest.fixture(scope="module")
 def grains_filter_by_default():
+    """
+    Note: Module scope is fine here since this fixture has no dependencies.
+ """ return { "common": { "has_common": True, @@ -51,7 +54,7 @@ def grains_filter_by_default(): } -@pytest.fixture(scope="module") +@pytest.fixture(scope="function") def grains_filter_by_states( salt_master, salt_ssh_cli, grains_filter_by_lookup, grains_filter_by_default ): @@ -142,11 +145,11 @@ def test_grains_filter_by_jinja(salt_ssh_cli): assert "defaulted" not in rendered -def test_grains_filter_by_default(salt_ssh_cli, grains_filter_by_default): +def test_grains_filter_by_default(grains_filter_by_default, salt_ssh_cli_parameterized): """ - test grains.filter_by with salt-ssh and default parameter + test grains.filter_by with salt-ssh and default parameter (parameterized for both thin and relenv) """ - ret = salt_ssh_cli.run( + ret = salt_ssh_cli_parameterized.run( "grains.filter_by", grains_filter_by_default, grain="os", @@ -162,11 +165,13 @@ def test_grains_filter_by_default(salt_ssh_cli, grains_filter_by_default): @pytest.mark.usefixtures("grains_filter_by_states") -def test_grains_filter_by_default_jinja(salt_ssh_cli, grains_filter_by_default): +def test_grains_filter_by_default_jinja( + grains_filter_by_default, salt_ssh_cli_parameterized +): """ - test grains.filter_by during template rendering with salt-ssh and default parameter + test grains.filter_by during template rendering with salt-ssh and default parameter (parameterized for both thin and relenv) """ - ret = salt_ssh_cli.run("state.show_sls", "grains_filter_by_default") + ret = salt_ssh_cli_parameterized.run("state.show_sls", "grains_filter_by_default") assert ret.returncode == 0 assert ret.data rendered = ret.data["grains-filter-by"]["file"][1]["context"] diff --git a/tests/pytests/unit/client/ssh/test_single.py b/tests/pytests/unit/client/ssh/test_single.py index 5e5357bf222e..0246e2039927 100644 --- a/tests/pytests/unit/client/ssh/test_single.py +++ b/tests/pytests/unit/client/ssh/test_single.py @@ -634,7 +634,9 @@ def test_cmd_block_python_version_error(opts, target): return_value=(("", "ERROR: Unable to locate appropriate python command\n", 10)) ) patch_shim = patch("salt.client.ssh.Single.shim_cmd", mock_shim) - with patch_shim: + patch_mod_data = patch("salt.client.ssh.mod_data", return_value={}) + patch_deploy_ext = patch("salt.client.ssh.Single.deploy_ext") + with patch_shim, patch_mod_data, patch_deploy_ext: ret = single.cmd_block() assert "ERROR: Python version error. 
Recommendation(s) follow:" in ret[0] diff --git a/tests/pytests/unit/states/test_grains.py b/tests/pytests/unit/states/test_grains.py index a2df4028244d..0a1a4490e9ef 100644 --- a/tests/pytests/unit/states/test_grains.py +++ b/tests/pytests/unit/states/test_grains.py @@ -705,7 +705,7 @@ def test_list_present_nested_already(): with set_grains({"a": "aval", "b": {"foo": ["bar"]}}): ret = grains.list_present(name="b:foo", value="bar") assert ret["result"] is True - assert ret["comment"] == "Value bar is already in grain b:foo" + assert ret["comment"] == "Value bar is already in grain b:foo (or pending)" assert ret["changes"] == {} assert grains.__grains__ == {"a": "aval", "b": {"foo": ["bar"]}} assert_grain_file_content("a: aval\nb:\n foo:\n - bar\n") @@ -715,7 +715,7 @@ def test_list_present_already(): with set_grains({"a": "aval", "foo": ["bar"]}): ret = grains.list_present(name="foo", value="bar") assert ret["result"] is True - assert ret["comment"] == "Value bar is already in grain foo" + assert ret["comment"] == "Value bar is already in grain foo (or pending)" assert ret["changes"] == {} assert grains.__grains__ == {"a": "aval", "foo": ["bar"]} assert_grain_file_content("a: aval\nfoo:\n- bar\n")
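The test hunks above switch many tests to a salt_ssh_cli_parameterized fixture that runs each test once against the classic thin deployment and once against the relenv deployment, but the fixture definition itself is not part of these hunks. The snippet below is a minimal sketch of one way such a fixture could be wired in the ssh integration conftest; the salt_ssh_cli_relenv fixture name and the idea of routing the relenv run through a second, pre-existing CLI fixture are assumptions for illustration, not necessarily how this change implements it.

import pytest


@pytest.fixture(params=("salt_ssh_cli", "salt_ssh_cli_relenv"), ids=("thin", "relenv"))
def salt_ssh_cli_parameterized(request):
    """
    Return the thin-based or relenv-based salt-ssh CLI, depending on the param.

    NOTE: sketch only -- assumes an existing thin-based ``salt_ssh_cli`` fixture
    and a hypothetical relenv-enabled counterpart named ``salt_ssh_cli_relenv``.
    """
    # getfixturevalue() resolves the named fixture lazily, so the relenv CLI is
    # only instantiated for the relenv-parameterized runs.
    return request.getfixturevalue(request.param)

Delegating through request.getfixturevalue() would leave the existing salt_ssh_cli fixture untouched for the tests that still use it directly (for example test_grains_filter_by_jinja and the grains_filter_by_states fixture in test_grains.py).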