diff --git a/.github/workflows/samples-integration-test.yml b/.github/workflows/samples-integration-test.yml index a61fb5d4..183307a8 100644 --- a/.github/workflows/samples-integration-test.yml +++ b/.github/workflows/samples-integration-test.yml @@ -171,7 +171,7 @@ jobs: # Note: /p:TreatWarningsAsErrors=false avoids failing on SDK doc warnings dotnet pack sdk/cs/src/Microsoft.AI.Foundry.Local.csproj ` -o local-packages ` - /p:Version=1.0.0-rc1 ` + /p:Version=1.0.0-rc2 ` /p:IsPacking=true ` /p:TreatWarningsAsErrors=false ` --configuration Release @@ -180,7 +180,7 @@ jobs: if ($IsWindows) { dotnet pack sdk/cs/src/Microsoft.AI.Foundry.Local.csproj ` -o local-packages ` - /p:Version=1.0.0-rc1 ` + /p:Version=1.0.0-rc2 ` /p:UseWinML=true ` /p:IsPacking=true ` /p:TreatWarningsAsErrors=false ` diff --git a/.pipelines/templates/build-core-steps.yml b/.pipelines/templates/build-core-steps.yml index 9f024c42..3803ccf0 100644 --- a/.pipelines/templates/build-core-steps.yml +++ b/.pipelines/templates/build-core-steps.yml @@ -48,7 +48,6 @@ steps: - diff --git a/.pipelines/templates/build-python-steps.yml b/.pipelines/templates/build-python-steps.yml index 8ab4d8d1..f21d9508 100644 --- a/.pipelines/templates/build-python-steps.yml +++ b/.pipelines/templates/build-python-steps.yml @@ -110,8 +110,12 @@ steps: Write-Warning "No FLC wheel found matching $filter in ${{ parameters.flcWheelsDir }}" } -- script: pip install onnxruntime-core==1.24.3 onnxruntime-genai-core==0.12.1 - displayName: 'Install ORT native packages' +- ${{ if eq(parameters.isWinML, true) }}: + - script: pip install onnxruntime-core==1.23.2.3 onnxruntime-genai-core==0.12.1 + displayName: 'Install ORT native packages (WinML)' +- ${{ else }}: + - script: pip install onnxruntime-core==1.24.3 onnxruntime-genai-core==0.12.1 + displayName: 'Install ORT native packages' - script: pip install "pydantic>=2.0.0" "requests>=2.32.4" "openai>=2.24.0" displayName: 'Install pure python dependencies' diff --git 
a/.pipelines/templates/package-core-steps.yml b/.pipelines/templates/package-core-steps.yml index 960b6cd3..d389b7a5 100644 --- a/.pipelines/templates/package-core-steps.yml +++ b/.pipelines/templates/package-core-steps.yml @@ -109,9 +109,9 @@ steps: $nuspec = "$nsRoot/src/FoundryLocalCore/Core/WinMLNuget.nuspec" $id = "Microsoft.AI.Foundry.Local.Core.WinML" $ortVer = $pg.OnnxRuntimeFoundryVersionForWinML - $genaiVer = $pg.OnnxRuntimeGenAIWinML + $genaiVer = $pg.OnnxRuntimeGenAIFoundryVersion $winAppSdkVer = $pg.WinAppSdkVersion - $props = "id=$id;version=$(flcVersion);commitId=$(Build.SourceVersion);OnnxRuntimeFoundryVersion=$ortVer;OnnxRuntimeGenAIWinML=$genaiVer;WinAppSdkVersion=$winAppSdkVer" + $props = "id=$id;version=$(flcVersion);commitId=$(Build.SourceVersion);OnnxRuntimeFoundryVersionForWinML=$ortVer;OnnxRuntimeGenAIFoundryVersion=$genaiVer;WinAppSdkVersion=$winAppSdkVer" } else { $nuspec = "$nsRoot/src/FoundryLocalCore/Core/NativeNuget.nuspec" $id = "Microsoft.AI.Foundry.Local.Core" diff --git a/.pipelines/templates/test-cs-steps.yml b/.pipelines/templates/test-cs-steps.yml index f7dc1aff..92c9b6ee 100644 --- a/.pipelines/templates/test-cs-steps.yml +++ b/.pipelines/templates/test-cs-steps.yml @@ -68,6 +68,7 @@ steps: + diff --git a/.pipelines/templates/test-python-steps.yml b/.pipelines/templates/test-python-steps.yml index 1da74ee2..6fc86b3b 100644 --- a/.pipelines/templates/test-python-steps.yml +++ b/.pipelines/templates/test-python-steps.yml @@ -98,8 +98,12 @@ steps: Write-Warning "No FLC wheel found matching $filter" } -- script: pip install onnxruntime-core==1.24.3 onnxruntime-genai-core==0.12.1 - displayName: 'Install ORT native packages' +- ${{ if eq(parameters.isWinML, true) }}: + - script: pip install onnxruntime-core==1.23.2.3 onnxruntime-genai-core==0.12.1 + displayName: 'Install ORT native packages (WinML)' +- ${{ else }}: + - script: pip install onnxruntime-core==1.24.3 onnxruntime-genai-core==0.12.1 + displayName: 'Install ORT native 
packages' - script: pip install "pydantic>=2.0.0" "requests>=2.32.4" "openai>=2.24.0" displayName: 'Install pure python dependencies' diff --git a/samples/cs/Directory.Packages.props b/samples/cs/Directory.Packages.props index 21384b45..e3269f0f 100644 --- a/samples/cs/Directory.Packages.props +++ b/samples/cs/Directory.Packages.props @@ -5,8 +5,8 @@ 1.23.2 - - + + diff --git a/sdk/cs/src/Detail/CoreInterop.cs b/sdk/cs/src/Detail/CoreInterop.cs index d7867cad..2f0a0512 100644 --- a/sdk/cs/src/Detail/CoreInterop.cs +++ b/sdk/cs/src/Detail/CoreInterop.cs @@ -178,7 +178,7 @@ public CallbackHelper(CallbackFn callback) } } - private static void HandleCallback(nint data, int length, nint callbackHelper) + private static int HandleCallback(nint data, int length, nint callbackHelper) { var callbackData = string.Empty; CallbackHelper? helper = null; @@ -196,14 +196,24 @@ private static void HandleCallback(nint data, int length, nint callbackHelper) helper = (CallbackHelper)GCHandle.FromIntPtr(callbackHelper).Target!; helper.Callback.Invoke(callbackData); + return 0; // continue } - catch (Exception ex) when (ex is not OperationCanceledException) + catch (OperationCanceledException ex) + { + if (helper != null && helper.Exception == null) + { + helper.Exception = ex; + } + return 1; // cancel + } + catch (Exception ex) { FoundryLocalManager.Instance.Logger.LogError(ex, $"Error in callback. 
Callback data: {callbackData}"); if (helper != null && helper.Exception == null) { helper.Exception = ex; } + return 1; // cancel on error } } diff --git a/sdk/cs/src/Detail/ICoreInterop.cs b/sdk/cs/src/Detail/ICoreInterop.cs index 1fff9dde..380eff1c 100644 --- a/sdk/cs/src/Detail/ICoreInterop.cs +++ b/sdk/cs/src/Detail/ICoreInterop.cs @@ -40,8 +40,9 @@ protected unsafe struct ResponseBuffer } // native callback function signature + // Return: 0 = continue, 1 = cancel [UnmanagedFunctionPointer(CallingConvention.Cdecl)] - protected unsafe delegate void NativeCallbackFn(nint data, int length, nint userData); + protected unsafe delegate int NativeCallbackFn(nint data, int length, nint userData); Response ExecuteCommand(string commandName, CoreInteropRequest? commandInput = null); Response ExecuteCommandWithCallback(string commandName, CoreInteropRequest? commandInput, CallbackFn callback); diff --git a/sdk/cs/src/Microsoft.AI.Foundry.Local.csproj b/sdk/cs/src/Microsoft.AI.Foundry.Local.csproj index 95398017..f2f5c705 100644 --- a/sdk/cs/src/Microsoft.AI.Foundry.Local.csproj +++ b/sdk/cs/src/Microsoft.AI.Foundry.Local.csproj @@ -29,7 +29,7 @@ true false @@ -100,8 +100,8 @@ $(FoundryLocalCoreVersion) - 1.0.0-rc1 - 1.0.0-rc1 + 1.0.0-rc2 + 1.0.0-rc2 True diff --git a/sdk/js/script/install-standard.cjs b/sdk/js/script/install-standard.cjs index f56df943..26ed54f3 100644 --- a/sdk/js/script/install-standard.cjs +++ b/sdk/js/script/install-standard.cjs @@ -8,12 +8,10 @@ const os = require('os'); const { NUGET_FEED, ORT_NIGHTLY_FEED, runInstall } = require('./install-utils.cjs'); -const useNightly = process.env.npm_config_nightly === 'true'; - const ARTIFACTS = [ - { name: 'Microsoft.AI.Foundry.Local.Core', version: '1.0.0-rc1', feed: ORT_NIGHTLY_FEED, nightly: useNightly }, - { name: os.platform() === 'linux' ? 
'Microsoft.ML.OnnxRuntime.Gpu.Linux' : 'Microsoft.ML.OnnxRuntime.Foundry', version: '1.24.3', feed: NUGET_FEED, nightly: false }, - { name: 'Microsoft.ML.OnnxRuntimeGenAI.Foundry', version: '0.12.2', feed: NUGET_FEED, nightly: false }, + { name: 'Microsoft.AI.Foundry.Local.Core', version: '1.0.0-rc2', feed: ORT_NIGHTLY_FEED }, + { name: os.platform() === 'linux' ? 'Microsoft.ML.OnnxRuntime.Gpu.Linux' : 'Microsoft.ML.OnnxRuntime.Foundry', version: '1.24.4', feed: NUGET_FEED }, + { name: 'Microsoft.ML.OnnxRuntimeGenAI.Foundry', version: '0.13.0', feed: NUGET_FEED }, ]; (async () => { diff --git a/sdk/js/script/install-utils.cjs b/sdk/js/script/install-utils.cjs index f9a5186c..cc61f0db 100644 --- a/sdk/js/script/install-utils.cjs +++ b/sdk/js/script/install-utils.cjs @@ -104,24 +104,9 @@ async function getBaseAddress(feedUrl) { return baseAddress.endsWith('/') ? baseAddress : baseAddress + '/'; } -async function resolveLatestVersion(feedUrl, packageName) { - const baseAddress = await getBaseAddress(feedUrl); - const versionsUrl = `${baseAddress}${packageName.toLowerCase()}/index.json`; - const versionData = await downloadJson(versionsUrl); - const versions = versionData.versions || []; - if (versions.length === 0) throw new Error(`No versions found for ${packageName}`); - versions.sort((a, b) => b.localeCompare(a)); - console.log(`[foundry-local] Latest version of ${packageName}: ${versions[0]}`); - return versions[0]; -} - async function installPackage(artifact, tempDir) { const pkgName = artifact.name; - let pkgVer = artifact.version; - if (artifact.nightly) { - console.log(` Resolving latest version for ${pkgName}...`); - pkgVer = await resolveLatestVersion(artifact.feed, pkgName); - } + const pkgVer = artifact.version; const baseAddress = await getBaseAddress(artifact.feed); const nameLower = pkgName.toLowerCase(); @@ -167,13 +152,8 @@ async function runInstall(artifacts) { } if (fs.existsSync(BIN_DIR) && REQUIRED_FILES.every(f => fs.existsSync(path.join(BIN_DIR, 
f)))) { - if (process.env.npm_config_nightly === 'true') { - console.log(`[foundry-local] Nightly requested. Forcing reinstall...`); - fs.rmSync(BIN_DIR, { recursive: true, force: true }); - } else { - console.log(`[foundry-local] Native libraries already installed.`); - return; - } + console.log(`[foundry-local] Native libraries already installed.`); + return; } console.log(`[foundry-local] Installing native libraries for ${RID}...`); diff --git a/sdk/js/script/install-winml.cjs b/sdk/js/script/install-winml.cjs index aa5e3d22..a7262fb8 100644 --- a/sdk/js/script/install-winml.cjs +++ b/sdk/js/script/install-winml.cjs @@ -7,12 +7,10 @@ const { NUGET_FEED, ORT_NIGHTLY_FEED, runInstall } = require('./install-utils.cjs'); -const useNightly = process.env.npm_config_nightly === 'true'; - const ARTIFACTS = [ - { name: 'Microsoft.AI.Foundry.Local.Core.WinML', version: '1.0.0-rc1', feed: ORT_NIGHTLY_FEED, nightly: useNightly }, - { name: 'Microsoft.ML.OnnxRuntime.Foundry', version: '1.23.2.3', feed: NUGET_FEED, nightly: false }, - { name: 'Microsoft.ML.OnnxRuntimeGenAI.WinML', version: '0.12.2', feed: NUGET_FEED, nightly: false }, + { name: 'Microsoft.AI.Foundry.Local.Core.WinML', version: '1.0.0-rc2', feed: ORT_NIGHTLY_FEED }, + { name: 'Microsoft.ML.OnnxRuntime.Foundry', version: '1.23.2.3', feed: NUGET_FEED }, + { name: 'Microsoft.ML.OnnxRuntimeGenAI.Foundry', version: '0.13.0', feed: NUGET_FEED }, ]; (async () => { diff --git a/sdk/js/src/detail/coreInterop.ts b/sdk/js/src/detail/coreInterop.ts index b4cbf36c..cfca254d 100644 --- a/sdk/js/src/detail/coreInterop.ts +++ b/sdk/js/src/detail/coreInterop.ts @@ -19,7 +19,7 @@ koffi.struct('ResponseBuffer', { ErrorLength: 'int32_t', }); -const CallbackType = koffi.proto('void CallbackType(void *data, int32_t length, void *userData)'); +const CallbackType = koffi.proto('int32_t CallbackType(void *data, int32_t length, void *userData)'); const __filename = fileURLToPath(import.meta.url); const __dirname = 
path.dirname(__filename); @@ -139,8 +139,13 @@ export class CoreInterop { koffi.encode(dataBuf, 'char', dataStr, dataBytes.length + 1); const cb = koffi.register((data: any, length: number, userData: any) => { - const chunk = koffi.decode(data, 'char', length); - callback(chunk); + try { + const chunk = koffi.decode(data, 'char', length); + callback(chunk); + return 0; // continue + } catch { + return 1; // cancel on error + } }, koffi.pointer(CallbackType)); return new Promise((resolve, reject) => { diff --git a/sdk/python/requirements-winml.txt b/sdk/python/requirements-winml.txt index eb9d4aa1..b1cde215 100644 --- a/sdk/python/requirements-winml.txt +++ b/sdk/python/requirements-winml.txt @@ -2,6 +2,6 @@ pydantic>=2.0.0 requests>=2.32.4 openai>=2.24.0 # WinML native binary packages from the ORT-Nightly PyPI feed. -foundry-local-core-winml==1.0.0-rc1 -onnxruntime-core==1.24.3 -onnxruntime-genai-core==0.12.1 \ No newline at end of file +foundry-local-core-winml==1.0.0-rc2 +onnxruntime-core==1.23.2.3 +onnxruntime-genai-core==0.13.0 \ No newline at end of file diff --git a/sdk/python/requirements.txt b/sdk/python/requirements.txt index 1c0f62ac..acebf81d 100644 --- a/sdk/python/requirements.txt +++ b/sdk/python/requirements.txt @@ -2,6 +2,8 @@ pydantic>=2.0.0 requests>=2.32.4 openai>=2.24.0 # Standard native binary packages from the ORT-Nightly PyPI feed. 
-foundry-local-core==1.0.0-rc1 -onnxruntime-core==1.24.3 -onnxruntime-genai-core==0.12.1 \ No newline at end of file +foundry-local-core==1.0.0-rc2 +onnxruntime-core==1.24.4; sys_platform != "linux" +onnxruntime-gpu==1.24.4; sys_platform == "linux" +onnxruntime-genai-core==0.13.0; sys_platform != "linux" +onnxruntime-genai-cuda==0.13.0; sys_platform == "linux" \ No newline at end of file diff --git a/sdk/python/src/detail/core_interop.py b/sdk/python/src/detail/core_interop.py index 4f4ddb67..1cd53e33 100644 --- a/sdk/python/src/detail/core_interop.py +++ b/sdk/python/src/detail/core_interop.py @@ -79,9 +79,11 @@ def callback(data_ptr, length, self_ptr): data_bytes = ctypes.string_at(data_ptr, length) data_str = data_bytes.decode('utf-8') self._py_callback(data_str) + return 0 # continue except Exception as e: if self is not None and self.exception is None: self.exception = e # keep the first only as they are likely all the same + return 1 # cancel on error def __init__(self, py_callback: Callable[[str], None]): self._py_callback = py_callback @@ -103,8 +105,8 @@ class CoreInterop: instance = None # Callback function for native interop. - # This returns a string and its length, and an optional user provided object. - CALLBACK_TYPE = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p) + # Returns c_int: 0 = continue, 1 = cancel. + CALLBACK_TYPE = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p) @staticmethod def _initialize_native_libraries() -> 'NativeBinaryPaths': @@ -129,8 +131,9 @@ def _initialize_native_libraries() -> 'NativeBinaryPaths': logger.info("Native libraries found — Core: %s ORT: %s GenAI: %s", paths.core, paths.ort, paths.genai) - # Create the onnxruntime.dll symlink on Linux/macOS if needed. - # create_ort_symlinks(paths) + # Create compatibility symlinks on Linux/macOS so Core can resolve + # ORT/GenAI names regardless of package layout. 
+ create_ort_symlinks(paths) os.environ["ORT_LIB_PATH"] = str(paths.ort) # For ORT-GENAI to find ORT dependency if sys.platform.startswith("win"): diff --git a/sdk/python/src/detail/model_data_types.py b/sdk/python/src/detail/model_data_types.py index 46525dc7..e000c9c8 100644 --- a/sdk/python/src/detail/model_data_types.py +++ b/sdk/python/src/detail/model_data_types.py @@ -4,7 +4,7 @@ # -------------------------------------------------------------------------- from typing import Optional, List -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from enum import StrEnum @@ -53,6 +53,8 @@ class ModelInfo(BaseModel): Fields are populated from the JSON response of the ``get_model_list`` command. """ + model_config = ConfigDict(protected_namespaces=()) + id: str = Field(alias="id", description="Unique identifier of the model. Generally :") name: str = Field(alias="name", description="Model variant name") version: int = Field(alias="version") diff --git a/sdk/python/src/detail/utils.py b/sdk/python/src/detail/utils.py index 5a054610..5780cfc9 100644 --- a/sdk/python/src/detail/utils.py +++ b/sdk/python/src/detail/utils.py @@ -12,7 +12,6 @@ import argparse import importlib.util -import json import logging import os import sys @@ -90,9 +89,9 @@ def _find_file_in_package(package_name: str, filename: str) -> Path | None: # Quick checks for well-known sub-directories first for candidate_dir in (pkg_root, pkg_root / "capi", pkg_root / "native", pkg_root / "lib", pkg_root / "bin"): - candidate = candidate_dir / filename - if candidate.exists(): - return candidate + candidates = list(candidate_dir.glob(f"*{filename}*")) + if candidates: + return candidates[0] # Recursive fallback for match in pkg_root.rglob(filename): @@ -144,8 +143,18 @@ def get_native_binary_paths() -> NativeBinaryPaths | None: # Probe WinML packages first; fall back to standard if not installed. 
core_path = _find_file_in_package("foundry-local-core-winml", core_name) or _find_file_in_package("foundry-local-core", core_name) - ort_path = _find_file_in_package("onnxruntime-core", ort_name) - genai_path = _find_file_in_package("onnxruntime-genai-core", genai_name) + + # On Linux, ORT is shipped by onnxruntime-gpu (libonnxruntime.so in capi/). + if sys.platform.startswith("linux"): + ort_path = _find_file_in_package("onnxruntime", ort_name) or _find_file_in_package("onnxruntime-core", ort_name) + else: + ort_path = _find_file_in_package("onnxruntime-core", ort_name) + + # On Linux, ORTGenAI is shipped by onnxruntime-genai-cuda (libonnxruntime-genai.so in the package root). + if sys.platform.startswith("linux"): + genai_path = _find_file_in_package("onnxruntime-genai", genai_name) or _find_file_in_package("onnxruntime-genai-core", genai_name) + else: + genai_path = _find_file_in_package("onnxruntime-genai-core", genai_name) if core_path and ort_path and genai_path: return NativeBinaryPaths(core=core_path, ort=ort_path, genai=genai_path) @@ -254,6 +263,9 @@ def foundry_local_install(args: list[str] | None = None) -> None: if parsed.winml: variant = "WinML" packages = ["foundry-local-core-winml", "onnxruntime-core", "onnxruntime-genai-core"] + elif sys.platform.startswith("linux"): + variant = "Linux (GPU)" + packages = ["foundry-local-core", "onnxruntime-gpu", "onnxruntime-genai-cuda"] else: variant = "standard" packages = ["foundry-local-core", "onnxruntime-core", "onnxruntime-genai-core"] @@ -271,10 +283,18 @@ def foundry_local_install(args: list[str] | None = None) -> None: else: if _find_file_in_package("foundry-local-core", core_name) is None: missing.append("foundry-local-core") - if _find_file_in_package("onnxruntime-core", ort_name) is None: + if sys.platform.startswith("linux"): + if _find_file_in_package("onnxruntime", ort_name) is None: + missing.append("onnxruntime-gpu") + else: + if _find_file_in_package("onnxruntime-core", ort_name) is None: 
missing.append("onnxruntime-core") - if _find_file_in_package("onnxruntime-genai-core", genai_name) is None: - missing.append("onnxruntime-genai-core") + if sys.platform.startswith("linux"): + if _find_file_in_package("onnxruntime-genai", genai_name) is None: + missing.append("onnxruntime-genai-cuda") + else: + if _find_file_in_package("onnxruntime-genai-core", genai_name) is None: + missing.append("onnxruntime-genai-core") print( "[foundry-local] ERROR: Could not locate native binaries after installation. " f"Missing: {', '.join(missing)}", @@ -289,6 +309,3 @@ def foundry_local_install(args: list[str] | None = None) -> None: print(f" Core : {paths.core}") print(f" ORT : {paths.ort}") print(f" GenAI : {paths.genai}") - - - diff --git a/sdk/rust/build.rs b/sdk/rust/build.rs index 660985c8..15a9f906 100644 --- a/sdk/rust/build.rs +++ b/sdk/rust/build.rs @@ -7,9 +7,9 @@ const NUGET_FEED: &str = "https://api.nuget.org/v3/index.json"; const ORT_NIGHTLY_FEED: &str = "https://pkgs.dev.azure.com/aiinfra/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json"; -const CORE_VERSION: &str = "1.0.0-rc1"; -const ORT_VERSION: &str = "1.24.3"; -const GENAI_VERSION: &str = "0.13.0-dev-20260319-1131106-439ca0d5"; +const CORE_VERSION: &str = "1.0.0-rc2"; +const ORT_VERSION: &str = "1.24.4"; +const GENAI_VERSION: &str = "0.13.0"; const WINML_ORT_VERSION: &str = "1.23.2.3"; @@ -62,9 +62,9 @@ fn get_packages(rid: &str) -> Vec { feed_url: NUGET_FEED, }); packages.push(NuGetPackage { - name: "Microsoft.ML.OnnxRuntimeGenAI.WinML", + name: "Microsoft.ML.OnnxRuntimeGenAI.Foundry", version: GENAI_VERSION.to_string(), - feed_url: ORT_NIGHTLY_FEED, + feed_url: NUGET_FEED, }); } else { packages.push(NuGetPackage { @@ -90,7 +90,7 @@ fn get_packages(rid: &str) -> Vec { packages.push(NuGetPackage { name: "Microsoft.ML.OnnxRuntimeGenAI.Foundry", version: GENAI_VERSION.to_string(), - feed_url: ORT_NIGHTLY_FEED, + feed_url: NUGET_FEED, }); } diff --git a/sdk/rust/src/detail/core_interop.rs 
b/sdk/rust/src/detail/core_interop.rs index 75146164..43884d7f 100644 --- a/sdk/rust/src/detail/core_interop.rs +++ b/sdk/rust/src/detail/core_interop.rs @@ -52,7 +52,8 @@ impl ResponseBuffer { type ExecuteCommandFn = unsafe extern "C" fn(*const RequestBuffer, *mut ResponseBuffer); /// Signature for the streaming callback invoked by the native library. -type CallbackFn = unsafe extern "C" fn(*const u8, i32, *mut std::ffi::c_void); +/// Returns 0 to continue, 1 to cancel. +type CallbackFn = unsafe extern "C" fn(*const u8, i32, *mut std::ffi::c_void) -> i32; /// Signature for `execute_command_with_callback`. type ExecuteCommandWithCallbackFn = unsafe extern "C" fn( @@ -197,12 +198,12 @@ unsafe extern "C" fn streaming_trampoline( data: *const u8, length: i32, user_data: *mut std::ffi::c_void, -) { +) -> i32 { if data.is_null() || length <= 0 { - return; + return 0; } // catch_unwind prevents UB if the closure panics across the FFI boundary. - let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { + let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| { // SAFETY: `user_data` points to a `StreamingCallbackState` kept alive // by the caller of `execute_command_with_callback` for the duration of // the native call. @@ -212,6 +213,11 @@ unsafe extern "C" fn streaming_trampoline( let slice = std::slice::from_raw_parts(data, length as usize); state.push(slice); })); + if result.is_err() { + 1 + } else { + 0 + } } // ── CoreInterop ──────────────────────────────────────────────────────────────