diff --git a/.github/workflows/build-cs-steps.yml b/.github/workflows/build-cs-steps.yml
new file mode 100644
index 0000000..a77b9d5
--- /dev/null
+++ b/.github/workflows/build-cs-steps.yml
@@ -0,0 +1,118 @@
+name: Build C# SDK
+
+on:
+ workflow_call:
+ inputs:
+ version:
+ required: true
+ type: string
+ useWinML:
+ required: false
+ type: boolean
+ default: false
+ buildConfiguration:
+ required: false
+ type: string
+ default: 'Debug' # or 'Release'
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+ runs-on: windows-latest
+ env:
+ buildConfiguration: ${{ inputs.buildConfiguration }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ clean: true
+
+ # adapted from https://github.com/actions/setup-dotnet?tab=readme-ov-file#azure-artifacts
+ # saves auth credentials to ../nuget.config
+ - name: Setup .NET 9 SDK
+ uses: actions/setup-dotnet@v5
+ with:
+ dotnet-version: '9.0.x'
+ source-url: https://pkgs.dev.azure.com/microsoft/windows.ai.toolkit/_packaging/Neutron/nuget/v3/index.json
+ env:
+ NUGET_AUTH_TOKEN: ${{ secrets.AZURE_DEVOPS_PAT }}
+
+ - name: Restore dependencies
+ run: |
+ dotnet restore sdk_v2\cs\src\Microsoft.AI.Foundry.Local.csproj /p:UseWinML=${{ inputs.useWinML }} --configfile ../nuget.config
+
+ - name: Build solution
+ run: |
+ dotnet build sdk_v2\cs\src\Microsoft.AI.Foundry.Local.csproj --configfile ../nuget.config --no-restore --configuration ${{ inputs.buildConfiguration }} /p:UseWinML=${{ inputs.useWinML }}
+
+ # need to use direct git commands to clone from Azure DevOps instead of actions/checkout
+ - name: Checkout test-data-shared from Azure DevOps
+ shell: pwsh
+ working-directory: ${{ github.workspace }}\..
+ run: |
+ $pat = "${{ secrets.AZURE_DEVOPS_PAT }}"
+ $encodedPat = [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes(":$pat"))
+
+ # Configure git to use the PAT
+ git config --global http.https://dev.azure.com.extraheader "AUTHORIZATION: Basic $encodedPat"
+
+ # Clone with LFS to parent directory
+ git lfs install
+ git clone --depth 1 https://dev.azure.com/microsoft/windows.ai.toolkit/_git/test-data-shared test-data-shared
+
+ Write-Host "Clone completed successfully to ${{ github.workspace }}\..\test-data-shared"
+
+ - name: Checkout specific commit in test-data-shared
+ shell: pwsh
+ working-directory: ${{ github.workspace }}\..\test-data-shared
+ run: |
+ Write-Host "Current directory: $(Get-Location)"
+ git checkout 231f820fe285145b7ea4a449b112c1228ce66a41
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "Git checkout failed."
+ exit 1
+ }
+ Write-Host "`nDirectory contents:"
+ Get-ChildItem -Recurse -Depth 2 | ForEach-Object { Write-Host " $($_.FullName)" }
+
+ - name: Run Foundry Local Core tests
+ run: |
+ dotnet test sdk_v2\cs\test\FoundryLocal.Tests\Microsoft.AI.Foundry.Local.Tests.csproj --configuration ${{ inputs.buildConfiguration }} --verbosity normal /p:UseWinML=${{ inputs.useWinML }}
+
+ - name: Pack NuGet package
+ shell: pwsh
+ run: |
+ $projectPath = "sdk_v2\cs\src\Microsoft.AI.Foundry.Local.csproj"
+ $outputDir = "sdk_v2\cs\bin"
+ $version = "${{ inputs.version }}"
+ $config = "${{ inputs.buildConfiguration }}"
+ $useWinML = "${{ inputs.useWinML }}"
+
+ Write-Host "Packing project: $projectPath"
+ Write-Host "Output directory: $outputDir"
+ Write-Host "Version: $version"
+ Write-Host "Configuration: $config"
+ Write-Host "UseWinML: $useWinML"
+
+ & dotnet pack $projectPath --no-build --configuration $config --output $outputDir /p:PackageVersion=$version /p:UseWinML=$useWinML /p:IncludeSymbols=true /p:SymbolPackageFormat=snupkg --verbosity normal
+
+ if ($LASTEXITCODE -ne 0) {
+ Write-Error "dotnet pack failed with exit code $LASTEXITCODE"
+ exit $LASTEXITCODE
+ }
+
+ Write-Host "Pack completed successfully"
+ Write-Host "Generated packages:"
+ Get-ChildItem -Path $outputDir -Filter "*.nupkg" | ForEach-Object { Write-Host " $($_.Name)" }
+ Get-ChildItem -Path $outputDir -Filter "*.snupkg" | ForEach-Object { Write-Host " $($_.Name)" }
+
+ - name: Upload NuGet packages
+ uses: actions/upload-artifact@v4
+ with:
+ name: cs-sdk
+ path: |
+ sdk_v2\cs\bin\*.nupkg
+ sdk_v2\cs\bin\*.snupkg
\ No newline at end of file
diff --git a/.github/workflows/foundry-local-sdk-build.yml b/.github/workflows/foundry-local-sdk-build.yml
new file mode 100644
index 0000000..e9e1539
--- /dev/null
+++ b/.github/workflows/foundry-local-sdk-build.yml
@@ -0,0 +1,24 @@
+name: SDK V2 Build
+
+on:
+ pull_request:
+ paths:
+ - 'sdk_v2/**'
+ - '.github/workflows/*.yml'
+ push:
+ paths:
+ - 'sdk_v2/**'
+ - '.github/workflows/*.yml'
+ branches:
+ - main
+ workflow_dispatch:
+
+permissions:
+ contents: read
+
+jobs:
+ build-cs:
+ uses: ./.github/workflows/build-cs-steps.yml
+ with:
+ version: '0.9.0.${{ github.run_number }}'
+ secrets: inherit
diff --git a/.github/workflows/sdk_v2/foundry-local-sdk-build.yml b/.github/workflows/sdk_v2/foundry-local-sdk-build.yml
deleted file mode 100644
index 4816dbf..0000000
--- a/.github/workflows/sdk_v2/foundry-local-sdk-build.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: SDK V2 Build
-
-on:
- pull_request:
- paths:
- # - 'sdk_v2/**'
- - '.github/workflows/sdk_v2/**'
- push:
- paths:
- # - 'sdk_v2/**'
- - '.github/workflows/sdk_v2/**'
- branches:
- - main
- workflow_dispatch: # allows manual triggering from GitHub UI
-
-jobs:
- build-cs:
- uses: ./.github/workflows/sdk_v2/templates/build-cs-steps.yml
\ No newline at end of file
diff --git a/.github/workflows/sdk_v2/templates/build-cs-steps.yml b/.github/workflows/sdk_v2/templates/build-cs-steps.yml
deleted file mode 100644
index f860271..0000000
--- a/.github/workflows/sdk_v2/templates/build-cs-steps.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-name: Build C# SDK
-
-on:
- workflow_call:
-
-jobs:
- build:
- runs-on: windows-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v4
- with:
- clean: true
\ No newline at end of file
diff --git a/sdk_v2/cs/.editorconfig b/sdk_v2/cs/.editorconfig
new file mode 100644
index 0000000..a14a741
--- /dev/null
+++ b/sdk_v2/cs/.editorconfig
@@ -0,0 +1,349 @@
+# EditorConfig is awesome: https://EditorConfig.org
+
+# top-most EditorConfig file
+root = true
+
+# Don't use tabs for indentation.
+[*]
+indent_style = space
+# (Please don't specify an indent_size here; that has too many unintended consequences.)
+
+# Documentation files
+[*.md]
+indent_size = 4
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+# Code files
+[*.{cs,csx,vb,vbx,h,cpp}]
+indent_size = 4
+insert_final_newline = true
+charset = utf-8-bom
+trim_trailing_whitespace = true
+
+# Adds guidelines for the EditorGuidelines VS extension. See https://github.com/pharring/EditorGuidelines.
+guidelines = 80, 120
+
+file_header_template = --------------------------------------------------------------------------------------------------------------------\n\n Copyright (c) Microsoft. All rights reserved.\n\n--------------------------------------------------------------------------------------------------------------------
+
+# XML project files
+[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}]
+indent_size = 2
+
+# XML config files
+[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}]
+indent_size = 2
+
+# YAML files
+[*.{yml,yaml}]
+indent_size = 2
+insert_final_newline = true
+trim_trailing_whitespace = true
+
+# JSON files
+[*.json]
+indent_size = 2
+
+# Shell script files
+[*.sh]
+end_of_line = lf
+indent_size = 2
+
+# Dotnet code style settings:
+[*.{cs,vb}]
+
+# Sort using and Import directives with System.* appearing first
+dotnet_sort_system_directives_first = true
+dotnet_separate_import_directive_groups = true
+# Avoid "this." and "Me." if not necessary
+dotnet_style_qualification_for_field = false:error
+dotnet_style_qualification_for_property = false:error
+dotnet_style_qualification_for_method = false:error
+dotnet_style_qualification_for_event = false:error
+
+# Use language keywords instead of framework type names for type references
+dotnet_style_predefined_type_for_locals_parameters_members = true:error
+dotnet_style_predefined_type_for_member_access = true:error
+
+# Suggest more modern language features when available
+dotnet_style_object_initializer = true:error
+dotnet_style_collection_initializer = true:error
+dotnet_style_coalesce_expression = true:error
+dotnet_style_null_propagation = true:error
+dotnet_style_explicit_tuple_names = true:error
+
+# Non-private static fields are PascalCase
+dotnet_naming_rule.non_private_static_fields_should_be_pascal_case.severity = error
+dotnet_naming_rule.non_private_static_fields_should_be_pascal_case.symbols = non_private_static_fields
+dotnet_naming_rule.non_private_static_fields_should_be_pascal_case.style = non_private_static_field_style
+
+dotnet_naming_symbols.non_private_static_fields.applicable_kinds = field
+dotnet_naming_symbols.non_private_static_fields.applicable_accessibilities = public, protected, internal, protected internal, private protected
+dotnet_naming_symbols.non_private_static_fields.required_modifiers = static
+
+dotnet_naming_style.non_private_static_field_style.capitalization = pascal_case
+
+# Non-private fields are PascalCase
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.severity = error
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.symbols = non_private_readonly_fields
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.style = non_private_readonly_field_style
+
+dotnet_naming_symbols.non_private_readonly_fields.applicable_kinds = field
+dotnet_naming_symbols.non_private_readonly_fields.applicable_accessibilities = public, protected, internal, protected internal, private protected
+
+dotnet_naming_style.non_private_readonly_field_style.capitalization = pascal_case
+
+# Constants are PascalCase
+dotnet_naming_rule.constants_should_be_pascal_case.severity = error
+dotnet_naming_rule.constants_should_be_pascal_case.symbols = constants
+dotnet_naming_rule.constants_should_be_pascal_case.style = constant_style
+
+dotnet_naming_symbols.constants.applicable_kinds = field
+dotnet_naming_symbols.constants.required_modifiers = const
+
+dotnet_naming_style.constant_style.capitalization = pascal_case
+
+# Static fields are camelCase and start with s_
+dotnet_naming_rule.static_fields_should_be_camel_case.severity = suggestion
+dotnet_naming_rule.static_fields_should_be_camel_case.symbols = static_fields
+dotnet_naming_rule.static_fields_should_be_camel_case.style = static_field_style
+
+dotnet_naming_symbols.static_fields.applicable_kinds = field
+dotnet_naming_symbols.static_fields.required_modifiers = static
+
+dotnet_naming_style.static_field_style.capitalization = camel_case
+# dotnet_naming_style.static_field_style.required_prefix = s_
+
+# Instance fields are camelCase and start with _
+dotnet_naming_rule.instance_fields_should_be_camel_case.severity = error
+dotnet_naming_rule.instance_fields_should_be_camel_case.symbols = instance_fields
+dotnet_naming_rule.instance_fields_should_be_camel_case.style = instance_field_style
+
+dotnet_naming_symbols.instance_fields.applicable_kinds = field
+
+dotnet_naming_style.instance_field_style.capitalization = camel_case
+dotnet_naming_style.instance_field_style.required_prefix = _
+
+# Locals and parameters are camelCase
+dotnet_naming_rule.locals_should_be_camel_case.severity = error
+dotnet_naming_rule.locals_should_be_camel_case.symbols = locals_and_parameters
+dotnet_naming_rule.locals_should_be_camel_case.style = camel_case_style
+
+dotnet_naming_symbols.locals_and_parameters.applicable_kinds = parameter, local
+
+dotnet_naming_style.camel_case_style.capitalization = camel_case
+
+# Local functions are PascalCase
+dotnet_naming_rule.local_functions_should_be_pascal_case.severity = error
+dotnet_naming_rule.local_functions_should_be_pascal_case.symbols = local_functions
+dotnet_naming_rule.local_functions_should_be_pascal_case.style = local_function_style
+
+dotnet_naming_symbols.local_functions.applicable_kinds = local_function
+
+dotnet_naming_style.local_function_style.capitalization = pascal_case
+
+# By default, name items with PascalCase
+dotnet_naming_rule.members_should_be_pascal_case.severity = error
+dotnet_naming_rule.members_should_be_pascal_case.symbols = all_members
+dotnet_naming_rule.members_should_be_pascal_case.style = pascal_case_style
+
+dotnet_naming_symbols.all_members.applicable_kinds = *
+
+dotnet_naming_style.pascal_case_style.capitalization = pascal_case
+
+# CSharp code style settings:
+# IDE0045: Convert to conditional expression
+dotnet_diagnostic.IDE0045.severity = suggestion
+
+[*.cs]
+# Indentation preferences
+csharp_indent_block_contents = true
+csharp_indent_braces = false
+csharp_indent_case_contents = true
+csharp_indent_case_contents_when_block = true
+csharp_indent_switch_labels = true
+csharp_indent_labels = flush_left
+
+# Prefer "var" nowhere
+csharp_style_var_for_built_in_types = true:error
+csharp_style_var_when_type_is_apparent = true:error
+csharp_style_var_elsewhere = false:error
+csharp_style_implicit_object_creation_when_type_is_apparent = true:error
+
+# Prefer method-like constructs to have a block body
+csharp_style_expression_bodied_methods = false:none
+csharp_style_expression_bodied_constructors = false:none
+csharp_style_expression_bodied_operators = false:none
+
+# Code-block preferences
+csharp_style_namespace_declarations = file_scoped:error
+
+# Unused value expressions
+csharp_style_unused_value_expression_statement_preference = discard_variable:warning
+
+# 'using' directive preferences
+csharp_using_directive_placement = inside_namespace:error
+
+# Prefer property-like constructs to have an expression-body
+csharp_style_expression_bodied_properties = true:none
+csharp_style_expression_bodied_indexers = true:none
+csharp_style_expression_bodied_accessors = true:none
+
+# Suggest more modern language features when available
+csharp_style_pattern_matching_over_is_with_cast_check = true:error
+csharp_style_pattern_matching_over_as_with_null_check = true:error
+csharp_style_inlined_variable_declaration = true:error
+csharp_style_throw_expression = true:error
+csharp_style_conditional_delegate_call = true:error
+
+# Newline settings
+csharp_new_line_before_open_brace = all
+csharp_new_line_before_else = true
+csharp_new_line_before_catch = true
+csharp_new_line_before_finally = true
+csharp_new_line_before_members_in_object_initializers = true
+csharp_new_line_before_members_in_anonymous_types = true
+csharp_new_line_between_query_expression_clauses = true
+
+# Spacing
+csharp_space_after_cast = false
+csharp_space_after_colon_in_inheritance_clause = true
+csharp_space_after_keywords_in_control_flow_statements = true
+csharp_space_around_binary_operators = before_and_after
+csharp_space_before_colon_in_inheritance_clause = true
+csharp_space_between_method_call_empty_parameter_list_parentheses = false
+csharp_space_between_method_call_name_and_opening_parenthesis = false
+csharp_space_between_method_call_parameter_list_parentheses = false
+csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
+csharp_space_between_method_declaration_parameter_list_parentheses = false
+csharp_space_between_parentheses = false
+
+# Blocks are allowed
+csharp_prefer_braces = true:silent
+csharp_preserve_single_line_blocks = true
+csharp_preserve_single_line_statements = true
+
+# Build severity configuration
+# Everything above essentially configures IDE behavior and will not reflect in the build.
+# https://docs.microsoft.com/dotnet/fundamentals/code-analysis/configuration-options
+
+# Default severity for all analyzer diagnostics
+dotnet_analyzer_diagnostic.severity = warning
+
+# SA1600: Elements should be documented
+dotnet_diagnostic.SA1600.severity = suggestion
+
+# CS1591: Missing XML comment for publicly visible type or member
+dotnet_diagnostic.CS1591.severity = silent
+
+# CA1303: Do not pass literals as localized parameters
+dotnet_diagnostic.CA1303.severity = silent
+
+# CA2007: Consider calling ConfigureAwait on the awaited task
+dotnet_diagnostic.CA2007.severity = silent
+
+# SA1402: File may only contain a single type
+dotnet_diagnostic.SA1402.severity = none
+
+# SA1101: Prefix local calls with this
+dotnet_diagnostic.SA1101.severity = none
+
+# SA1649: File name should match first type name
+dotnet_diagnostic.SA1649.severity = error
+
+# SA1309: Field names should not begin with underscore
+dotnet_diagnostic.SA1309.severity = none
+
+# CA1062: Validate arguments of public methods
+dotnet_diagnostic.CA1062.severity = silent
+
+# CA1707: Identifiers should not contain underscores
+dotnet_diagnostic.CA1707.severity = silent
+
+# CA1031: Do not catch general exception types
+dotnet_diagnostic.CA1031.severity = suggestion
+
+# CA1822: Mark members as static
+dotnet_diagnostic.CA1822.severity = suggestion
+
+# CA1815: Override equals and operator equals on value types
+dotnet_diagnostic.CA1815.severity = suggestion
+
+# SA1201: Elements should appear in the correct order
+dotnet_diagnostic.SA1201.severity = silent
+
+# SA1602: Enumeration items should be documented
+dotnet_diagnostic.SA1602.severity = suggestion
+
+# SA1118: Parameter should not span multiple lines
+dotnet_diagnostic.SA1118.severity = suggestion
+
+# CA2201: Do not raise reserved exception types
+dotnet_diagnostic.CA2201.severity = suggestion
+
+# CA1050: Declare types in namespaces
+dotnet_diagnostic.CA1050.severity = suggestion
+
+# IDE0005: Remove unnecessary import
+dotnet_diagnostic.IDE0005.severity = error
+
+# IDE1006: Naming Styles
+dotnet_diagnostic.IDE1006.severity = error
+
+# IDE0008: Use explicit type
+dotnet_diagnostic.IDE0008.severity = silent
+
+# IDE0090: Use 'new(...)'
+dotnet_diagnostic.IDE0090.severity = error
+
+# IDE0072: Add missing cases
+## Suppressing this particular case due to issues in the analyzer's understanding of pattern matching.
+dotnet_diagnostic.IDE0072.severity = suggestion
+
+# CA2000: Dispose objects before losing scope
+dotnet_diagnostic.CA2000.severity = warning
+
+# IDE0046: Convert to conditional expression
+dotnet_diagnostic.IDE0046.severity = silent
+
+# IDE0050: Convert to tuple
+dotnet_diagnostic.IDE0050.severity = suggestion
+
+# IDE0066: Convert switch statement to expression
+dotnet_diagnostic.IDE0066.severity = suggestion
+
+# IDE0130: Namespace does not match folder structure
+dotnet_diagnostic.IDE0130.severity = silent
+
+# IDE0161: Convert to file-scoped namespace
+dotnet_diagnostic.IDE0161.severity = error
+
+# IDE0058: Expression value is never used
+dotnet_diagnostic.IDE0058.severity = none
+
+# VSTHRD111: Use ConfigureAwait(bool)
+dotnet_diagnostic.VSTHRD111.severity = suggestion
+
+# IDE0042: Deconstruct variable declaration
+dotnet_diagnostic.IDE0042.severity = suggestion
+
+# IDE0039: Use local function
+dotnet_diagnostic.IDE0039.severity = suggestion
+
+# CA1848: Use the LoggerMessage delegates
+dotnet_diagnostic.CA1848.severity = suggestion
+
+# CA2254: Template should be a static expression
+dotnet_diagnostic.CA2254.severity = suggestion
+
+# IDE0290: Use primary constructor
+dotnet_diagnostic.IDE0290.severity = suggestion
+
+# CA1711: Identifiers should not have incorrect suffix
+dotnet_diagnostic.CA1711.severity = suggestion
+
+# IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.IDE0305.severity = suggestion
+
+# Unused value expressions (NOTE: duplicates the csharp_style_unused_value_expression_statement_preference setting earlier in this [*.cs] section; this later value wins)
+csharp_style_unused_value_expression_statement_preference = unused_local_variable:none
diff --git a/sdk_v2/cs/.gitignore b/sdk_v2/cs/.gitignore
new file mode 100644
index 0000000..b3ed4ac
--- /dev/null
+++ b/sdk_v2/cs/.gitignore
@@ -0,0 +1,295 @@
+# Custom
+.dotnet/
+artifacts/
+.build/
+.vscode/
+
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+# User-specific files
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+
+# Visual Studio 2015 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUNIT
+*.VisualState.xml
+TestResult.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+**/Properties/launchSettings.json
+
+*_i.c
+*_p.c
+*_i.h
+*.ilk
+*.meta
+*.obj
+*.pch
+*.pdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# JustCode is a .NET coding add-in
+.JustCode
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# TODO: Comment the next line if you want to checkin your web deploy settings
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# The packages folder can be ignored because of Package Restore
+**/packages/*
+# except build/, which is used as an MSBuild target.
+!**/packages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/packages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+orleans.codegen.cs
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+
+# SQL Server files
+*.mdf
+*.ldf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Typescript v1 declaration files
+typings/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# JetBrains Rider
+.idea/
+*.sln.iml
+
+# CodeRush
+.cr/
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Perfview trace
+*.etl.zip
+*.orig
+/src/BenchmarksDriver/results.md
+*.trace.zip
+/src/BenchmarksDriver/*.zip
+eventpipe.netperf
+*.netperf
+*.bench.json
+BenchmarkDotNet.Artifacts/
\ No newline at end of file
diff --git a/sdk_v2/cs/LICENSE.txt b/sdk_v2/cs/LICENSE.txt
new file mode 100644
index 0000000..48bc6bb
--- /dev/null
+++ b/sdk_v2/cs/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Microsoft Corporation
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/sdk_v2/cs/Microsoft.AI.Foundry.Local.SDK.sln b/sdk_v2/cs/Microsoft.AI.Foundry.Local.SDK.sln
new file mode 100644
index 0000000..2958f0d
--- /dev/null
+++ b/sdk_v2/cs/Microsoft.AI.Foundry.Local.SDK.sln
@@ -0,0 +1,39 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31903.59
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.AI.Foundry.Local", "src\Microsoft.AI.Foundry.Local.csproj", "{247537D6-CBBA-C748-B91D-AA7B236563B4}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{0C88DD14-F956-CE84-757C-A364CCF449FC}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.AI.Foundry.Local.Tests", "test\FoundryLocal.Tests\Microsoft.AI.Foundry.Local.Tests.csproj", "{CD75C56B-0EB9-41F4-BEE0-9D7C674894CC}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{02EA681E-C7D8-13C7-8484-4AC65E1B71E8}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {247537D6-CBBA-C748-B91D-AA7B236563B4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {247537D6-CBBA-C748-B91D-AA7B236563B4}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {247537D6-CBBA-C748-B91D-AA7B236563B4}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {247537D6-CBBA-C748-B91D-AA7B236563B4}.Release|Any CPU.Build.0 = Release|Any CPU
+ {CD75C56B-0EB9-41F4-BEE0-9D7C674894CC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {CD75C56B-0EB9-41F4-BEE0-9D7C674894CC}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {CD75C56B-0EB9-41F4-BEE0-9D7C674894CC}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {CD75C56B-0EB9-41F4-BEE0-9D7C674894CC}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+ GlobalSection(NestedProjects) = preSolution
+ {247537D6-CBBA-C748-B91D-AA7B236563B4} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8}
+ {CD75C56B-0EB9-41F4-BEE0-9D7C674894CC} = {0C88DD14-F956-CE84-757C-A364CCF449FC}
+ EndGlobalSection
+ GlobalSection(ExtensibilityGlobals) = postSolution
+ SolutionGuid = {0138DEC3-F200-43EC-A1A2-6FD8F2C609CB}
+ EndGlobalSection
+EndGlobal
diff --git a/sdk_v2/cs/README.md b/sdk_v2/cs/README.md
new file mode 100644
index 0000000..0297937
--- /dev/null
+++ b/sdk_v2/cs/README.md
@@ -0,0 +1,59 @@
+# Foundry Local C# SDK
+
+## Installation
+
+To use the Foundry Local C# SDK, you need to install the NuGet package:
+
+```bash
+dotnet add package Microsoft.AI.Foundry.Local
+```
+
+### Building from source
+To build the SDK, run the following command in your terminal:
+
+```bash
+cd sdk_v2/cs
+dotnet build src/Microsoft.AI.Foundry.Local.csproj
+```
+
+You can also load [Microsoft.AI.Foundry.Local.SDK.sln](./Microsoft.AI.Foundry.Local.SDK.sln) in Visual Studio 2022 or VSCode.
+
+## Usage
+
+> [!NOTE]
+> For this example, you'll need the OpenAI NuGet package installed as well:
+> ```bash
+> dotnet add package OpenAI
+> ```
+
+```csharp
+using Microsoft.AI.Foundry.Local;
+using OpenAI;
+using OpenAI.Chat;
+using System.ClientModel;
+using System.Diagnostics.Metrics;
+
+var alias = "phi-3.5-mini";
+
+var manager = await FoundryLocalManager.StartModelAsync(aliasOrModelId: alias);
+
+var model = await manager.GetModelInfoAsync(aliasOrModelId: alias);
+ApiKeyCredential key = new ApiKeyCredential(manager.ApiKey);
+OpenAIClient client = new OpenAIClient(key, new OpenAIClientOptions
+{
+ Endpoint = manager.Endpoint
+});
+
+var chatClient = client.GetChatClient(model?.ModelId);
+
+var completionUpdates = chatClient.CompleteChatStreaming("Why is the sky blue?");
+
+Console.Write($"[ASSISTANT]: ");
+foreach (var completionUpdate in completionUpdates)
+{
+ if (completionUpdate.ContentUpdate.Count > 0)
+ {
+ Console.Write(completionUpdate.ContentUpdate[0].Text);
+ }
+}
+```
diff --git a/sdk_v2/cs/src/AssemblyInfo.cs b/sdk_v2/cs/src/AssemblyInfo.cs
new file mode 100644
index 0000000..9bebe71
--- /dev/null
+++ b/sdk_v2/cs/src/AssemblyInfo.cs
@@ -0,0 +1,10 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+using System.Runtime.CompilerServices;
+
+[assembly: InternalsVisibleTo("Microsoft.AI.Foundry.Local.Tests")]
+[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")] // for Mock of ICoreInterop
diff --git a/sdk_v2/cs/src/Catalog.cs b/sdk_v2/cs/src/Catalog.cs
new file mode 100644
index 0000000..eb9ba0d
--- /dev/null
+++ b/sdk_v2/cs/src/Catalog.cs
@@ -0,0 +1,200 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Threading.Tasks;
+
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.Extensions.Logging;
+
+internal sealed class Catalog : ICatalog, IDisposable
+{
+ private readonly Dictionary<string, Model> _modelAliasToModel = new();
+ private readonly Dictionary<string, ModelVariant> _modelIdToModelVariant = new();
+ private DateTime _lastFetch;
+
+ private readonly IModelLoadManager _modelLoadManager;
+ private readonly ICoreInterop _coreInterop;
+ private readonly ILogger _logger;
+ private readonly AsyncLock _lock = new();
+
+ public string Name { get; init; }
+
+ private Catalog(IModelLoadManager modelLoadManager, ICoreInterop coreInterop, ILogger logger)
+ {
+ _modelLoadManager = modelLoadManager;
+ _coreInterop = coreInterop;
+ _logger = logger;
+
+ _lastFetch = DateTime.MinValue;
+
+ CoreInteropRequest? input = null;
+ var response = coreInterop.ExecuteCommand("get_catalog_name", input);
+ if (response.Error != null)
+ {
+ throw new FoundryLocalException($"Error getting catalog name: {response.Error}", _logger);
+ }
+
+ Name = response.Data!;
+ }
+
+ internal static async Task<Catalog> CreateAsync(IModelLoadManager modelManager, ICoreInterop coreInterop,
+ ILogger logger, CancellationToken? ct = null)
+ {
+ var catalog = new Catalog(modelManager, coreInterop, logger);
+ await catalog.UpdateModels(ct).ConfigureAwait(false);
+ return catalog;
+ }
+
+ public async Task<List<Model>> ListModelsAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => ListModelsImplAsync(ct),
+ "Error listing models.", _logger).ConfigureAwait(false);
+ }
+
+ public async Task<List<ModelVariant>> GetCachedModelsAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => GetCachedModelsImplAsync(ct),
+ "Error getting cached models.", _logger).ConfigureAwait(false);
+ }
+
+ public async Task<List<ModelVariant>> GetLoadedModelsAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => GetLoadedModelsImplAsync(ct),
+ "Error getting loaded models.", _logger).ConfigureAwait(false);
+ }
+
+ public async Task<Model?> GetModelAsync(string modelAlias, CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => GetModelImplAsync(modelAlias, ct),
+ $"Error getting model with alias '{modelAlias}'.", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task<ModelVariant?> GetModelVariantAsync(string modelId, CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => GetModelVariantImplAsync(modelId, ct),
+ $"Error getting model variant with ID '{modelId}'.", _logger)
+ .ConfigureAwait(false);
+ }
+
+ private async Task<List<Model>> ListModelsImplAsync(CancellationToken? ct = null)
+ {
+ await UpdateModels(ct).ConfigureAwait(false);
+
+ using var disposable = await _lock.LockAsync().ConfigureAwait(false);
+ return _modelAliasToModel.Values.OrderBy(m => m.Alias).ToList();
+ }
+
+ private async Task<List<ModelVariant>> GetCachedModelsImplAsync(CancellationToken? ct = null)
+ {
+ var cachedModelIds = await Utils.GetCachedModelIdsAsync(_coreInterop, ct).ConfigureAwait(false);
+
+ List<ModelVariant> cachedModels = new();
+ foreach (var modelId in cachedModelIds)
+ {
+ if (_modelIdToModelVariant.TryGetValue(modelId, out ModelVariant? modelVariant))
+ {
+ cachedModels.Add(modelVariant);
+ }
+ }
+
+ return cachedModels;
+ }
+
+ private async Task<List<ModelVariant>> GetLoadedModelsImplAsync(CancellationToken? ct = null)
+ {
+ var loadedModelIds = await _modelLoadManager.ListLoadedModelsAsync(ct).ConfigureAwait(false);
+ List<ModelVariant> loadedModels = new();
+
+ foreach (var modelId in loadedModelIds)
+ {
+ if (_modelIdToModelVariant.TryGetValue(modelId, out ModelVariant? modelVariant))
+ {
+ loadedModels.Add(modelVariant);
+ }
+ }
+
+ return loadedModels;
+ }
+
+ private async Task<Model?> GetModelImplAsync(string modelAlias, CancellationToken? ct = null)
+ {
+ await UpdateModels(ct).ConfigureAwait(false);
+
+ using var disposable = await _lock.LockAsync().ConfigureAwait(false);
+ _modelAliasToModel.TryGetValue(modelAlias, out Model? model);
+
+ return model;
+ }
+
+ private async Task<ModelVariant?> GetModelVariantImplAsync(string modelId, CancellationToken? ct = null)
+ {
+ await UpdateModels(ct).ConfigureAwait(false);
+
+ using var disposable = await _lock.LockAsync().ConfigureAwait(false);
+ _modelIdToModelVariant.TryGetValue(modelId, out ModelVariant? modelVariant);
+ return modelVariant;
+ }
+
+ private async Task UpdateModels(CancellationToken? ct)
+ {
+ // TODO: make this configurable
+ if (DateTime.Now - _lastFetch < TimeSpan.FromHours(6))
+ {
+ return;
+ }
+
+ CoreInteropRequest? input = null;
+ var result = await _coreInterop.ExecuteCommandAsync("get_model_list", input, ct).ConfigureAwait(false);
+
+ if (result.Error != null)
+ {
+ throw new FoundryLocalException($"Error getting models: {result.Error}", _logger);
+ }
+
+ var models = JsonSerializer.Deserialize(result.Data!, JsonSerializationContext.Default.ListModelInfo);
+ if (models == null)
+ {
+ _logger.LogDebug($"ListModelInfo deserialization error in UpdateModels. Data: {result.Data}");
+ throw new FoundryLocalException($"Failed to deserialize models from response.", _logger);
+ }
+
+ using var disposable = await _lock.LockAsync().ConfigureAwait(false);
+
+ // TODO: Do we need to clear this out, or can we just add new models?
+ _modelAliasToModel.Clear();
+ _modelIdToModelVariant.Clear();
+
+ foreach (var modelInfo in models)
+ {
+ var variant = new ModelVariant(modelInfo, _modelLoadManager, _coreInterop, _logger);
+
+ var existingModel = _modelAliasToModel.TryGetValue(modelInfo.Alias, out Model? value);
+ if (!existingModel)
+ {
+ value = new Model(variant, _logger);
+ _modelAliasToModel[modelInfo.Alias] = value;
+ }
+ else
+ {
+ value!.AddVariant(variant);
+ }
+
+ _modelIdToModelVariant[variant.Id] = variant;
+ }
+
+ _lastFetch = DateTime.Now;
+ }
+
+ public void Dispose()
+ {
+ _lock.Dispose();
+ }
+}
diff --git a/sdk_v2/cs/src/Configuration.cs b/sdk_v2/cs/src/Configuration.cs
new file mode 100644
index 0000000..5b481bf
--- /dev/null
+++ b/sdk_v2/cs/src/Configuration.cs
@@ -0,0 +1,164 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+public class Configuration
+{
+ ///
+ /// Your application name. MUST be set to a valid name.
+ ///
+ public required string AppName { get; set; }
+
+ ///
+ /// Application data directory.
+ /// Default: {home}/.{appname}, where {home} is the user's home directory and {appname} is the AppName value.
+ ///
+ public string? AppDataDir { get; init; }
+
+ ///
+ /// Model cache directory.
+ /// Default: {appdata}/cache/models, where {appdata} is the AppDataDir value.
+ ///
+ public string? ModelCacheDir { get; init; }
+
+ ///
+ /// Log directory.
+ /// Default: {appdata}/logs
+ ///
+ public string? LogsDir { get; init; }
+
+ ///
+ /// Logging level.
+ /// Valid values are: Verbose, Debug, Information, Warning, Error, Fatal.
+ /// Default: LogLevel.Warning
+ ///
+ public LogLevel LogLevel { get; init; } = LogLevel.Warning;
+
+ ///
+ /// Enable manual execution provider download mode. Only meaningful if using WinML.
+ ///
+ /// Default: false
+ ///
+ /// When false, EPs are downloaded automatically in the background when FoundryLocalManager is created.
+ /// When true, EPs are downloaded when FoundryLocalManager.EnsureEpsDownloadedAsync or GetCatalogAsync are called.
+ ///
+ /// Once an EP is downloaded it will not be re-downloaded unless a new version is available.
+ ///
+ // DISABLED: We want to make sure this is required before making it public as supporting this complicates the
+ // Core implementation. Can be specified via AdditionalSettings if needed for testing.
+ // public bool ManualEpDownload { get; init; }
+
+ ///
+ /// Optional configuration for the built-in web service.
+ /// NOTE: This is not included in all builds.
+ ///
+ public WebService? Web { get; init; }
+
+ ///
+ /// Additional settings that Foundry Local Core can consume.
+ /// Keys and values are strings.
+ ///
+ public IDictionary<string, string?>? AdditionalSettings { get; init; }
+
+ ///
+ /// Configuration settings if the optional web service is used.
+ ///
+ public class WebService
+ {
+ ///
+ /// Url/s to bind to the web service when it is started.
+ /// After startup, will contain the actual URL/s the service is listening on.
+ ///
+ /// Default: 127.0.0.1:0, which binds to a random ephemeral port.
+ /// Multiple URLs can be specified as a semi-colon separated list.
+ ///
+ public string? Urls { get; init; }
+
+ ///
+ /// If the web service is running in a separate process, it will be accessed using this URI.
+ ///
+ ///
+ /// Both processes should be using the same version of the SDK. If a random port is assigned when creating
+ /// the web service in the external process the actual port must be provided here.
+ ///
+ public Uri? ExternalUrl { get; init; }
+ }
+
+ internal void Validate()
+ {
+ if (string.IsNullOrEmpty(AppName))
+ {
+ throw new ArgumentException("Configuration AppName must be set to a valid application name.");
+ }
+
+ if (AppName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
+ {
+ throw new ArgumentException("Configuration AppName value contains invalid characters.");
+ }
+
+
+ if (Web?.ExternalUrl?.Port == 0)
+ {
+ throw new ArgumentException("Configuration Web.ExternalUrl has invalid port of 0.");
+ }
+ }
+
+ internal Dictionary<string, string> AsDictionary()
+ {
+ if (string.IsNullOrEmpty(AppName))
+ {
+ throw new FoundryLocalException(
+ "Configuration AppName must be set to a valid application name.");
+ }
+
+ var configValues = new Dictionary<string, string>
+ {
+ { "AppName", AppName },
+ { "LogLevel", LogLevel.ToString() }
+ };
+
+ if (!string.IsNullOrEmpty(AppDataDir))
+ {
+ configValues.Add("AppDataDir", AppDataDir);
+ }
+
+ if (!string.IsNullOrEmpty(ModelCacheDir))
+ {
+ configValues.Add("ModelCacheDir", ModelCacheDir);
+ }
+
+ if (!string.IsNullOrEmpty(LogsDir))
+ {
+ configValues.Add("LogsDir", LogsDir);
+ }
+
+ //configValues.Add("ManualEpDownload", ManualEpDownload.ToString());
+
+ if (Web != null)
+ {
+ if (Web.Urls != null)
+ {
+ configValues["WebServiceUrls"] = Web.Urls;
+ }
+ }
+
+ // Emit any additional settings.
+ if (AdditionalSettings != null)
+ {
+ foreach (var kvp in AdditionalSettings)
+ {
+ if (string.IsNullOrEmpty(kvp.Key))
+ {
+ continue; // skip empty keys
+ }
+ configValues[kvp.Key] = kvp.Value ?? string.Empty;
+ }
+ }
+
+ return configValues;
+ }
+}
diff --git a/sdk_v2/cs/src/Detail/AsyncLock.cs b/sdk_v2/cs/src/Detail/AsyncLock.cs
new file mode 100644
index 0000000..921d7f9
--- /dev/null
+++ b/sdk_v2/cs/src/Detail/AsyncLock.cs
@@ -0,0 +1,62 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Detail;
+using System;
+using System.Threading.Tasks;
+
+public sealed class AsyncLock : IDisposable
+{
+ private readonly Task<IDisposable> _releaserTask;
+ private readonly SemaphoreSlim _semaphore = new(1, 1);
+ private readonly IDisposable _releaser;
+
+ public AsyncLock()
+ {
+ _releaser = new Releaser(_semaphore);
+ _releaserTask = Task.FromResult(_releaser);
+ }
+
+ public void Dispose()
+ {
+ _semaphore.Dispose();
+ }
+
+ public IDisposable Lock()
+ {
+ _semaphore.Wait();
+ return _releaser;
+ }
+
+ public Task<IDisposable> LockAsync()
+ {
+ Task waitTask = _semaphore.WaitAsync();
+
+ return waitTask.IsCompleted
+ ? _releaserTask
+ : waitTask.ContinueWith(
+ (_, releaser) => (IDisposable)releaser!,
+ _releaser,
+ CancellationToken.None,
+ TaskContinuationOptions.ExecuteSynchronously,
+ TaskScheduler.Default);
+ }
+
+ private sealed class Releaser : IDisposable
+ {
+ private readonly SemaphoreSlim _semaphore;
+
+ public Releaser(SemaphoreSlim semaphore)
+ {
+ _semaphore = semaphore;
+ }
+
+ public void Dispose()
+ {
+ _semaphore.Release();
+ }
+ }
+}
diff --git a/sdk_v2/cs/src/Detail/CoreInterop.cs b/sdk_v2/cs/src/Detail/CoreInterop.cs
new file mode 100644
index 0000000..8411473
--- /dev/null
+++ b/sdk_v2/cs/src/Detail/CoreInterop.cs
@@ -0,0 +1,334 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Detail;
+
+using System.Diagnostics;
+using System.Runtime.InteropServices;
+
+using Microsoft.Extensions.Logging;
+
+using static Microsoft.AI.Foundry.Local.Detail.ICoreInterop;
+
+internal partial class CoreInterop : ICoreInterop
+{
+ // TODO: Android and iOS may need special handling. See ORT C# NativeMethods.shared.cs
+ internal const string LibraryName = "Microsoft.AI.Foundry.Local.Core";
+ private readonly ILogger _logger;
+
+ private static string AddLibraryExtension(string name) =>
+ RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? $"{name}.dll" :
+ RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? $"{name}.so" :
+ RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? $"{name}.dylib" :
+ throw new PlatformNotSupportedException();
+
+ private static IntPtr genaiLibHandle = IntPtr.Zero;
+ private static IntPtr ortLibHandle = IntPtr.Zero;
+
+ // we need to manually load ORT and ORT GenAI dlls on Windows to ensure
+ // a) we're using the libraries we think we are
+ // b) that dependencies are resolved correctly as the dlls may not be in the default load path.
+ // it's a 'Try' as we can't do anything else if it fails as the dlls may be available somewhere else.
+ private static void LoadOrtDllsIfInSameDir(string path)
+ {
+ var genaiLibName = AddLibraryExtension("onnxruntime-genai");
+ var ortLibName = AddLibraryExtension("onnxruntime");
+ var genaiPath = Path.Combine(path, genaiLibName);
+ var ortPath = Path.Combine(path, ortLibName);
+
+ // need to load ORT first as the winml GenAI library redirects and tries to load a winml onnxruntime.dll,
+ // which will not have the EPs we expect/require. if/when we don't bundle our own onnxruntime.dll we need to
+ // revisit this.
+ var loadedOrt = NativeLibrary.TryLoad(ortPath, out ortLibHandle);
+ var loadedGenAI = NativeLibrary.TryLoad(genaiPath, out genaiLibHandle);
+
+#if DEBUG
+ Console.WriteLine($"Loaded ORT:{loadedOrt} handle={ortLibHandle}");
+ Console.WriteLine($"Loaded GenAI: {loadedGenAI} handle={genaiLibHandle}");
+#endif
+ }
+
+ static CoreInterop()
+ {
+ NativeLibrary.SetDllImportResolver(typeof(CoreInterop).Assembly, (libraryName, assembly, searchPath) =>
+ {
+ if (libraryName == LibraryName)
+ {
+#if DEBUG
+ Console.WriteLine($"Resolving {libraryName}. BaseDirectory: {AppContext.BaseDirectory}");
+#endif
+ var isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
+
+ // check if this build is platform specific. in that case all files are flattened in the one directory
+ // and there's no need to look in runtimes/-/native.
+ // e.g. `dotnet publish -r win-x64` copies all the dependencies into the publish output folder.
+ var libraryPath = Path.Combine(AppContext.BaseDirectory, AddLibraryExtension(LibraryName));
+ if (File.Exists(libraryPath))
+ {
+ if (NativeLibrary.TryLoad(libraryPath, out var handle))
+ {
+#if DEBUG
+ Console.WriteLine($"Loaded native library from: {libraryPath}");
+#endif
+ if (isWindows)
+ {
+ LoadOrtDllsIfInSameDir(AppContext.BaseDirectory);
+ }
+
+ return handle;
+ }
+ }
+
+ // TODO: figure out what is required on Android and iOS
+ // The nuget has an AAR and xcframework respectively so we need to determine what files are where
+ // after a build.
+ var os = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "win" :
+ RuntimeInformation.IsOSPlatform(OSPlatform.Linux) ? "linux" :
+ RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? "osx" :
+ throw new PlatformNotSupportedException();
+
+ var arch = RuntimeInformation.OSArchitecture.ToString().ToLowerInvariant();
+ var runtimePath = Path.Combine(AppContext.BaseDirectory, "runtimes", $"{os}-{arch}", "native");
+ libraryPath = Path.Combine(runtimePath, AddLibraryExtension(LibraryName));
+
+#if DEBUG
+ Console.WriteLine($"Looking for native library at: {libraryPath}");
+#endif
+ if (File.Exists(libraryPath))
+ {
+ if (NativeLibrary.TryLoad(libraryPath, out var handle))
+ {
+#if DEBUG
+ Console.WriteLine($"Loaded native library from: {libraryPath}");
+#endif
+ if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
+ {
+ LoadOrtDllsIfInSameDir(runtimePath);
+ }
+
+ return handle;
+ }
+ }
+ }
+
+ return IntPtr.Zero;
+ });
+ }
+
+ internal CoreInterop(Configuration config, ILogger logger)
+ {
+
+ _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+
+ var request = new CoreInteropRequest { Params = config.AsDictionary() };
+ var response = ExecuteCommand("initialize", request);
+
+ if (response.Error != null)
+ {
+ throw new FoundryLocalException($"Error initializing Foundry.Local.Core library: {response.Error}");
+ }
+ else
+ {
+ _logger.LogInformation("Foundry.Local.Core initialized successfully: {Response}", response.Data);
+ }
+ }
+
+ // For testing. Skips the 'initialize' command so assumes this has been done previously.
+ internal CoreInterop(ILogger logger)
+ {
+ _logger = logger ?? throw new ArgumentNullException(nameof(logger));
+ }
+
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private unsafe delegate void ExecuteCommandDelegate(RequestBuffer* req, ResponseBuffer* resp);
+
+ // Import the function from the AOT-compiled library
+ [LibraryImport(LibraryName, EntryPoint = "execute_command")]
+ [UnmanagedCallConv(CallConvs = new[] { typeof(System.Runtime.CompilerServices.CallConvCdecl) })]
+ private static unsafe partial void CoreExecuteCommand(RequestBuffer* request, ResponseBuffer* response);
+
+ [LibraryImport(LibraryName, EntryPoint = "execute_command_with_callback")]
+ [UnmanagedCallConv(CallConvs = new[] { typeof(System.Runtime.CompilerServices.CallConvCdecl) })]
+ private static unsafe partial void CoreExecuteCommandWithCallback(RequestBuffer* nativeRequest,
+ ResponseBuffer* nativeResponse,
+ nint callbackPtr, // NativeCallbackFn pointer
+ nint userData);
+
+ // helper to capture exceptions in callbacks
+ internal class CallbackHelper
+ {
+ public CallbackFn Callback { get; }
+ public Exception? Exception { get; set; } // keep the first only. most likely it will be the same issue in all
+ public CallbackHelper(CallbackFn callback)
+ {
+ Callback = callback ?? throw new ArgumentNullException(nameof(callback));
+ }
+ }
+
+ private static void HandleCallback(nint data, int length, nint callbackHelper)
+ {
+ var callbackData = string.Empty;
+ CallbackHelper? helper = null;
+
+ try
+ {
+ if (data != IntPtr.Zero && length > 0)
+ {
+ var managedData = new byte[length];
+ Marshal.Copy(data, managedData, 0, length);
+ callbackData = System.Text.Encoding.UTF8.GetString(managedData);
+ }
+
+ Debug.Assert(callbackHelper != IntPtr.Zero, "Callback helper pointer is required.");
+
+ helper = (CallbackHelper)GCHandle.FromIntPtr(callbackHelper).Target!;
+ helper.Callback.Invoke(callbackData);
+ }
+ catch (Exception ex) when (ex is not OperationCanceledException)
+ {
+ FoundryLocalManager.Instance.Logger.LogError(ex, $"Error in callback. Callback data: {callbackData}");
+ if (helper != null && helper.Exception == null)
+ {
+ helper.Exception = ex;
+ }
+ }
+ }
+
+ private static readonly NativeCallbackFn handleCallbackDelegate = HandleCallback;
+
+
+ public Response ExecuteCommandImpl(string commandName, string? commandInput,
+ CallbackFn? callback = null)
+ {
+ try
+ {
+ byte[] commandBytes = System.Text.Encoding.UTF8.GetBytes(commandName);
+ // Allocate unmanaged memory for the command bytes
+ IntPtr commandPtr = Marshal.AllocHGlobal(commandBytes.Length);
+ Marshal.Copy(commandBytes, 0, commandPtr, commandBytes.Length);
+
+ byte[]? inputBytes = null;
+ IntPtr? inputPtr = null;
+
+ if (commandInput != null)
+ {
+ inputBytes = System.Text.Encoding.UTF8.GetBytes(commandInput);
+ inputPtr = Marshal.AllocHGlobal(inputBytes.Length);
+ Marshal.Copy(inputBytes, 0, inputPtr.Value, inputBytes.Length);
+ }
+
+ // Prepare request
+ var request = new RequestBuffer
+ {
+ Command = commandPtr,
+ CommandLength = commandBytes.Length,
+ Data = inputPtr ?? IntPtr.Zero,
+ DataLength = inputBytes?.Length ?? 0
+ };
+
+ ResponseBuffer response = default;
+
+ if (callback != null)
+ {
+ // NOTE: This assumes the command will NOT return until complete, so the lifetime of the
+ // objects involved in the callback is limited to the duration of the call to
+ // CoreExecuteCommandWithCallback.
+
+ var helper = new CallbackHelper(callback);
+
+ var funcPtr = Marshal.GetFunctionPointerForDelegate(handleCallbackDelegate);
+ var helperHandle = GCHandle.Alloc(helper);
+ var helperPtr = GCHandle.ToIntPtr(helperHandle);
+
+ unsafe
+ {
+ CoreExecuteCommandWithCallback(&request, &response, funcPtr, helperPtr);
+ }
+
+ helperHandle.Free();
+
+ if (helper.Exception != null)
+ {
+ throw new FoundryLocalException("Exception in callback handler. See InnerException for details",
+ helper.Exception);
+ }
+ }
+ else
+ {
+ // Pin request/response on the stack
+ unsafe
+ {
+ CoreExecuteCommand(&request, &response);
+ }
+ }
+
+ Response result = new();
+
+ // Marshal response. Will have either Data or Error populated. Not both.
+ if (response.Data != IntPtr.Zero && response.DataLength > 0)
+ {
+ byte[] managedResponse = new byte[response.DataLength];
+ Marshal.Copy(response.Data, managedResponse, 0, response.DataLength);
+ result.Data = System.Text.Encoding.UTF8.GetString(managedResponse);
+ _logger.LogDebug($"Command: {commandName} succeeded.");
+ }
+
+ if (response.Error != IntPtr.Zero && response.ErrorLength > 0)
+ {
+ result.Error = Marshal.PtrToStringUTF8(response.Error, response.ErrorLength)!;
+ _logger.LogDebug($"Input:{commandInput ?? "null"}");
+ _logger.LogDebug($"Command: {commandName} Error: {result.Error}");
+ }
+
+ // TODO: Validate this works. C# specific. Attempting to avoid calling free_response to do this
+ Marshal.FreeHGlobal(response.Data);
+ Marshal.FreeHGlobal(response.Error);
+
+ Marshal.FreeHGlobal(commandPtr);
+ if (commandInput != null)
+ {
+ Marshal.FreeHGlobal(inputPtr!.Value);
+ }
+
+ return result;
+ }
+ catch (Exception ex) when (ex is not OperationCanceledException)
+ {
+ var msg = $"Error executing command '{commandName}' with input {commandInput ?? "null"}";
+ throw new FoundryLocalException(msg, ex, _logger);
+ }
+ }
+
+ public Response ExecuteCommand(string commandName, CoreInteropRequest? commandInput = null)
+ {
+ var commandInputJson = commandInput?.ToJson();
+ return ExecuteCommandImpl(commandName, commandInputJson);
+ }
+
+ public Response ExecuteCommandWithCallback(string commandName, CoreInteropRequest? commandInput,
+ CallbackFn callback)
+ {
+ var commandInputJson = commandInput?.ToJson();
+ return ExecuteCommandImpl(commandName, commandInputJson, callback);
+ }
+
+ public Task<Response> ExecuteCommandAsync(string commandName, CoreInteropRequest? commandInput = null,
+ CancellationToken? cancellationToken = null)
+ {
+ var ct = cancellationToken ?? CancellationToken.None;
+ return Task.Run(() => ExecuteCommand(commandName, commandInput), ct);
+ }
+
+ public Task<Response> ExecuteCommandWithCallbackAsync(string commandName, CoreInteropRequest? commandInput,
+ CallbackFn callback,
+ CancellationToken? cancellationToken = null)
+ {
+ var ct = cancellationToken ?? CancellationToken.None;
+ return Task.Run(() => ExecuteCommandWithCallback(commandName, commandInput, callback), ct);
+ }
+
+}
diff --git a/sdk_v2/cs/src/Detail/CoreInteropRequest.cs b/sdk_v2/cs/src/Detail/CoreInteropRequest.cs
new file mode 100644
index 0000000..50365ad
--- /dev/null
+++ b/sdk_v2/cs/src/Detail/CoreInteropRequest.cs
@@ -0,0 +1,22 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Detail;
+using System.Collections.Generic;
+using System.Text.Json;
+
+public class CoreInteropRequest
+{
+ public Dictionary<string, string> Params { get; set; } = new();
+}
+
+internal static class RequestExtensions
+{
+ public static string ToJson(this CoreInteropRequest request)
+ {
+ return JsonSerializer.Serialize(request, JsonSerializationContext.Default.CoreInteropRequest);
+ }
+}
diff --git a/sdk_v2/cs/src/Detail/ICoreInterop.cs b/sdk_v2/cs/src/Detail/ICoreInterop.cs
new file mode 100644
index 0000000..1fff9dd
--- /dev/null
+++ b/sdk_v2/cs/src/Detail/ICoreInterop.cs
@@ -0,0 +1,54 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Detail;
+
+using System.Runtime.InteropServices;
+using System.Threading;
+using System.Threading.Tasks;
+
+
+internal interface ICoreInterop
+{
+ internal record Response
+ {
+ internal string? Data;
+ internal string? Error;
+ }
+
+ public delegate void CallbackFn(string callbackData);
+
+ [StructLayout(LayoutKind.Sequential)]
+ protected unsafe struct RequestBuffer
+ {
+ public nint Command;
+ public int CommandLength;
+ public nint Data;
+ public int DataLength;
+ }
+
+ [StructLayout(LayoutKind.Sequential)]
+ protected unsafe struct ResponseBuffer
+ {
+ public nint Data;
+ public int DataLength;
+ public nint Error;
+ public int ErrorLength;
+ }
+
+ // native callback function signature
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ protected unsafe delegate void NativeCallbackFn(nint data, int length, nint userData);
+
+ Response ExecuteCommand(string commandName, CoreInteropRequest? commandInput = null);
+ Response ExecuteCommandWithCallback(string commandName, CoreInteropRequest? commandInput, CallbackFn callback);
+
+ Task<Response> ExecuteCommandAsync(string commandName, CoreInteropRequest? commandInput = null,
+ CancellationToken? ct = null);
+ Task<Response> ExecuteCommandWithCallbackAsync(string commandName, CoreInteropRequest? commandInput,
+ CallbackFn callback,
+ CancellationToken? ct = null);
+}
diff --git a/sdk_v2/cs/src/Detail/IModelLoadManager.cs b/sdk_v2/cs/src/Detail/IModelLoadManager.cs
new file mode 100644
index 0000000..a96c669
--- /dev/null
+++ b/sdk_v2/cs/src/Detail/IModelLoadManager.cs
@@ -0,0 +1,19 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Detail;
+using System.Threading.Tasks;
+
+///
+/// Interface for model load management.
+/// These operations can be done directly or via the optional web service.
+///
+internal interface IModelLoadManager
+{
+ internal abstract Task LoadAsync(string modelName, CancellationToken? ct = null);
+ internal abstract Task UnloadAsync(string modelName, CancellationToken? ct = null);
+ internal abstract Task<string[]> ListLoadedModelsAsync(CancellationToken? ct = null);
+}
diff --git a/sdk_v2/cs/src/Detail/JsonSerializationContext.cs b/sdk_v2/cs/src/Detail/JsonSerializationContext.cs
new file mode 100644
index 0000000..b903142
--- /dev/null
+++ b/sdk_v2/cs/src/Detail/JsonSerializationContext.cs
@@ -0,0 +1,28 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Detail;
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
+using Betalgo.Ranul.OpenAI.ObjectModels.ResponseModels;
+
+using Microsoft.AI.Foundry.Local.OpenAI;
+
+[JsonSerializable(typeof(ModelInfo))]
+[JsonSerializable(typeof(List<ModelInfo>))]
+[JsonSerializable(typeof(CoreInteropRequest))]
+[JsonSerializable(typeof(ChatCompletionCreateRequestExtended))]
+[JsonSerializable(typeof(ChatCompletionCreateResponse))]
+[JsonSerializable(typeof(AudioCreateTranscriptionRequest))]
+[JsonSerializable(typeof(AudioCreateTranscriptionResponse))]
+[JsonSerializable(typeof(string[]))] // list loaded or cached models
+[JsonSourceGenerationOptions(DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
+ WriteIndented = false)]
+internal partial class JsonSerializationContext : JsonSerializerContext
+{
+}
diff --git a/sdk_v2/cs/src/Detail/ModelLoadManager.cs b/sdk_v2/cs/src/Detail/ModelLoadManager.cs
new file mode 100644
index 0000000..f8bdaca
--- /dev/null
+++ b/sdk_v2/cs/src/Detail/ModelLoadManager.cs
@@ -0,0 +1,177 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Detail;
+
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Threading;
+
+using Microsoft.Extensions.Logging;
+
+/// <summary>
+/// Loads/unloads models and lists loaded models, either in-process via <see cref="ICoreInterop"/> or,
+/// when an external service URL is configured, via HTTP calls to that web service.
+/// </summary>
+internal sealed class ModelLoadManager : IModelLoadManager, IDisposable
+{
+    private readonly Uri? _externalServiceUrl;
+    private readonly HttpClient? _httpClient;
+    private readonly ICoreInterop _coreInterop;
+    private readonly ILogger _logger;
+
+    /// <summary>Create a ModelLoadManager.</summary>
+    /// <param name="externalServiceUrl">If non-null, requests are sent to this web service instead of
+    /// being executed in-process.</param>
+    /// <param name="coreInterop">Core interop used for in-process commands.</param>
+    /// <param name="logger">Logger.</param>
+    internal ModelLoadManager(Uri? externalServiceUrl, ICoreInterop coreInterop, ILogger logger)
+    {
+        _externalServiceUrl = externalServiceUrl;
+        _coreInterop = coreInterop;
+        _logger = logger;
+
+        if (_externalServiceUrl != null)
+        {
+            // We only have a single instance of ModelLoadManager so we don't need HttpClient to be static.
+#pragma warning disable IDISP014 // Use a single instance of HttpClient.
+            _httpClient = new HttpClient
+            {
+                BaseAddress = _externalServiceUrl,
+            };
+#pragma warning restore IDISP014 // Use a single instance of HttpClient
+
+            // TODO: Wire in Config AppName here
+            var userAgent = $"foundry-local-cs-sdk/{FoundryLocalManager.AssemblyVersion}";
+            _httpClient.DefaultRequestHeaders.UserAgent.ParseAdd(userAgent);
+        }
+    }
+
+    /// <summary>Load the model, delegating to the web service if one is configured.</summary>
+    /// <param name="modelId">Unique model id.</param>
+    /// <param name="ct">Optional cancellation token.</param>
+    /// <exception cref="FoundryLocalException">If the load fails.</exception>
+    public async Task LoadAsync(string modelId, CancellationToken? ct = null)
+    {
+        if (_externalServiceUrl != null)
+        {
+            await WebLoadModelAsync(modelId, ct).ConfigureAwait(false);
+            return;
+        }
+
+        var request = new CoreInteropRequest { Params = new() { { "Model", modelId } } };
+        var result = await _coreInterop.ExecuteCommandAsync("load_model", request, ct).ConfigureAwait(false);
+        if (result.Error != null)
+        {
+            throw new FoundryLocalException($"Error loading model {modelId}: {result.Error}");
+        }
+
+        // currently just a 'model loaded successfully' message
+        _logger.LogInformation("Model {ModelId} loaded successfully: {Message}", modelId, result.Data);
+    }
+
+    /// <summary>Unload the model, delegating to the web service if one is configured.</summary>
+    /// <param name="modelId">Unique model id.</param>
+    /// <param name="ct">Optional cancellation token.</param>
+    /// <exception cref="FoundryLocalException">If the unload fails.</exception>
+    public async Task UnloadAsync(string modelId, CancellationToken? ct = null)
+    {
+        if (_externalServiceUrl != null)
+        {
+            await WebUnloadModelAsync(modelId, ct).ConfigureAwait(false);
+            return;
+        }
+
+        var request = new CoreInteropRequest { Params = new() { { "Model", modelId } } };
+        var result = await _coreInterop.ExecuteCommandAsync("unload_model", request, ct).ConfigureAwait(false);
+        if (result.Error != null)
+        {
+            throw new FoundryLocalException($"Error unloading model {modelId}: {result.Error}");
+        }
+
+        _logger.LogInformation("Model {ModelId} unloaded successfully: {Message}", modelId, result.Data);
+    }
+
+    /// <summary>List the ids of the currently loaded models.</summary>
+    /// <param name="ct">Optional cancellation token.</param>
+    /// <returns>Array of loaded model ids; empty if none.</returns>
+    // NOTE(review): return type restored to Task<string[]> (type argument lost in extraction) — the body
+    // deserializes and returns a string[].
+    public async Task<string[]> ListLoadedModelsAsync(CancellationToken? ct = null)
+    {
+        if (_externalServiceUrl != null)
+        {
+            return await WebListLoadedModelAsync(ct).ConfigureAwait(false);
+        }
+
+        var result = await _coreInterop.ExecuteCommandAsync("list_loaded_models", null, ct).ConfigureAwait(false);
+        if (result.Error != null)
+        {
+            throw new FoundryLocalException($"Error listing loaded models: {result.Error}");
+        }
+
+        _logger.LogDebug("Loaded models json: {Data}", result.Data);
+
+        var typeInfo = JsonSerializationContext.Default.StringArray;
+        var modelList = JsonSerializer.Deserialize(result.Data!, typeInfo);
+
+        return modelList ?? [];
+    }
+
+    private async Task<string[]> WebListLoadedModelAsync(CancellationToken? ct = null)
+    {
+        // Relative path resolves against _httpClient.BaseAddress.
+        using var response = await _httpClient!.GetAsync("models/loaded", ct ?? CancellationToken.None)
+            .ConfigureAwait(false);
+        if (!response.IsSuccessStatusCode)
+        {
+            throw new FoundryLocalException($"Error listing loaded models from {_externalServiceUrl}: " +
+                $"{response.ReasonPhrase}");
+        }
+
+        var content = await response.Content.ReadAsStringAsync(ct ?? CancellationToken.None).ConfigureAwait(false);
+        _logger.LogDebug("Loaded models json from {WebService}: {Data}", _externalServiceUrl, content);
+        var typeInfo = JsonSerializationContext.Default.StringArray;
+        var modelList = JsonSerializer.Deserialize(content, typeInfo);
+        return modelList ?? [];
+    }
+
+    private async Task WebLoadModelAsync(string modelId, CancellationToken? ct = null)
+    {
+        // NOTE(review): Dictionary type arguments restored (lost in extraction) — presumably
+        // Dictionary<string, string> given the string key/value usage below. TODO confirm.
+        var queryParams = new Dictionary<string, string>
+        {
+            // { "timeout", ... }
+        };
+
+        // TODO: What do we need around EP override in the latest setup?
+        // Can we do this in FLC and limit to generic-gpu models only, picking the vendor GPU EP over WebGPU?
+        // Not sure there's any other valid override. WebGPU will always try and use the discrete GPU, so vendor
+        // EP will always be better.
+        //if (!string.IsNullOrEmpty(modelInfo.EpOverride))
+        //{
+        //    queryParams["ep"] = modelInfo.EpOverride!;
+        //}
+
+        var uriBuilder = new UriBuilder(_externalServiceUrl!)
+        {
+            Path = $"models/load/{modelId}",
+            Query = string.Join("&", queryParams.Select(kvp =>
+                $"{Uri.EscapeDataString(kvp.Key)}={Uri.EscapeDataString(kvp.Value)}"))
+        };
+
+        using var response = await _httpClient!.GetAsync(uriBuilder.Uri, ct ?? CancellationToken.None)
+            .ConfigureAwait(false);
+        if (!response.IsSuccessStatusCode)
+        {
+            throw new FoundryLocalException($"Error loading model {modelId} from {_externalServiceUrl}: " +
+                $"{response.ReasonPhrase}");
+        }
+
+        var content = await response.Content.ReadAsStringAsync(ct ?? CancellationToken.None).ConfigureAwait(false);
+        _logger.LogInformation("Model {ModelId} loaded successfully from {WebService}: {Message}",
+            modelId, _externalServiceUrl, content);
+    }
+
+    private async Task WebUnloadModelAsync(string modelId, CancellationToken? ct = null)
+    {
+        // FIX: pass the relative path as a string so it resolves against _httpClient.BaseAddress, matching
+        // WebListLoadedModelAsync. The original `new Uri($"models/unload/{modelId}")` throws
+        // UriFormatException because the single-string Uri constructor requires an absolute URI.
+        using var response = await _httpClient!.GetAsync($"models/unload/{modelId}", ct ?? CancellationToken.None)
+            .ConfigureAwait(false);
+
+        // TODO: Do we need to handle a 404 (not found) explicitly or does that not provide any real value?
+        if (!response.IsSuccessStatusCode)
+        {
+            throw new FoundryLocalException($"Error unloading model {modelId} from {_externalServiceUrl}: " +
+                $"{response.ReasonPhrase}");
+        }
+
+        var content = await response.Content.ReadAsStringAsync(ct ?? CancellationToken.None).ConfigureAwait(false);
+        // FIX: message said "loaded successfully" (copy/paste from the load path) — this is the unload path.
+        _logger.LogInformation("Model {ModelId} unloaded successfully from {WebService}: {Message}",
+            modelId, _externalServiceUrl, content);
+    }
+
+    public void Dispose()
+    {
+        _httpClient?.Dispose();
+    }
+}
diff --git a/sdk_v2/cs/src/FoundryLocalException.cs b/sdk_v2/cs/src/FoundryLocalException.cs
new file mode 100644
index 0000000..d6e606c
--- /dev/null
+++ b/sdk_v2/cs/src/FoundryLocalException.cs
@@ -0,0 +1,35 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+using System;
+using System.Diagnostics;
+
+using Microsoft.Extensions.Logging;
+
+/// <summary>
+/// Exception type thrown for errors surfaced by the Foundry Local SDK.
+/// The internal overloads additionally log the error via the supplied <see cref="ILogger"/>.
+/// </summary>
+public class FoundryLocalException : Exception
+{
+    /// <summary>Create an exception carrying only a message.</summary>
+    public FoundryLocalException(string message)
+        : base(message)
+    {
+    }
+
+    /// <summary>Create an exception wrapping an inner exception.</summary>
+    public FoundryLocalException(string message, Exception innerException)
+        : base(message, innerException)
+    {
+    }
+
+    /// <summary>Create an exception and log the message as an error.</summary>
+    internal FoundryLocalException(string message, ILogger logger)
+        : base(message)
+    {
+        Debug.Assert(logger is not null);
+        logger.LogError(message);
+    }
+
+    /// <summary>Create an exception wrapping an inner exception and log it as an error.</summary>
+    internal FoundryLocalException(string message, Exception innerException, ILogger logger)
+        : base(message, innerException)
+    {
+        Debug.Assert(logger is not null);
+        logger.LogError(innerException, message);
+    }
+}
diff --git a/sdk_v2/cs/src/FoundryLocalManager.cs b/sdk_v2/cs/src/FoundryLocalManager.cs
new file mode 100644
index 0000000..ce3712c
--- /dev/null
+++ b/sdk_v2/cs/src/FoundryLocalManager.cs
@@ -0,0 +1,309 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+namespace Microsoft.AI.Foundry.Local;
+
+using System;
+using System.Text.Json;
+using System.Threading.Tasks;
+
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.Extensions.Logging;
+
+/// <summary>
+/// Singleton entry point for the SDK: initialization, catalog access, optional web service lifecycle,
+/// and (WinML builds) execution provider downloads.
+/// </summary>
+public class FoundryLocalManager : IDisposable
+{
+    private static FoundryLocalManager? instance;
+    private static readonly AsyncLock asyncLock = new();
+
+    // SDK assembly version; used e.g. for the HTTP User-Agent string in ModelLoadManager.
+    internal static readonly string AssemblyVersion =
+        typeof(FoundryLocalManager).Assembly.GetName().Version?.ToString() ?? "unknown";
+
+    private readonly Configuration _config;
+    private CoreInterop _coreInterop = default!;
+    private Catalog _catalog = default!;
+    private ModelLoadManager _modelManager = default!;
+    private readonly AsyncLock _lock = new();
+    private bool _disposed;
+    private readonly ILogger _logger;
+
+    internal Configuration Configuration => _config;
+    internal ILogger Logger => _logger;
+    internal ICoreInterop CoreInterop => _coreInterop!; // always valid once the instance is created
+
+    /// <summary>True once <see cref="CreateAsync"/> has completed successfully.</summary>
+    public static bool IsInitialized => instance != null;
+
+    /// <summary>The singleton instance. Throws if <see cref="CreateAsync"/> has not been called.</summary>
+    public static FoundryLocalManager Instance => instance ??
+        throw new FoundryLocalException("FoundryLocalManager has not been created. Call CreateAsync first.");
+
+    /// <summary>
+    /// Bound Urls if the web service has been started. Null otherwise.
+    /// See <see cref="StartWebServiceAsync"/>.
+    /// </summary>
+    public string[]? Urls { get; private set; }
+
+    /// <summary>
+    /// Create the singleton instance.
+    /// </summary>
+    /// <param name="configuration">Configuration to use.</param>
+    /// <param name="logger">Application logger to use.
+    /// Use Microsoft.Extensions.Logging.NullLogger.Instance if you wish to ignore log output from the SDK.
+    /// </param>
+    /// <param name="ct">Optional cancellation token for the initialization.</param>
+    /// <returns>Task creating the instance.</returns>
+    /// <exception cref="FoundryLocalException">If already created or initialization fails.</exception>
+    public static async Task CreateAsync(Configuration configuration, ILogger logger,
+        CancellationToken? ct = null)
+    {
+        using var disposable = await asyncLock.LockAsync().ConfigureAwait(false);
+
+        if (instance != null)
+        {
+            // throw as we're not going to use the provided configuration in case it differs from the original.
+            throw new FoundryLocalException("FoundryLocalManager has already been created.", logger);
+        }
+
+        FoundryLocalManager? manager = null;
+        try
+        {
+            // use a local variable to ensure fully initialized before assigning to static instance.
+            manager = new FoundryLocalManager(configuration, logger);
+            await manager.InitializeAsync(ct).ConfigureAwait(false);
+
+            // there is no previous as we only get here if instance is null.
+            // ownership is transferred to the static instance.
+#pragma warning disable IDISP003 // Dispose previous before re-assigning
+            instance = manager;
+            manager = null;
+#pragma warning restore IDISP003
+        }
+        catch (Exception ex)
+        {
+            manager?.Dispose();
+
+            if (ex is FoundryLocalException or OperationCanceledException)
+            {
+                throw;
+            }
+
+            // log and throw as FoundryLocalException
+            throw new FoundryLocalException("Error during initialization.", ex, logger);
+        }
+    }
+
+    /// <summary>
+    /// Get the model catalog instance.
+    /// </summary>
+    /// <param name="ct">Optional cancellation token.</param>
+    /// <returns>The model catalog.</returns>
+    /// <remarks>
+    /// The catalog is populated on first use.
+    /// If you are using a WinML build this will trigger a one-off execution provider download if not already done.
+    /// It is recommended to call <see cref="EnsureEpsDownloadedAsync"/> first to separate out the two steps.
+    /// </remarks>
+    // NOTE(review): return type restored to Task<Catalog> (type argument lost in extraction).
+    public async Task<Catalog> GetCatalogAsync(CancellationToken? ct = null)
+    {
+        return await Utils.CallWithExceptionHandling(() => GetCatalogImplAsync(ct),
+            "Error getting Catalog.", _logger).ConfigureAwait(false);
+    }
+
+    /// <summary>
+    /// Start the optional web service. This will provide an OpenAI-compatible REST endpoint that supports
+    /// /v1/chat_completions
+    /// /v1/models to list downloaded models
+    /// /v1/models/{model_id} to get model details
+    ///
+    /// <see cref="Urls"/> is populated with the actual bound Urls after startup.
+    /// </summary>
+    /// <param name="ct">Optional cancellation token.</param>
+    /// <returns>Task starting the web service.</returns>
+    public async Task StartWebServiceAsync(CancellationToken? ct = null)
+    {
+        await Utils.CallWithExceptionHandling(() => StartWebServiceImplAsync(ct),
+            "Error starting web service.", _logger).ConfigureAwait(false);
+    }
+
+    /// <summary>
+    /// Stops the web service if started.
+    /// </summary>
+    /// <param name="ct">Optional cancellation token.</param>
+    /// <returns>Task stopping the web service.</returns>
+    public async Task StopWebServiceAsync(CancellationToken? ct = null)
+    {
+        await Utils.CallWithExceptionHandling(() => StopWebServiceImplAsync(ct),
+            "Error stopping web service.", _logger).ConfigureAwait(false);
+    }
+
+    /// <summary>
+    /// Ensure execution providers are downloaded and registered.
+    /// Only relevant when using WinML.
+    ///
+    /// Execution provider download can be time consuming due to the size of the packages.
+    /// Once downloaded, EPs are not re-downloaded unless a new version is available, so this method will be fast
+    /// on subsequent calls.
+    /// </summary>
+    /// <param name="ct">Optional cancellation token.</param>
+    public async Task EnsureEpsDownloadedAsync(CancellationToken? ct = null)
+    {
+        await Utils.CallWithExceptionHandling(() => EnsureEpsDownloadedImplAsync(ct),
+            "Error ensuring execution providers downloaded.", _logger)
+            .ConfigureAwait(false);
+    }
+
+    private FoundryLocalManager(Configuration configuration, ILogger logger)
+    {
+        _config = configuration ?? throw new ArgumentNullException(nameof(configuration));
+        _logger = logger;
+    }
+
+    // Validates config, creates the core interop and model manager, and aligns Core's model cache
+    // directory with the configured one if they differ.
+    private async Task InitializeAsync(CancellationToken? ct = null)
+    {
+        _config.Validate();
+        _coreInterop = new CoreInterop(_config, _logger);
+
+#pragma warning disable IDISP003 // Dispose previous before re-assigning. Always null when this is called.
+        _modelManager = new ModelLoadManager(_config.Web?.ExternalUrl, _coreInterop, _logger);
+#pragma warning restore IDISP003
+
+        if (_config.ModelCacheDir != null)
+        {
+            CoreInteropRequest? input = null;
+            var result = await _coreInterop!.ExecuteCommandAsync("get_cache_directory", input, ct)
+                .ConfigureAwait(false);
+            if (result.Error != null)
+            {
+                throw new FoundryLocalException($"Error getting current model cache directory: {result.Error}",
+                    _logger);
+            }
+
+            var curCacheDir = result.Data!;
+            if (curCacheDir != _config.ModelCacheDir)
+            {
+                var request = new CoreInteropRequest
+                {
+                    // NOTE(review): Dictionary type arguments restored (lost in extraction) — presumably
+                    // Dictionary<string, string> to match the Params usage elsewhere. TODO confirm.
+                    Params = new Dictionary<string, string> { { "Directory", _config.ModelCacheDir } }
+                };
+
+                result = await _coreInterop!.ExecuteCommandAsync("set_cache_directory", request, ct)
+                    .ConfigureAwait(false);
+                if (result.Error != null)
+                {
+                    throw new FoundryLocalException(
+                        $"Error setting model cache directory to '{_config.ModelCacheDir}': {result.Error}", _logger);
+                }
+            }
+        }
+    }
+
+    // NOTE(review): return type restored to Task<Catalog> (type argument lost in extraction).
+    private async Task<Catalog> GetCatalogImplAsync(CancellationToken? ct = null)
+    {
+        // create on first use. Double-checked locking: the unsynchronized first read is a benign race
+        // (reference reads/writes are atomic); the lock ensures only one Catalog is ever created.
+        if (_catalog == null)
+        {
+            using var disposable = await _lock.LockAsync().ConfigureAwait(false);
+            if (_catalog == null)
+            {
+                _catalog = await Catalog.CreateAsync(_modelManager!, _coreInterop!, _logger, ct).ConfigureAwait(false);
+            }
+        }
+
+        return _catalog;
+    }
+
+    private async Task StartWebServiceImplAsync(CancellationToken? ct = null)
+    {
+        if (_config?.Web?.Urls == null)
+        {
+            throw new FoundryLocalException("Web service configuration was not provided.", _logger);
+        }
+
+        using var disposable = await asyncLock.LockAsync().ConfigureAwait(false);
+
+        CoreInteropRequest? input = null;
+        var result = await _coreInterop!.ExecuteCommandAsync("start_service", input, ct).ConfigureAwait(false);
+        if (result.Error != null)
+        {
+            throw new FoundryLocalException($"Error starting web service: {result.Error}", _logger);
+        }
+
+        var typeInfo = JsonSerializationContext.Default.StringArray;
+        var boundUrls = JsonSerializer.Deserialize(result.Data!, typeInfo);
+        if (boundUrls == null || boundUrls.Length == 0)
+        {
+            throw new FoundryLocalException("Failed to get bound URLs from web service start response.", _logger);
+        }
+
+        Urls = boundUrls;
+    }
+
+    private async Task StopWebServiceImplAsync(CancellationToken? ct = null)
+    {
+        if (_config?.Web?.Urls == null)
+        {
+            throw new FoundryLocalException("Web service configuration was not provided.", _logger);
+        }
+
+        using var disposable = await asyncLock.LockAsync().ConfigureAwait(false);
+
+        CoreInteropRequest? input = null;
+        var result = await _coreInterop!.ExecuteCommandAsync("stop_service", input, ct).ConfigureAwait(false);
+        if (result.Error != null)
+        {
+            throw new FoundryLocalException($"Error stopping web service: {result.Error}", _logger);
+        }
+
+        // Should we clear these even if there's an error response?
+        // Service is probably in a bad state or was not running.
+        Urls = null;
+    }
+
+    private async Task EnsureEpsDownloadedImplAsync(CancellationToken? ct = null)
+    {
+        using var disposable = await asyncLock.LockAsync().ConfigureAwait(false);
+
+        CoreInteropRequest? input = null;
+        // FIX: added missing ConfigureAwait(false) for consistency with every other await in this class.
+        var result = await _coreInterop!.ExecuteCommandAsync("ensure_eps_downloaded", input, ct)
+            .ConfigureAwait(false);
+        if (result.Error != null)
+        {
+            throw new FoundryLocalException($"Error ensuring execution providers downloaded: {result.Error}", _logger);
+        }
+    }
+
+    protected virtual void Dispose(bool disposing)
+    {
+        if (!_disposed)
+        {
+            if (disposing)
+            {
+                if (Urls != null)
+                {
+                    // best effort stop
+                    try
+                    {
+                        StopWebServiceImplAsync().GetAwaiter().GetResult();
+                    }
+                    catch (Exception ex)
+                    {
+                        _logger.LogWarning(ex, "Error stopping web service during Dispose.");
+                    }
+                }
+
+                // NOTE(review): _coreInterop is not disposed here — confirm CoreInterop holds no disposable
+                // resources (or dispose it here if it does).
+                _catalog?.Dispose();
+                _modelManager?.Dispose();
+                _lock.Dispose();
+            }
+
+            _disposed = true;
+        }
+    }
+
+    public void Dispose()
+    {
+        // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
+        Dispose(disposing: true);
+        GC.SuppressFinalize(this);
+    }
+}
diff --git a/sdk_v2/cs/src/FoundryModelInfo.cs b/sdk_v2/cs/src/FoundryModelInfo.cs
new file mode 100644
index 0000000..1f795d2
--- /dev/null
+++ b/sdk_v2/cs/src/FoundryModelInfo.cs
@@ -0,0 +1,122 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+using System.Text.Json.Serialization;
+
+// Device a model variant targets. Serialized as the enum name via JsonStringEnumConverter.
+[JsonConverter(typeof(JsonStringEnumConverter))]
+public enum DeviceType
+{
+ Invalid,
+ CPU,
+ GPU,
+ NPU
+}
+
+// Chat prompt template fragments for a model. Null entries mean the template has no such role section.
+public record PromptTemplate
+{
+ [JsonPropertyName("system")]
+ public string? System { get; init; }
+
+ [JsonPropertyName("user")]
+ public string? User { get; init; }
+
+ [JsonPropertyName("assistant")]
+ public string Assistant { get; init; } = default!;
+
+ [JsonPropertyName("prompt")]
+ public string Prompt { get; init; } = default!;
+}
+
+// Runtime targeting information for a model variant: device type and execution provider name.
+public record Runtime
+{
+ [JsonPropertyName("deviceType")]
+ public DeviceType DeviceType { get; init; } = default!;
+
+ // there are many different possible values; keep it open‑ended
+ [JsonPropertyName("executionProvider")]
+ public string ExecutionProvider { get; init; } = default!;
+}
+
+// A single name/value model setting entry.
+public record Parameter
+{
+ public required string Name { get; set; }
+ public string? Value { get; set; }
+}
+
+// Optional bag of per-model settings.
+public record ModelSettings
+{
+ [JsonPropertyName("parameters")]
+ public Parameter[]? Parameters { get; set; }
+}
+
+// JSON-serializable metadata describing a single model variant (see JsonSerializationContext).
+public record ModelInfo
+{
+ [JsonPropertyName("id")]
+ public required string Id { get; init; }
+
+ [JsonPropertyName("name")]
+ public required string Name { get; init; }
+
+ [JsonPropertyName("version")]
+ public int Version { get; init; }
+
+ [JsonPropertyName("alias")]
+ public required string Alias { get; init; }
+
+ [JsonPropertyName("displayName")]
+ public string? DisplayName { get; init; }
+
+ [JsonPropertyName("providerType")]
+ public required string ProviderType { get; init; }
+
+ [JsonPropertyName("uri")]
+ public required string Uri { get; init; }
+
+ [JsonPropertyName("modelType")]
+ public required string ModelType { get; init; }
+
+ [JsonPropertyName("promptTemplate")]
+ public PromptTemplate? PromptTemplate { get; init; }
+
+ [JsonPropertyName("publisher")]
+ public string? Publisher { get; init; }
+
+ [JsonPropertyName("modelSettings")]
+ public ModelSettings? ModelSettings { get; init; }
+
+ [JsonPropertyName("license")]
+ public string? License { get; init; }
+
+ [JsonPropertyName("licenseDescription")]
+ public string? LicenseDescription { get; init; }
+
+ // True if the model is present in the local cache.
+ [JsonPropertyName("cached")]
+ public bool Cached { get; init; }
+
+
+ [JsonPropertyName("task")]
+ public string? Task { get; init; }
+
+ [JsonPropertyName("runtime")]
+ public Runtime? Runtime { get; init; }
+
+ [JsonPropertyName("fileSizeMb")]
+ public int? FileSizeMb { get; init; }
+
+ [JsonPropertyName("supportsToolCalling")]
+ public bool? SupportsToolCalling { get; init; }
+
+ [JsonPropertyName("maxOutputTokens")]
+ public long? MaxOutputTokens { get; init; }
+
+ [JsonPropertyName("minFLVersion")]
+ public string? MinFLVersion { get; init; }
+
+ // presumably seconds since the Unix epoch given the property name — TODO confirm units.
+ [JsonPropertyName("createdAt")]
+ public long CreatedAtUnix { get; init; }
+}
diff --git a/sdk_v2/cs/src/GlobalSuppressions.cs b/sdk_v2/cs/src/GlobalSuppressions.cs
new file mode 100644
index 0000000..42d5754
--- /dev/null
+++ b/sdk_v2/cs/src/GlobalSuppressions.cs
@@ -0,0 +1,10 @@
+// This file is used by Code Analysis to maintain SuppressMessage
+// attributes that are applied to this project.
+// Project-level suppressions either have no target or are given
+// a specific target and scoped to a namespace, type, member, etc.
+
+using System.Diagnostics.CodeAnalysis;
+
+// Neutron code. Appears that the _releaser is deliberately not disposed of because it may be being used elsewhere
+// due to being returned from the LockAsync method.
+[assembly: SuppressMessage("IDisposableAnalyzers.Correctness", "IDISP002:Dispose member", Justification = "The _releaser is not disposed because it may be used elsewhere after being returned from the LockAsync method.", Scope = "member", Target = "~F:Microsoft.AI.Foundry.Local.Detail.AsyncLock._releaser")]
diff --git a/sdk_v2/cs/src/ICatalog.cs b/sdk_v2/cs/src/ICatalog.cs
new file mode 100644
index 0000000..1234794
--- /dev/null
+++ b/sdk_v2/cs/src/ICatalog.cs
@@ -0,0 +1,53 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+using System.Collections.Generic;
+
+// Catalog interface: list and look up models, and query which models are cached or loaded.
+// NOTE(review): XML doc tags (<summary>/<param>/<returns>) and generic type arguments were stripped during
+// extraction — e.g. `Task>` was presumably `Task<List<Model>>`, and the lookups presumably `Task<Model>` /
+// `Task<ModelVariant>`. TODO: restore from the original source.
+public interface ICatalog
+{
+ ///
+ /// The catalog name.
+ ///
+ string Name { get; }
+
+ ///
+ /// List the available models in the catalog.
+ ///
+ /// Optional CancellationToken.
+ /// List of Model instances.
+ Task> ListModelsAsync(CancellationToken? ct = null);
+
+ ///
+ /// Lookup a model by its alias.
+ ///
+ /// Model alias.
+ /// Optional CancellationToken.
+ /// Model if found.
+ Task GetModelAsync(string modelAlias, CancellationToken? ct = null);
+
+ ///
+ /// Lookup a model variant by its unique model id.
+ ///
+ /// Model id.
+ /// Optional CancellationToken.
+ /// Model variant if found.
+ Task GetModelVariantAsync(string modelId, CancellationToken? ct = null);
+
+ ///
+ /// Get a list of currently downloaded models from the model cache.
+ ///
+ /// Optional CancellationToken.
+ /// List of ModelVariant instances.
+ Task> GetCachedModelsAsync(CancellationToken? ct = null);
+
+ ///
+ /// Get a list of the currently loaded models.
+ ///
+ /// Optional CancellationToken.
+ /// List of ModelVariant instances.
+ Task> GetLoadedModelsAsync(CancellationToken? ct = null);
+}
diff --git a/sdk_v2/cs/src/IModel.cs b/sdk_v2/cs/src/IModel.cs
new file mode 100644
index 0000000..c3acba6
--- /dev/null
+++ b/sdk_v2/cs/src/IModel.cs
@@ -0,0 +1,70 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+using System.Threading;
+using System.Threading.Tasks;
+
+// Operations common to a Model (alias-level) and a ModelVariant (id-level).
+// NOTE(review): XML doc tags and generic type arguments were stripped during extraction — the Task returns
+// presumably were Task<bool> (IsCached/IsLoaded), Task<string> (GetPathAsync), and the download callback
+// presumably Action<double> given "Percentage download (0 - 100.0)". TODO: restore from the original source.
+public interface IModel
+{
+ // Unique id of the model variant operations apply to.
+ string Id { get; }
+ [System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1716:Identifiers should not match keywords",
+ Justification = "Alias is a suitable name in this context.")]
+ // Alias shared by all variants of the model.
+ string Alias { get; }
+
+ // Query whether the model is in the local cache / loaded in memory.
+ Task IsCachedAsync(CancellationToken? ct = null);
+ Task IsLoadedAsync(CancellationToken? ct = null);
+
+ ///
+ /// Download the model to local cache if not already present.
+ ///
+ ///
+ /// Optional progress callback for download progress.
+ /// Percentage download (0 - 100.0) is reported.
+ /// Optional cancellation token.
+ Task DownloadAsync(Action? downloadProgress = null,
+ CancellationToken? ct = null);
+
+ ///
+ /// Gets the model path if cached.
+ ///
+ /// Optional cancellation token.
+ /// Path of model directory.
+ Task GetPathAsync(CancellationToken? ct = null);
+
+ ///
+ /// Load the model into memory if not already loaded.
+ ///
+ /// Optional cancellation token.
+ Task LoadAsync(CancellationToken? ct = null);
+
+ ///
+ /// Remove the model from the local cache.
+ ///
+ /// Optional cancellation token.
+ Task RemoveFromCacheAsync(CancellationToken? ct = null);
+
+ ///
+ /// Unload the model if loaded.
+ ///
+ /// Optional cancellation token.
+ Task UnloadAsync(CancellationToken? ct = null);
+
+ ///
+ /// Get an OpenAI API based ChatClient
+ ///
+ /// Optional cancellation token.
+ /// OpenAI.ChatClient
+ Task GetChatClientAsync(CancellationToken? ct = null);
+
+ ///
+ /// Get an OpenAI API based AudioClient
+ ///
+ /// Optional cancellation token.
+ /// OpenAI.AudioClient
+ Task GetAudioClientAsync(CancellationToken? ct = null);
+}
diff --git a/sdk_v2/cs/src/LogLevel.cs b/sdk_v2/cs/src/LogLevel.cs
new file mode 100644
index 0000000..6362ded
--- /dev/null
+++ b/sdk_v2/cs/src/LogLevel.cs
@@ -0,0 +1,17 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+// Log severity levels used by the SDK, ordered least (Verbose) to most (Fatal) severe.
+public enum LogLevel
+{
+ Verbose = 0,
+ Debug = 1,
+ Information = 2,
+ Warning = 3,
+ Error = 4,
+ Fatal = 5
+}
diff --git a/sdk_v2/cs/src/Microsoft.AI.Foundry.Local.csproj b/sdk_v2/cs/src/Microsoft.AI.Foundry.Local.csproj
new file mode 100644
index 0000000..c292ef8
--- /dev/null
+++ b/sdk_v2/cs/src/Microsoft.AI.Foundry.Local.csproj
@@ -0,0 +1,108 @@
+
+
+ Microsoft AI Foundry Local
+ Microsoft Foundry Local SDK
+ Microsoft
+ Microsoft Corporation
+ © Microsoft Corporation. All rights reserved.
+ LICENSE.txt
+ https://github.com/microsoft/Foundry-Local
+ Microsoft AI Foundry Local SDK for .NET
+ Microsoft AI Foundry SDK
+ README.md
+ https://github.com/microsoft/Foundry-Local
+ git
+
+ net8.0
+ win-x64;win-arm64;linux-x64;linux-arm64;osx-arm64
+
+ true
+ False
+ enable
+ True
+ True
+ enable
+
+
+ true
+ snupkg
+
+
+ false
+ win-x64;win-arm64
+
+
+
+
+ $([System.DateTime]::Now.ToString("yyyyMMddHHmmss"))
+ 0.5.0-dev.local.$(BuildTimestamp)
+
+
+
+ true
+ true
+ true
+
+
+ $(DefineConstants);IS_WINDOWS
+ $(DefineConstants);IS_OSX
+ $(DefineConstants);IS_LINUX
+ latest-recommended
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Microsoft AI Foundry Local for WinML
+ Microsoft Foundry Local SDK for WinML
+ Microsoft.AI.Foundry.Local.WinML
+ Microsoft.AI.Foundry.Local.WinML
+ net8.0-windows10.0.26100.0
+ win-x64;win-arm64
+
+ 10.0.17763.0
+
+
+ $(NoWarn);CsWinRT1028
+
+
+ True
+
+
+ True
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/sdk_v2/cs/src/Model.cs b/sdk_v2/cs/src/Model.cs
new file mode 100644
index 0000000..83bcef6
--- /dev/null
+++ b/sdk_v2/cs/src/Model.cs
@@ -0,0 +1,126 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+using Microsoft.Extensions.Logging;
+
+// A model identified by alias, grouping one or more ModelVariants.
+// All IModel operations delegate to SelectedVariant.
+public class Model : IModel
+{
+ private readonly ILogger _logger;
+
+ // All known variants for this alias, in the order Core returned them (sorted by priority per ctor comment).
+ // NOTE(review): generic type argument lost in extraction — presumably List<ModelVariant>. TODO confirm.
+ public List Variants { get; internal set; }
+ // Variant operations apply to. Defaults to the first variant added; AddVariant may switch it to a cached one.
+ public ModelVariant SelectedVariant { get; internal set; } = default!;
+
+ public string Alias { get; init; }
+ public string Id => SelectedVariant.Id;
+
+ ///
+ /// Is the currently selected variant cached locally?
+ ///
+ public Task IsCachedAsync(CancellationToken? ct = null) => SelectedVariant.IsCachedAsync(ct);
+
+ ///
+ /// Is the currently selected variant loaded in memory?
+ ///
+ public Task IsLoadedAsync(CancellationToken? ct = null) => SelectedVariant.IsLoadedAsync(ct);
+
+ internal Model(ModelVariant modelVariant, ILogger logger)
+ {
+ _logger = logger;
+
+ Alias = modelVariant.Alias;
+ Variants = new() { modelVariant };
+
+ // variants are sorted by Core, so the first one added is the default
+ SelectedVariant = modelVariant;
+ }
+
+ // Adds a variant sharing this model's alias; throws on alias mismatch (internal invariant).
+ internal void AddVariant(ModelVariant variant)
+ {
+ if (Alias != variant.Alias)
+ {
+ // internal error so log
+ throw new FoundryLocalException($"Variant alias {variant.Alias} does not match model alias {Alias}",
+ _logger);
+ }
+
+ Variants.Add(variant);
+
+ // prefer the highest priority locally cached variant
+ if (variant.Info.Cached && !SelectedVariant.Info.Cached)
+ {
+ SelectedVariant = variant;
+ }
+ }
+
+ ///
+ /// Select a specific model variant by its unique model ID.
+ /// The selected variant will be used for operations.
+ ///
+ /// Model Id of the variant to select.
+ /// If variant is not valid for this model.
+ public void SelectVariant(ModelVariant variant)
+ {
+ // reference-equality membership check: the instance must be one of this model's variants
+ _ = Variants.FirstOrDefault(v => v == variant) ??
+ // user error so don't log
+ throw new FoundryLocalException($"Model {Alias} does not have a {variant.Id} variant.");
+
+ SelectedVariant = variant;
+ }
+
+ ///
+ /// Get the latest version of the specified model variant.
+ ///
+ /// Model variant.
+ /// ModelVariant for latest version. Same as `variant` if that is the latest version.
+ /// If variant is not valid for this model.
+ public ModelVariant GetLatestVersion(ModelVariant variant)
+ {
+ // variants are sorted by version, so the first one matching the name is the latest version for that variant.
+ var latest = Variants.FirstOrDefault(v => v.Info.Name == variant.Info.Name) ??
+ // user error so don't log
+ throw new FoundryLocalException($"Model {Alias} does not have a {variant.Id} variant.");
+
+ return latest;
+ }
+
+ // The remaining IModel operations simply forward to the currently selected variant.
+ public async Task GetPathAsync(CancellationToken? ct = null)
+ {
+ return await SelectedVariant.GetPathAsync(ct).ConfigureAwait(false);
+ }
+
+ public async Task DownloadAsync(Action? downloadProgress = null,
+ CancellationToken? ct = null)
+ {
+ await SelectedVariant.DownloadAsync(downloadProgress, ct).ConfigureAwait(false);
+ }
+
+ public async Task LoadAsync(CancellationToken? ct = null)
+ {
+ await SelectedVariant.LoadAsync(ct).ConfigureAwait(false);
+ }
+
+ public async Task GetChatClientAsync(CancellationToken? ct = null)
+ {
+ return await SelectedVariant.GetChatClientAsync(ct).ConfigureAwait(false);
+ }
+
+ public async Task GetAudioClientAsync(CancellationToken? ct = null)
+ {
+ return await SelectedVariant.GetAudioClientAsync(ct).ConfigureAwait(false);
+ }
+
+ public async Task UnloadAsync(CancellationToken? ct = null)
+ {
+ await SelectedVariant.UnloadAsync(ct).ConfigureAwait(false);
+ }
+
+ public async Task RemoveFromCacheAsync(CancellationToken? ct = null)
+ {
+ await SelectedVariant.RemoveFromCacheAsync(ct).ConfigureAwait(false);
+ }
+}
diff --git a/sdk_v2/cs/src/ModelVariant.cs b/sdk_v2/cs/src/ModelVariant.cs
new file mode 100644
index 0000000..6ca7cda
--- /dev/null
+++ b/sdk_v2/cs/src/ModelVariant.cs
@@ -0,0 +1,193 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.Extensions.Logging;
+
+public class ModelVariant : IModel
+{
+ private readonly IModelLoadManager _modelLoadManager;
+ private readonly ICoreInterop _coreInterop;
+ private readonly ILogger _logger;
+
+ public ModelInfo Info { get; } // expose the full info record
+
+ // expose a few common properties directly
+ public string Id => Info.Id;
+ public string Alias => Info.Alias;
+ public int Version { get; init; } // parsed from Info.Version if possible, else 0
+
+ internal ModelVariant(ModelInfo modelInfo, IModelLoadManager modelLoadManager, ICoreInterop coreInterop,
+ ILogger logger)
+ {
+ Info = modelInfo;
+ Version = modelInfo.Version;
+
+ _modelLoadManager = modelLoadManager;
+ _coreInterop = coreInterop;
+ _logger = logger;
+
+ }
+
+ // simpler and always correct to check if loaded from the model load manager
+ // this allows for multiple instances of ModelVariant to exist
+ public async Task IsLoadedAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => IsLoadedImplAsync(ct),
+ "Error checking if model is loaded", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task IsCachedAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => IsCachedImplAsync(ct),
+ "Error checking if model is cached", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task GetPathAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => GetPathImplAsync(ct),
+ "Error getting path for model", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task DownloadAsync(Action? downloadProgress = null,
+ CancellationToken? ct = null)
+ {
+ await Utils.CallWithExceptionHandling(() => DownloadImplAsync(downloadProgress, ct),
+ $"Error downloading model {Id}", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task LoadAsync(CancellationToken? ct = null)
+ {
+ await Utils.CallWithExceptionHandling(() => _modelLoadManager.LoadAsync(Id, ct),
+ "Error loading model", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task UnloadAsync(CancellationToken? ct = null)
+ {
+ await Utils.CallWithExceptionHandling(() => _modelLoadManager.UnloadAsync(Id, ct),
+ "Error unloading model", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task RemoveFromCacheAsync(CancellationToken? ct = null)
+ {
+ await Utils.CallWithExceptionHandling(() => RemoveFromCacheImplAsync(ct),
+ $"Error removing model {Id} from cache", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task GetChatClientAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => GetChatClientImplAsync(ct),
+ "Error getting chat client for model", _logger)
+ .ConfigureAwait(false);
+ }
+
+ public async Task GetAudioClientAsync(CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => GetAudioClientImplAsync(ct),
+ "Error getting audio client for model", _logger)
+ .ConfigureAwait(false);
+ }
+
+ private async Task IsLoadedImplAsync(CancellationToken? ct = null)
+ {
+ var loadedModels = await _modelLoadManager.ListLoadedModelsAsync(ct).ConfigureAwait(false);
+ return loadedModels.Contains(Id);
+ }
+
+ private async Task IsCachedImplAsync(CancellationToken? ct = null)
+ {
+ var cachedModelIds = await Utils.GetCachedModelIdsAsync(_coreInterop, ct).ConfigureAwait(false);
+ return cachedModelIds.Contains(Id);
+ }
+
+ private async Task GetPathImplAsync(CancellationToken? ct = null)
+ {
+ var request = new CoreInteropRequest { Params = new Dictionary { { "Model", Id } } };
+ var result = await _coreInterop.ExecuteCommandAsync("get_model_path", request, ct).ConfigureAwait(false);
+ if (result.Error != null)
+ {
+ throw new FoundryLocalException(
+ $"Error getting path for model {Id}: {result.Error}. Has it been downloaded?");
+ }
+
+ var path = result.Data!;
+ return path;
+ }
+
+ private async Task DownloadImplAsync(Action? downloadProgress = null,
+ CancellationToken? ct = null)
+ {
+ var request = new CoreInteropRequest
+ {
+ Params = new() { { "Model", Id } }
+ };
+
+ ICoreInterop.Response? response;
+
+ if (downloadProgress == null)
+ {
+ response = await _coreInterop.ExecuteCommandAsync("download_model", request, ct).ConfigureAwait(false);
+ }
+ else
+ {
+ var callback = new ICoreInterop.CallbackFn(progressString =>
+ {
+ if (float.TryParse(progressString, out var progress))
+ {
+ downloadProgress(progress);
+ }
+ });
+
+ response = await _coreInterop.ExecuteCommandWithCallbackAsync("download_model", request,
+ callback, ct).ConfigureAwait(false);
+ }
+
+ if (response.Error != null)
+ {
+ throw new FoundryLocalException($"Error downloading model {Id}: {response.Error}");
+ }
+ }
+
+ private async Task RemoveFromCacheImplAsync(CancellationToken? ct = null)
+ {
+ var request = new CoreInteropRequest { Params = new Dictionary { { "Model", Id } } };
+
+ var result = await _coreInterop.ExecuteCommandAsync("remove_cached_model", request, ct).ConfigureAwait(false);
+ if (result.Error != null)
+ {
+ throw new FoundryLocalException($"Error removing model {Id} from cache: {result.Error}");
+ }
+ }
+
+ private async Task GetChatClientImplAsync(CancellationToken? ct = null)
+ {
+ if (!await IsLoadedAsync(ct))
+ {
+ throw new FoundryLocalException($"Model {Id} is not loaded. Call LoadAsync first.");
+ }
+
+ return new OpenAIChatClient(Id);
+ }
+
+ private async Task GetAudioClientImplAsync(CancellationToken? ct = null)
+ {
+ if (!await IsLoadedAsync(ct))
+ {
+ throw new FoundryLocalException($"Model {Id} is not loaded. Call LoadAsync first.");
+ }
+
+ return new OpenAIAudioClient(Id);
+ }
+}
diff --git a/sdk_v2/cs/src/OpenAI/AudioClient.cs b/sdk_v2/cs/src/OpenAI/AudioClient.cs
new file mode 100644
index 0000000..98f40a6
--- /dev/null
+++ b/sdk_v2/cs/src/OpenAI/AudioClient.cs
@@ -0,0 +1,182 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+using System.Runtime.CompilerServices;
+using System.Threading.Channels;
+
+using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
+using Betalgo.Ranul.OpenAI.ObjectModels.ResponseModels;
+
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.AI.Foundry.Local.OpenAI;
+using Microsoft.Extensions.Logging;
+
+///
+/// Audio Client that uses the OpenAI API.
+/// Implemented using Betalgo.Ranul.OpenAI SDK types.
+///
+public class OpenAIAudioClient
+{
+ private readonly string _modelId;
+
+ private readonly ICoreInterop _coreInterop = FoundryLocalManager.Instance.CoreInterop;
+ private readonly ILogger _logger = FoundryLocalManager.Instance.Logger;
+
+ internal OpenAIAudioClient(string modelId)
+ {
+ _modelId = modelId;
+ }
+
+ ///
+ /// Transcribe audio from a file.
+ ///
+ ///
+ /// Path to file containing audio recording.
+ /// Supported formats: ????
+ ///
+ /// Optional cancellation token.
+ /// Transcription response.
+ public async Task TranscribeAudioAsync(string audioFilePath,
+ CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(() => TranscribeAudioImplAsync(audioFilePath, ct),
+ "Error during audio transcription.", _logger)
+ .ConfigureAwait(false);
+ }
+
+ ///
+ /// Transcribe audio from a file with streamed output.
+ ///
+ ///
+ /// Path to file containing audio recording.
+ /// Supported formats: ????
+ ///
+ /// Cancellation token.
+ /// An asynchronous enumerable of transcription responses.
+ public async IAsyncEnumerable TranscribeAudioStreamingAsync(
+ string audioFilePath, [EnumeratorCancellation] CancellationToken ct)
+ {
+ var enumerable = Utils.CallWithExceptionHandling(
+ () => TranscribeAudioStreamingImplAsync(audioFilePath, ct),
+ "Error during streaming audio transcription.", _logger).ConfigureAwait(false);
+
+ await foreach (var item in enumerable)
+ {
+ yield return item;
+ }
+ }
+
+ private async Task TranscribeAudioImplAsync(string audioFilePath,
+ CancellationToken? ct)
+ {
+ var openaiRequest = new AudioCreateTranscriptionRequest
+ {
+ Model = _modelId,
+ FileName = audioFilePath
+ };
+
+ var request = new CoreInteropRequest
+ {
+ Params = new Dictionary
+ {
+ { "OpenAICreateRequest", openaiRequest.ToJson() },
+ }
+ };
+
+ var response = await _coreInterop.ExecuteCommandAsync("audio_transcribe", request,
+ ct ?? CancellationToken.None).ConfigureAwait(false);
+
+
+ var output = response.ToAudioTranscription(_logger);
+
+ return output;
+ }
+
+ private async IAsyncEnumerable TranscribeAudioStreamingImplAsync(
+ string audioFilePath, [EnumeratorCancellation] CancellationToken ct)
+ {
+ var openaiRequest = new AudioCreateTranscriptionRequest
+ {
+ Model = _modelId,
+ FileName = audioFilePath
+ };
+
+ var request = new CoreInteropRequest
+ {
+ Params = new Dictionary
+ {
+ { "OpenAICreateRequest", openaiRequest.ToJson() },
+ }
+ };
+
+ var channel = Channel.CreateUnbounded(
+ new UnboundedChannelOptions
+ {
+ SingleWriter = true,
+ SingleReader = true,
+ AllowSynchronousContinuations = true
+ });
+
+ // The callback will push ChatResponse objects into the channel.
+ // The channel reader will return the values to the user.
+ // This setup prevents the user from blocking the thread generating the responses.
+ _ = Task.Run(async () =>
+ {
+ try
+ {
+ var failed = false;
+
+ await _coreInterop.ExecuteCommandWithCallbackAsync(
+ "audio_transcribe",
+ request,
+ async (callbackData) =>
+ {
+ try
+ {
+ if (!failed)
+ {
+ var audioCompletion = callbackData.ToAudioTranscription(_logger);
+ await channel.Writer.WriteAsync(audioCompletion);
+ }
+ }
+ catch (Exception ex)
+ {
+ // propagate exception to reader
+ channel.Writer.TryComplete(
+ new FoundryLocalException(
+ "Error processing streaming audio transcription callback data.", ex, _logger));
+ failed = true;
+ }
+ },
+ ct
+ ).ConfigureAwait(false);
+
+ // use TryComplete as an exception in the callback may have already closed the channel
+ _ = channel.Writer.TryComplete();
+ }
+ // Ignore cancellation exceptions so we don't convert them into errors
+ catch (Exception ex) when (ex is not OperationCanceledException)
+ {
+ channel.Writer.TryComplete(
+ new FoundryLocalException("Error executing streaming chat completion.", ex, _logger));
+ }
+ catch (OperationCanceledException)
+ {
+ // Complete the channel on cancellation but don't turn it into an error
+ channel.Writer.TryComplete();
+ }
+ }, ct);
+
+ // Start reading from the channel as items arrive.
+ // This will continue until ExecuteCommandWithCallbackAsync completes and closes the channel.
+ await foreach (var item in channel.Reader.ReadAllAsync(ct))
+ {
+ yield return item;
+ }
+ }
+}
diff --git a/sdk_v2/cs/src/OpenAI/AudioTranscriptionRequestResponseTypes.cs b/sdk_v2/cs/src/OpenAI/AudioTranscriptionRequestResponseTypes.cs
new file mode 100644
index 0000000..d2dc729
--- /dev/null
+++ b/sdk_v2/cs/src/OpenAI/AudioTranscriptionRequestResponseTypes.cs
@@ -0,0 +1,49 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.OpenAI;
+
+using System.Text.Json;
+
+using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
+using Betalgo.Ranul.OpenAI.ObjectModels.ResponseModels;
+
+using Microsoft.AI.Foundry.Local;
+using Microsoft.AI.Foundry.Local.Detail;
+
+using Microsoft.Extensions.Logging;
+
+internal static class AudioTranscriptionRequestResponseExtensions
+{
+ internal static string ToJson(this AudioCreateTranscriptionRequest request)
+ {
+ return JsonSerializer.Serialize(request, JsonSerializationContext.Default.AudioCreateTranscriptionRequest);
+ }
+ internal static AudioCreateTranscriptionResponse ToAudioTranscription(this ICoreInterop.Response response,
+ ILogger logger)
+ {
+ if (response.Error != null)
+ {
+ logger.LogError("Error from audio_transcribe: {Error}", response.Error);
+ throw new FoundryLocalException($"Error from audio_transcribe command: {response.Error}");
+ }
+
+ return response.Data!.ToAudioTranscription(logger);
+ }
+
+ internal static AudioCreateTranscriptionResponse ToAudioTranscription(this string responseData, ILogger logger)
+ {
+ var typeInfo = JsonSerializationContext.Default.AudioCreateTranscriptionResponse;
+ var response = JsonSerializer.Deserialize(responseData, typeInfo);
+ if (response == null)
+ {
+ logger.LogError("Failed to deserialize AudioCreateTranscriptionResponse. Json={Data}", responseData);
+ throw new FoundryLocalException("Failed to deserialize AudioCreateTranscriptionResponse");
+ }
+
+ return response;
+ }
+}
diff --git a/sdk_v2/cs/src/OpenAI/ChatClient.cs b/sdk_v2/cs/src/OpenAI/ChatClient.cs
new file mode 100644
index 0000000..beab7a5
--- /dev/null
+++ b/sdk_v2/cs/src/OpenAI/ChatClient.cs
@@ -0,0 +1,185 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+
+using System.Collections.Generic;
+using System.Runtime.CompilerServices;
+using System.Threading.Channels;
+
+using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
+using Betalgo.Ranul.OpenAI.ObjectModels.ResponseModels;
+
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.AI.Foundry.Local.OpenAI;
+using Microsoft.Extensions.Logging;
+
+///
+/// Chat Client that uses the OpenAI API.
+/// Implemented using Betalgo.Ranul.OpenAI SDK types.
+///
+public class OpenAIChatClient
+{
+ private readonly string _modelId;
+
+ private readonly ICoreInterop _coreInterop = FoundryLocalManager.Instance.CoreInterop;
+ private readonly ILogger _logger = FoundryLocalManager.Instance.Logger;
+
+ internal OpenAIChatClient(string modelId)
+ {
+ _modelId = modelId;
+ }
+
+ ///
+ /// Settings that are supported by Foundry Local
+ ///
+ public record ChatSettings
+ {
+ public float? FrequencyPenalty { get; set; }
+ public int? MaxTokens { get; set; }
+ public int? N { get; set; }
+ public float? Temperature { get; set; }
+ public float? PresencePenalty { get; set; }
+ public int? RandomSeed { get; set; }
+ internal bool? Stream { get; set; } // this is set internally based on the API used
+ public int? TopK { get; set; }
+ public float? TopP { get; set; }
+ }
+
+ ///
+ /// Settings to use for chat completions using this client.
+ ///
+ public ChatSettings Settings { get; } = new();
+
+ ///
+ /// Execute a chat completion request.
+ ///
+ /// To continue a conversation, add the ChatMessage from the previous response and new prompt to the messages.
+ ///
+ /// Chat messages. The system message is automatically added.
+ /// Optional cancellation token.
+ /// Chat completion response.
+ public async Task CompleteChatAsync(IEnumerable messages,
+ CancellationToken? ct = null)
+ {
+ return await Utils.CallWithExceptionHandling(
+ () => CompleteChatImplAsync(messages, ct),
+ "Error during chat completion.", _logger).ConfigureAwait(false);
+ }
+
+ ///
+ /// Execute a chat completion request with streamed output.
+ ///
+ /// To continue a conversation, add the ChatMessage from the previous response and new prompt to the messages.
+ ///
+ /// Chat messages. The system message is automatically added.
+ /// Optional cancellation token.
+ /// Async enumerable of chat completion responses.
+ public async IAsyncEnumerable CompleteChatStreamingAsync(
+ IEnumerable messages, [EnumeratorCancellation] CancellationToken ct)
+ {
+ var enumerable = Utils.CallWithExceptionHandling(
+ () => ChatStreamingImplAsync(messages, ct),
+ "Error during streaming chat completion.", _logger).ConfigureAwait(false);
+
+ await foreach (var item in enumerable)
+ {
+ yield return item;
+ }
+ }
+
+ private async Task CompleteChatImplAsync(IEnumerable messages,
+ CancellationToken? ct)
+ {
+ Settings.Stream = false;
+
+ var chatRequest = ChatCompletionCreateRequestExtended.FromUserInput(_modelId, messages, Settings);
+ var chatRequestJson = chatRequest.ToJson();
+
+ var request = new CoreInteropRequest { Params = new() { { "OpenAICreateRequest", chatRequestJson } } };
+ var response = await _coreInterop.ExecuteCommandAsync("chat_completions", request,
+ ct ?? CancellationToken.None).ConfigureAwait(false);
+
+ var chatCompletion = response.ToChatCompletion(_logger);
+
+ return chatCompletion;
+ }
+
+ private async IAsyncEnumerable ChatStreamingImplAsync(
+ IEnumerable messages, [EnumeratorCancellation] CancellationToken ct)
+ {
+ Settings.Stream = true;
+
+ var chatRequest = ChatCompletionCreateRequestExtended.FromUserInput(_modelId, messages, Settings);
+ var chatRequestJson = chatRequest.ToJson();
+ var request = new CoreInteropRequest { Params = new() { { "OpenAICreateRequest", chatRequestJson } } };
+
+ var channel = Channel.CreateUnbounded(
+ new UnboundedChannelOptions
+ {
+ SingleWriter = true,
+ SingleReader = true,
+ AllowSynchronousContinuations = true
+ });
+
+ // The callback will push ChatResponse objects into the channel.
+ // The channel reader will return the values to the user.
+ // This setup prevents the user from blocking the thread generating the responses.
+ _ = Task.Run(async () =>
+ {
+ try
+ {
+ var failed = false;
+
+ await _coreInterop.ExecuteCommandWithCallbackAsync(
+ "chat_completions",
+ request,
+ async (callbackData) =>
+ {
+ try
+ {
+ if (!failed)
+ {
+ var chatCompletion = callbackData.ToChatCompletion(_logger);
+ await channel.Writer.WriteAsync(chatCompletion);
+ }
+ }
+ catch (Exception ex)
+ {
+ // propagate exception to reader
+ channel.Writer.TryComplete(
+ new FoundryLocalException("Error processing streaming chat completion callback data.",
+ ex, _logger));
+ failed = true;
+ }
+ },
+ ct
+ ).ConfigureAwait(false);
+
+ // use TryComplete as an exception in the callback may have already closed the channel
+ _ = channel.Writer.TryComplete();
+ }
+ // Ignore cancellation exceptions so we don't convert them into errors
+ catch (Exception ex) when (ex is not OperationCanceledException)
+ {
+ channel.Writer.TryComplete(
+ new FoundryLocalException("Error executing streaming chat completion.", ex, _logger));
+ }
+ catch (OperationCanceledException)
+ {
+ // Complete the channel on cancellation but don't turn it into an error
+ channel.Writer.TryComplete();
+ }
+ }, ct);
+
+ // Start reading from the channel as items arrive.
+ // This will continue until ExecuteCommandWithCallbackAsync completes and closes the channel.
+ await foreach (var item in channel.Reader.ReadAllAsync(ct))
+ {
+ yield return item;
+ }
+ }
+}
diff --git a/sdk_v2/cs/src/OpenAI/ChatCompletionRequestResponseTypes.cs b/sdk_v2/cs/src/OpenAI/ChatCompletionRequestResponseTypes.cs
new file mode 100644
index 0000000..c054a28
--- /dev/null
+++ b/sdk_v2/cs/src/OpenAI/ChatCompletionRequestResponseTypes.cs
@@ -0,0 +1,95 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.OpenAI;
+
+using System.Globalization;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
+using Betalgo.Ranul.OpenAI.ObjectModels.ResponseModels;
+
+using Microsoft.AI.Foundry.Local;
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.Extensions.Logging;
+
+// https://platform.openai.com/docs/api-reference/chat/create
+// Using the Betalgo ChatCompletionCreateRequest and extending with the `metadata` field for additional parameters
+// which is part of the OpenAI spec but for some reason not part of the Betalgo request object.
+internal class ChatCompletionCreateRequestExtended : ChatCompletionCreateRequest
+{
+ // Valid entries:
+ // int top_k
+ // int random_seed
+ [JsonPropertyName("metadata")]
+ public Dictionary? Metadata { get; set; }
+
+ internal static ChatCompletionCreateRequestExtended FromUserInput(string modelId,
+ IEnumerable messages,
+ OpenAIChatClient.ChatSettings settings)
+ {
+ var request = new ChatCompletionCreateRequestExtended
+ {
+ Model = modelId,
+ Messages = messages.ToList(),
+
+ // apply our specific settings
+ FrequencyPenalty = settings.FrequencyPenalty,
+ MaxTokens = settings.MaxTokens,
+ N = settings.N,
+ Temperature = settings.Temperature,
+ PresencePenalty = settings.PresencePenalty,
+ Stream = settings.Stream,
+ TopP = settings.TopP
+ };
+
+ var metadata = new Dictionary();
+
+ if (settings.TopK.HasValue)
+ {
+ metadata["top_k"] = settings.TopK.Value.ToString(CultureInfo.InvariantCulture);
+ }
+
+ if (settings.RandomSeed.HasValue)
+ {
+ metadata["random_seed"] = settings.RandomSeed.Value.ToString(CultureInfo.InvariantCulture);
+ }
+
+ if (metadata.Count > 0)
+ {
+ request.Metadata = metadata;
+ }
+
+
+ return request;
+ }
+}
+
+internal static class ChatCompletionsRequestResponseExtensions
+{
+ internal static string ToJson(this ChatCompletionCreateRequestExtended request)
+ {
+ return JsonSerializer.Serialize(request, JsonSerializationContext.Default.ChatCompletionCreateRequestExtended);
+ }
+
+ internal static ChatCompletionCreateResponse ToChatCompletion(this ICoreInterop.Response response, ILogger logger)
+ {
+ if (response.Error != null)
+ {
+ logger.LogError("Error from chat_completions: {Error}", response.Error);
+ throw new FoundryLocalException($"Error from chat_completions command: {response.Error}");
+ }
+
+ return response.Data!.ToChatCompletion(logger);
+ }
+
+ internal static ChatCompletionCreateResponse ToChatCompletion(this string responseData, ILogger logger)
+ {
+ return JsonSerializer.Deserialize(responseData, JsonSerializationContext.Default.ChatCompletionCreateResponse)
+ ?? throw new JsonException("Failed to deserialize ChatCompletion");
+ }
+}
diff --git a/sdk_v2/cs/src/Utils.cs b/sdk_v2/cs/src/Utils.cs
new file mode 100644
index 0000000..8300a96
--- /dev/null
+++ b/sdk_v2/cs/src/Utils.cs
@@ -0,0 +1,53 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local;
+using System.Text.Json;
+using System.Threading.Tasks;
+
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.Extensions.Logging;
+
+internal class Utils
+{
+ internal static async Task GetCachedModelIdsAsync(ICoreInterop coreInterop, CancellationToken? ct = null)
+ {
+ CoreInteropRequest? input = null;
+ var result = await coreInterop.ExecuteCommandAsync("get_cached_models", input, ct).ConfigureAwait(false);
+ if (result.Error != null)
+ {
+ throw new FoundryLocalException($"Error getting cached model ids: {result.Error}");
+ }
+
+ var typeInfo = JsonSerializationContext.Default.StringArray;
+ var cachedModelIds = JsonSerializer.Deserialize(result.Data!, typeInfo);
+ if (cachedModelIds == null)
+ {
+ throw new FoundryLocalException($"Failed to deserialized cached model names. Json:'{result.Data!}'");
+ }
+
+ return cachedModelIds;
+ }
+
+ // Helper to wrap function calls with consistent exception handling
+ internal static T CallWithExceptionHandling(Func func, string errorMsg, ILogger logger)
+ {
+ try
+ {
+ return func();
+ }
+ // we ignore OperationCanceledException to allow proper cancellation propagation
+ // this also covers TaskCanceledException since it derives from OperationCanceledException
+ catch (Exception ex) when (ex is not OperationCanceledException)
+ {
+ if (ex is FoundryLocalException)
+ {
+ throw;
+ }
+ throw new FoundryLocalException(errorMsg, ex, logger);
+ }
+ }
+}
diff --git a/sdk_v2/cs/src/msbuild.binlog b/sdk_v2/cs/src/msbuild.binlog
new file mode 100644
index 0000000..3beb2b7
Binary files /dev/null and b/sdk_v2/cs/src/msbuild.binlog differ
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/AudioClientTests.cs b/sdk_v2/cs/test/FoundryLocal.Tests/AudioClientTests.cs
new file mode 100644
index 0000000..1581901
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/AudioClientTests.cs
@@ -0,0 +1,74 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+
+using System.Text;
+using System.Threading.Tasks;
+
+internal sealed class AudioClientTests
+{
+ private static Model? model;
+
+ [Before(Class)]
+ public static async Task Setup()
+ {
+ var manager = FoundryLocalManager.Instance; // initialized by Utils
+ var catalog = await manager.GetCatalogAsync();
+ var model = await catalog.GetModelAsync("whisper-tiny").ConfigureAwait(false);
+ await Assert.That(model).IsNotNull();
+
+ await model.LoadAsync().ConfigureAwait(false);
+ await Assert.That(await model.IsLoadedAsync()).IsTrue();
+
+ AudioClientTests.model = model;
+ }
+
+ [Test]
+ public async Task AudioTranscription_NoStreaming_Succeeds()
+ {
+ var audioClient = await model!.GetAudioClientAsync();
+ await Assert.That(audioClient).IsNotNull();
+
+
+ var audioFilePath = "testdata/Recording.mp3";
+
+ var response = await audioClient.TranscribeAudioAsync(audioFilePath).ConfigureAwait(false);
+
+ await Assert.That(response).IsNotNull();
+ await Assert.That(response.Text).IsNotNull().And.IsNotEmpty();
+ var content = response.Text;
+ await Assert.That(content).IsEqualTo(" And lots of times you need to give people more than one link at a time. You a band could give their fans a couple new videos from the live concert behind the scenes photo gallery and album to purchase like these next few links.");
+ Console.WriteLine($"Response: {content}");
+ }
+
+ [Test]
+ public async Task AudioTranscription_Streaming_Succeeds()
+ {
+ var audioClient = await model!.GetAudioClientAsync();
+ await Assert.That(audioClient).IsNotNull();
+
+
+ var audioFilePath = "testdata/Recording.mp3";
+
+ var updates = audioClient.TranscribeAudioStreamingAsync(audioFilePath, CancellationToken.None).ConfigureAwait(false);
+
+ StringBuilder responseMessage = new();
+ await foreach (var response in updates)
+ {
+ await Assert.That(response).IsNotNull();
+ await Assert.That(response.Text).IsNotNull().And.IsNotEmpty();
+ var content = response.Text;
+ responseMessage.Append(content);
+ }
+
+ var fullResponse = responseMessage.ToString();
+ Console.WriteLine(fullResponse);
+ await Assert.That(fullResponse).IsEqualTo(" And lots of times you need to give people more than one link at a time. You a band could give their fans a couple new videos from the live concert behind the scenes photo gallery and album to purchase like these next few links.");
+
+
+ }
+}
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/ChatCompletionsTests.cs b/sdk_v2/cs/test/FoundryLocal.Tests/ChatCompletionsTests.cs
new file mode 100644
index 0000000..0f1c7c6
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/ChatCompletionsTests.cs
@@ -0,0 +1,131 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+
+using System.Text;
+using System.Threading.Tasks;
+
+using Betalgo.Ranul.OpenAI.ObjectModels.RequestModels;
+
+internal sealed class ChatCompletionsTests
+{
+ private static Model? model;
+
+ [Before(Class)]
+ public static async Task Setup()
+ {
+ var manager = FoundryLocalManager.Instance; // initialized by Utils
+ var catalog = await manager.GetCatalogAsync();
+
+ // Load the specific cached model variant directly
+ var modelVariant = await catalog.GetModelVariantAsync("qwen2.5-0.5b-instruct-generic-cpu:4").ConfigureAwait(false);
+ await Assert.That(modelVariant).IsNotNull();
+
+ var model = new Model(modelVariant!, manager.Logger);
+ await model.LoadAsync().ConfigureAwait(false);
+ await Assert.That(await model.IsLoadedAsync()).IsTrue();
+
+ ChatCompletionsTests.model = model;
+ }
+
+ [Test]
+ public async Task DirectChat_NoStreaming_Succeeds()
+ {
+ var chatClient = await model!.GetChatClientAsync();
+ await Assert.That(chatClient).IsNotNull();
+
+ chatClient.Settings.MaxTokens = 500;
+ chatClient.Settings.Temperature = 0.0f; // for deterministic results
+
+ List messages = new()
+ {
+ // System prompt is setup by GenAI
+ new ChatMessage { Role = "user", Content = "You are a calculator. Be precise. What is the answer to 7 multiplied by 6?" }
+ };
+
+ var response = await chatClient.CompleteChatAsync(messages).ConfigureAwait(false);
+
+ await Assert.That(response).IsNotNull();
+ await Assert.That(response.Choices).IsNotNull().And.IsNotEmpty();
+ var message = response.Choices[0].Message;
+ await Assert.That(message).IsNotNull();
+ await Assert.That(message.Role).IsEqualTo("assistant");
+ await Assert.That(message.Content).IsNotNull();
+ await Assert.That(message.Content).Contains("42");
+ Console.WriteLine($"Response: {message.Content}");
+
+ messages.Add(new ChatMessage { Role = "assistant", Content = message.Content });
+
+ messages.Add(new ChatMessage
+ {
+ Role = "user",
+ Content = "Is the answer a real number?"
+ });
+
+ response = await chatClient.CompleteChatAsync(messages).ConfigureAwait(false);
+ message = response.Choices[0].Message;
+ await Assert.That(message.Content).IsNotNull();
+ await Assert.That(message.Content).Contains("Yes");
+ }
+
+ [Test]
+ public async Task DirectChat_Streaming_Succeeds()
+ {
+ var chatClient = await model!.GetChatClientAsync();
+ await Assert.That(chatClient).IsNotNull();
+
+ chatClient.Settings.MaxTokens = 500;
+ chatClient.Settings.Temperature = 0.0f; // for deterministic results
+
+ List messages = new()
+ {
+ new ChatMessage { Role = "user", Content = "You are a calculator. Be precise. What is the answer to 7 multiplied by 6?" }
+ };
+
+ var updates = chatClient.CompleteChatStreamingAsync(messages, CancellationToken.None).ConfigureAwait(false);
+
+ StringBuilder responseMessage = new();
+ await foreach (var response in updates)
+ {
+ await Assert.That(response).IsNotNull();
+ await Assert.That(response.Choices).IsNotNull().And.IsNotEmpty();
+ var message = response.Choices[0].Message;
+ await Assert.That(message).IsNotNull();
+ await Assert.That(message.Role).IsEqualTo("assistant");
+ await Assert.That(message.Content).IsNotNull();
+ responseMessage.Append(message.Content);
+ }
+
+ var fullResponse = responseMessage.ToString();
+ Console.WriteLine(fullResponse);
+ await Assert.That(fullResponse).Contains("42");
+
+ messages.Add(new ChatMessage { Role = "assistant", Content = fullResponse });
+ messages.Add(new ChatMessage
+ {
+ Role = "user",
+ Content = "Add 25 to the previous answer. Think hard to be sure of the answer."
+ });
+
+ updates = chatClient.CompleteChatStreamingAsync(messages, CancellationToken.None).ConfigureAwait(false);
+ responseMessage.Clear();
+ await foreach (var response in updates)
+ {
+ await Assert.That(response).IsNotNull();
+ await Assert.That(response.Choices).IsNotNull().And.IsNotEmpty();
+ var message = response.Choices[0].Message;
+ await Assert.That(message).IsNotNull();
+ await Assert.That(message.Role).IsEqualTo("assistant");
+ await Assert.That(message.Content).IsNotNull();
+ responseMessage.Append(message.Content);
+ }
+
+ fullResponse = responseMessage.ToString();
+ Console.WriteLine(fullResponse);
+ await Assert.That(fullResponse).Contains("67");
+ }
+}
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/EndToEnd.cs b/sdk_v2/cs/test/FoundryLocal.Tests/EndToEnd.cs
new file mode 100644
index 0000000..80ab4c0
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/EndToEnd.cs
@@ -0,0 +1,80 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+using System;
+using System.Threading.Tasks;
+
+// End-to-end exercise of the public SDK surface: catalog -> variant -> download -> load -> web service.
+internal sealed class EndToEnd
+{
+ // end-to-end using real catalog. run manually as a standalone test as it alters the model cache.
+ [Test]
+ public async Task EndToEndTest_Succeeds()
+ {
+ var manager = FoundryLocalManager.Instance; // initialized by Utils
+ var catalog = await manager.GetCatalogAsync();
+
+ // The real catalog must be reachable and non-empty for the rest of the test to be meaningful.
+ var models = await catalog.ListModelsAsync().ConfigureAwait(false);
+
+ await Assert.That(models).IsNotNull();
+ await Assert.That(models.Count).IsGreaterThan(0);
+
+ // Load the specific cached model variant directly
+ var modelVariant = await catalog.GetModelVariantAsync("qwen2.5-0.5b-instruct-generic-cpu:4")
+ .ConfigureAwait(false);
+
+ await Assert.That(modelVariant).IsNotNull();
+ await Assert.That(modelVariant!.Alias).IsEqualTo("qwen2.5-0.5b");
+
+ // Create model from the specific variant
+ var model = new Model(modelVariant, manager.Logger);
+
+ // uncomment this to remove the model first to test the download progress
+ // only do this when manually testing as other tests expect the model to be cached
+ //await modelVariant.RemoveFromCacheAsync().ConfigureAwait(false);
+ //await Assert.That(modelVariant.IsCached).IsFalse(); // check variant status matches
+
+ // Progress callbacks only fire for a real download, so remember the cache state up front.
+ var expectedCallbackUsed = !await modelVariant.IsCachedAsync();
+ var progressValues = new List(); // NOTE(review): generic type arguments appear stripped in this patch (likely List<float> / Action<float>) — confirm against original source.
+ var addProgressCallbackValue = new Action(progressValues.Add);
+
+ await model.DownloadAsync(addProgressCallbackValue);
+
+ if (expectedCallbackUsed)
+ {
+ // A real download must report progress and finish at exactly 100%.
+ await Assert.That(progressValues).IsNotEmpty();
+ await Assert.That(progressValues[^1]).IsEqualTo(100.0f);
+ }
+ else
+ {
+ await Assert.That(progressValues).IsEmpty(); // no callback if already cached
+ }
+
+ await Assert.That(await modelVariant.IsCachedAsync()).IsTrue(); // check variant status matches
+
+ // The variant's path and its parent model's path must agree once the variant is cached.
+ var path = await modelVariant.GetPathAsync().ConfigureAwait(false);
+ var modelPath = await model.GetPathAsync().ConfigureAwait(false);
+ await Assert.That(path).IsNotNull();
+ await Assert.That(modelPath).IsEqualTo(path);
+
+ // Loading the variant should be observable through both the variant and its parent model.
+ await modelVariant.LoadAsync().ConfigureAwait(false);
+ await Assert.That(await modelVariant.IsLoadedAsync()).IsTrue();
+ await Assert.That(await model.IsLoadedAsync()).IsTrue();
+
+ // check we get the same info from the web service
+ await manager.StartWebServiceAsync();
+ await Assert.That(manager.Urls).IsNotNull();
+ var serviceUri = new Uri(manager.Urls![0]); // NOTE(review): serviceUri is never used below — confirm intent or remove.
+
+ // create model load manager that queries the web service
+ var loadedModels = await catalog.GetLoadedModelsAsync().ConfigureAwait(false);
+ await Assert.That(loadedModels).Contains(modelVariant);
+
+ // Unload happens in TestAssemblySetupCleanup so tests don't affect each other.
+ //await modelVariant.UnloadAsync().ConfigureAwait(false);
+ //await Assert.That(modelVariant.IsLoaded).IsFalse();
+ }
+}
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/FoundryLocalManagerTest.cs b/sdk_v2/cs/test/FoundryLocal.Tests/FoundryLocalManagerTest.cs
new file mode 100644
index 0000000..5227e06
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/FoundryLocalManagerTest.cs
@@ -0,0 +1,103 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+
+using System;
+
+using Microsoft.AI.Foundry.Local;
+using Microsoft.AI.Foundry.Local.Detail;
+
+// Tests of FoundryLocalManager/Catalog behavior using the singleton created in Utils.AssemblyInit.
+public class FoundryLocalManagerTests
+{
+ // Lists the catalog and verifies each model's variants appear in the expected sorted order.
+ [Test]
+ public async Task Manager_GetCatalog_Succeeds()
+ {
+ var catalog = await FoundryLocalManager.Instance.GetCatalogAsync() as Catalog;
+ await Assert.That(catalog).IsNotNull();
+ await Assert.That(catalog!.Name).IsNotNullOrWhitespace();
+
+ var models = await catalog.ListModelsAsync();
+ await Assert.That(models).IsNotNull().And.IsNotEmpty();
+
+ foreach (var model in models)
+ {
+ Console.WriteLine($"Model Alias: {model.Alias}, Variants: {model.Variants.Count}");
+ Console.WriteLine($"Selected Variant Id: {model.SelectedVariant?.Id ?? "none"}");
+
+ // variants should be in sorted order
+
+ // Trackers for the previous variant; lastVersion starts at int.MaxValue so the first variant always passes.
+ DeviceType lastDeviceType = DeviceType.Invalid;
+ var lastName = string.Empty;
+ var lastVersion = int.MaxValue;
+
+ foreach (var variant in model.Variants)
+ {
+ Console.WriteLine($" Id: {variant.Id}, Cached={variant.Info.Cached}");
+
+ // variants are grouped by name
+ // check if variants are sorted by device type and version
+ // Fails when, within the same name group, the device type increases, or the version increases
+ // at the same device type — i.e. the expected order is descending device type, then descending version.
+ if ((variant.Info.Name == lastName) &&
+ ((variant.Info.Runtime?.DeviceType > lastDeviceType) ||
+ (variant.Info.Runtime?.DeviceType == lastDeviceType && variant.Info.Version > lastVersion)))
+ {
+ Assert.Fail($"Variant {variant.Id} is not in the expected order.");
+ }
+
+ lastDeviceType = variant.Info.Runtime?.DeviceType ?? DeviceType.Invalid;
+ lastName = variant.Info.Name;
+ lastVersion = variant.Info.Version;
+ }
+ }
+ }
+
+ // Exercises cached-model enumeration plus GetPath/Load on the smallest cached model.
+ [Test]
+ public async Task Catalog_ListCachedLoadUnload_Succeeds()
+ {
+ List logSink = new(); // NOTE(review): generic type arguments appear stripped throughout this patch (e.g. List<...>) — confirm against original source.
+ var logger = Utils.CreateCapturingLoggerMock(logSink);
+ using var loadManager = new ModelLoadManager(null, Utils.CoreInterop, logger.Object);
+
+ // Intercept the 'initialize' interop command so no real service initialization happens.
+ List intercepts = new()
+ {
+ new Utils.InteropCommandInterceptInfo
+ {
+ CommandName = "initialize",
+ CommandInput = null,
+ ResponseData = "Success",
+ ResponseError = null
+ }
+ };
+ var coreInterop = Utils.CreateCoreInteropWithIntercept(Utils.CoreInterop, intercepts);
+ using var catalog = await Catalog.CreateAsync(loadManager, coreInterop.Object, logger.Object);
+ await Assert.That(catalog).IsNotNull();
+
+ var models = await catalog.ListModelsAsync();
+ await Assert.That(models).IsNotNull().And.IsNotEmpty();
+
+ var cachedModels = await catalog.GetCachedModelsAsync();
+ await Assert.That(cachedModels).IsNotNull();
+
+ // An empty cache is a valid environment state, not a failure.
+ if (cachedModels.Count == 0)
+ {
+ Console.WriteLine("No cached models found; skipping get path/load/unload test.");
+ return;
+ }
+
+ // find smallest. pick first if no local models have size info.
+ var smallest = cachedModels.Where(m => m.Info.FileSizeMb > 0).OrderBy(m => m.Info.FileSizeMb).FirstOrDefault();
+ var variant = smallest ?? cachedModels[0];
+
+ Console.WriteLine($"Testing GetPath/Load/Unload with ModelId: {variant.Id}");
+ var path = await variant.GetPathAsync();
+ Console.WriteLine($"Model path: {path}");
+ await variant.LoadAsync();
+
+ // We unload any loaded models during cleanup for all tests
+ // await variant.UnloadAsync();
+ }
+}
+
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/LOCAL_MODEL_TESTING.md b/sdk_v2/cs/test/FoundryLocal.Tests/LOCAL_MODEL_TESTING.md
new file mode 100644
index 0000000..77eea3d
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/LOCAL_MODEL_TESTING.md
@@ -0,0 +1,37 @@
+# Running Local Model Tests
+
+## Configuration
+
+The test model cache directory name is configured in `sdk_v2/cs/test/FoundryLocal.Tests/appsettings.Test.json`:
+
+```json
+{
+ "TestModelCacheDirName": "/path/to/model/cache"
+}
+```
+
+## Run the tests
+
+The tests will automatically find the models in the configured test model cache directory.
+
+```bash
+cd /path/to/parent-dir/foundry-local-sdk/sdk_v2/cs/test/FoundryLocal.Tests
+dotnet test Microsoft.AI.Foundry.Local.Tests.csproj --configuration Release
+```
\ No newline at end of file
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/Microsoft.AI.Foundry.Local.Tests.csproj b/sdk_v2/cs/test/FoundryLocal.Tests/Microsoft.AI.Foundry.Local.Tests.csproj
new file mode 100644
index 0000000..15a33f7
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/Microsoft.AI.Foundry.Local.Tests.csproj
@@ -0,0 +1,55 @@
+
+
+
+ net9.0
+ enable
+ enable
+ false
+ true
+ false
+
+
+
+
+
+
+
+ $(NETCoreSdkRuntimeIdentifier)
+
+
+
+ net9.0-windows10.0.26100.0
+ 10.0.17763.0
+ None
+ true
+
+
+
+
+
+ PreserveNewest
+
+
+
+
+
+ PreserveNewest
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/ModelTests.cs b/sdk_v2/cs/test/FoundryLocal.Tests/ModelTests.cs
new file mode 100644
index 0000000..b5a4965
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/ModelTests.cs
@@ -0,0 +1,54 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+
+using Microsoft.Extensions.Logging.Abstractions;
+
+using Moq;
+
+// Unit tests for Model variant/version selection using mocked dependencies (no real service).
+internal sealed class ModelTests
+{
+ // Verifies Model.GetLatestVersion returns the highest-version variant sharing the given variant's name.
+ // NOTE(review): "GetLastestVersion_Works" misspells "Latest" — consider renaming.
+ [Test]
+ public async Task GetLastestVersion_Works()
+ {
+ var loadManager = new Mock(); // NOTE(review): Mock<T> / List<T> type arguments appear stripped in this patch — confirm against original source.
+ var coreInterop = new Mock();
+ var logger = NullLogger.Instance;
+
+ // Factory for minimal ModelInfo records; Id encodes "name:version".
+ var createModelInfo = (string name, int version) => new ModelInfo
+ {
+ Id = $"{name}:{version}",
+ Alias = "model",
+ Name = name,
+ Version = version,
+ Uri = "local://model",
+ ProviderType = "local",
+ ModelType = "test"
+ };
+
+ // Three variants under one alias: model_a v4, and model_b v3/v2.
+ var variants = new List
+ {
+ new(createModelInfo("model_a", 4), loadManager.Object, coreInterop.Object, logger),
+ new(createModelInfo("model_b", 3), loadManager.Object, coreInterop.Object, logger),
+ new(createModelInfo("model_b", 2), loadManager.Object, coreInterop.Object, logger),
+ };
+
+ var model = new Model(variants[0], NullLogger.Instance);
+ foreach (var variant in variants.Skip(1))
+ {
+ model.AddVariant(variant);
+ }
+
+ // model_a has a single version, so it is its own latest.
+ var latestA = model.GetLatestVersion(variants[0]);
+ await Assert.That(latestA).IsEqualTo(variants[0]);
+
+ // For model_b, v3 is the latest even when queried via the v2 variant.
+ var latestB = model.GetLatestVersion(variants[2]);
+ await Assert.That(latestB).IsEqualTo(variants[1]);
+ }
+}
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/SkipInCIAttribute.cs b/sdk_v2/cs/test/FoundryLocal.Tests/SkipInCIAttribute.cs
new file mode 100644
index 0000000..c4d17e5
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/SkipInCIAttribute.cs
@@ -0,0 +1,19 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+
+using TUnit.Core;
+
+using System.Threading.Tasks;
+
+/// <summary>
+/// Marks a test as local-only: the test is skipped whenever the run is detected as a CI environment.
+/// </summary>
+public class SkipInCIAttribute() : SkipAttribute("This test is only supported locally. Skipped on CIs.")
+{
+    // Skip exactly when Utils reports a CI environment; no async work is needed, so wrap in a completed task.
+    public override Task ShouldSkip(TestRegisteredContext context) => Task.FromResult(Utils.IsRunningInCI());
+}
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/TestAssemblySetupCleanup.cs b/sdk_v2/cs/test/FoundryLocal.Tests/TestAssemblySetupCleanup.cs
new file mode 100644
index 0000000..ac536d1
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/TestAssemblySetupCleanup.cs
@@ -0,0 +1,36 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+using System.Threading.Tasks;
+
+// Assembly-level teardown: unloads any models left loaded so test runs don't affect each other.
+internal static class TestAssemblySetupCleanup
+{
+
+ [After(Assembly)]
+ public static async Task Cleanup(AssemblyHookContext _)
+ {
+ try
+ {
+ // ensure any loaded models are unloaded
+ var manager = FoundryLocalManager.Instance; // initialized by Utils
+ var catalog = await manager.GetCatalogAsync();
+ var models = await catalog.GetLoadedModelsAsync().ConfigureAwait(false);
+
+ foreach (var model in models)
+ {
+ // Sanity-check the loaded state before and after each unload.
+ await Assert.That(await model.IsLoadedAsync()).IsTrue();
+ await model.UnloadAsync().ConfigureAwait(false);
+ await Assert.That(await model.IsLoadedAsync()).IsFalse();
+ }
+ }
+ catch (Exception ex)
+ {
+ // Log then rethrow so hook failures remain visible to the test runner.
+ Console.WriteLine($"Error during Cleanup: {ex}");
+ throw;
+ }
+ }
+}
diff --git a/sdk_v2/cs/test/FoundryLocal.Tests/Utils.cs b/sdk_v2/cs/test/FoundryLocal.Tests/Utils.cs
new file mode 100644
index 0000000..04ee3fa
--- /dev/null
+++ b/sdk_v2/cs/test/FoundryLocal.Tests/Utils.cs
@@ -0,0 +1,451 @@
+// --------------------------------------------------------------------------------------------------------------------
+//
+// Copyright (c) Microsoft. All rights reserved.
+//
+// --------------------------------------------------------------------------------------------------------------------
+
+namespace Microsoft.AI.Foundry.Local.Tests;
+
+using System;
+using System.Collections.Generic;
+using System.Runtime.CompilerServices;
+using System.Text.Json;
+
+using Microsoft.AI.Foundry.Local.Detail;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.Logging;
+
+using Microsoft.VisualStudio.TestPlatform.TestHost;
+
+using Moq;
+
+internal static class Utils
+{
+ // Bundles the in-memory test catalog with its pre-serialized JSON form so both are built once.
+ internal struct TestCatalogInfo
+ {
+ internal readonly List TestCatalog { get; } // NOTE(review): generic type argument appears stripped in this patch (likely List<ModelInfo>) — confirm against original source.
+ internal readonly string ModelListJson { get; }
+
+ // includeCuda: whether CUDA variants are included in the generated catalog.
+ internal TestCatalogInfo(bool includeCuda)
+ {
+
+ TestCatalog = Utils.BuildTestCatalog(includeCuda);
+ ModelListJson = JsonSerializer.Serialize(TestCatalog, JsonSerializationContext.Default.ListModelInfo);
+ }
+ }
+
+ internal static readonly TestCatalogInfo TestCatalog = new(true);
+
+ // Assembly-level setup: builds a console logger, resolves the test model cache directory from
+ // appsettings.Test.json, then initializes the FoundryLocalManager singleton and a standalone CoreInterop.
+ [Before(Assembly)]
+ public static void AssemblyInit(AssemblyHookContext _)
+ {
+ using var loggerFactory = LoggerFactory.Create(builder =>
+ {
+ builder
+ .AddConsole()
+ .SetMinimumLevel(LogLevel.Debug);
+ });
+
+ // NOTE(review): loggerFactory is disposed when this method returns, yet 'logger' is handed to
+ // long-lived objects below (manager singleton, CoreInterop) — confirm logging still works afterwards.
+ ILogger logger = loggerFactory.CreateLogger(); // NOTE(review): category/type argument appears stripped in this patch — confirm.
+
+ // Read configuration from appsettings.Test.json
+ logger.LogDebug("Reading configuration from appsettings.Test.json");
+ var configuration = new ConfigurationBuilder()
+ .SetBasePath(Directory.GetCurrentDirectory())
+ .AddJsonFile("appsettings.Test.json", optional: true, reloadOnChange: false)
+ .Build();
+
+ // Default cache dir name when no setting is present.
+ var testModelCacheDirName = configuration["TestModelCacheDirName"] ?? "test-data-shared";
+ string testDataSharedPath;
+ if (Path.IsPathRooted(testModelCacheDirName) ||
+ testModelCacheDirName.Contains(Path.DirectorySeparatorChar) ||
+ testModelCacheDirName.Contains(Path.AltDirectorySeparatorChar))
+ {
+ // It's a relative or complete filepath, resolve from current directory
+ testDataSharedPath = Path.GetFullPath(testModelCacheDirName);
+ }
+ else
+ {
+ // It's just a directory name, combine with repo root parent
+ testDataSharedPath = Path.GetFullPath(Path.Combine(GetRepoRoot(), "..", testModelCacheDirName));
+ }
+
+ logger.LogInformation("Using test model cache directory: {testDataSharedPath}", testDataSharedPath);
+
+ // Fail fast with a clear message if the cache directory is missing.
+ if (!Directory.Exists(testDataSharedPath))
+ {
+ throw new DirectoryNotFoundException($"Test model cache directory does not exist: {testDataSharedPath}");
+
+ }
+
+ var config = new Configuration
+ {
+ AppName = "FoundryLocalSdkTest",
+ LogLevel = Local.LogLevel.Debug,
+ Web = new Configuration.WebService
+ {
+ Urls = "http://127.0.0.1:0" // port 0: let the host pick a free port
+ },
+ ModelCacheDir = testDataSharedPath
+ };
+
+ // Initialize the singleton instance.
+ // This hook method is void, so the async initialization is blocked on synchronously.
+ FoundryLocalManager.CreateAsync(config, logger).GetAwaiter().GetResult();
+
+ // standalone instance for testing individual components that skips the 'initialize' command
+ CoreInterop = new CoreInterop(logger);
+ }
+
+ // Standalone interop instance shared by component tests; assigned once in AssemblyInit.
+ internal static ICoreInterop CoreInterop { get; private set; } = default!;
+
+ internal static Mock CreateCapturingLoggerMock(List sink)
+ {
+ var mock = new Mock();
+ mock.Setup(x => x.Log(
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ It.IsAny(),
+ (Func)It.IsAny