22 changes: 7 additions & 15 deletions sdk/cs/src/FoundryLocalManager.cs
@@ -51,23 +51,15 @@ public partial class FoundryLocalManager : IDisposable, IAsyncDisposable
     // Sees if the service is already running
     public bool IsServiceRunning => _serviceUri != null;
 
-    public static async Task<FoundryLocalManager> StartModelAsync(string aliasOrModelId, DeviceType? device = null, CancellationToken ct = default)
+    public async Task<ModelInfo> StartModelAsync(string aliasOrModelId, DeviceType? device = null, CancellationToken ct = default)
     {
-        var manager = new FoundryLocalManager();
-        try
-        {
-            await manager.StartServiceAsync(ct);
-            var modelInfo = await manager.GetModelInfoAsync(aliasOrModelId, device, ct)
+        await StartServiceAsync(ct);
+        var modelInfo = await GetModelInfoAsync(aliasOrModelId, device, ct)
             ?? throw new InvalidOperationException($"Model {aliasOrModelId} not found in catalog.");
-            await manager.DownloadModelAsync(modelInfo.ModelId, device: device, token: null, force: false, ct: ct);
-            await manager.LoadModelAsync(aliasOrModelId, device: device, ct: ct);
-            return manager;
-        }
-        catch
-        {
-            manager.Dispose();
-            throw;
-        }
+        await DownloadModelAsync(modelInfo.ModelId, device: device, token: null, force: false, ct: ct);
+        await LoadModelAsync(aliasOrModelId, device: device, ct: ct);
+        return modelInfo;
+
     }
 
     public async Task StartServiceAsync(CancellationToken ct = default)
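
For context, a minimal sketch of how calling code might migrate to the new signature: the static factory that constructed and returned a FoundryLocalManager becomes an instance method returning the resolved ModelInfo, so the caller now owns the manager's lifetime. The "my-alias" value below is a placeholder, not taken from this PR.

// Before (removed overload): the factory created, started, and returned the manager.
// var manager = await FoundryLocalManager.StartModelAsync("my-alias");

// After (this PR): the caller creates and disposes the manager itself;
// StartModelAsync starts the service, downloads and loads the model,
// and returns the resolved ModelInfo. "my-alias" is a placeholder alias.
await using var manager = new FoundryLocalManager();
var modelInfo = await manager.StartModelAsync("my-alias");
Console.WriteLine($"Loaded {modelInfo.ModelId}");
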
63 changes: 63 additions & 0 deletions sdk/cs/test/FoundryLocal.Tests/FoundryLocalManagerTest.cs
@@ -521,6 +521,69 @@ public async Task LoadModelAsync_Succeeds_AndPassesEpOverrideWhenCudaPresent()
Assert.Equal("model-4-generic-gpu:1", result.ModelId);
}

[Fact]
public async Task StartModelAsync_Succeeds_WhenModelIsInCatalogModelsCache()
{
// Arrange
var modelId = "model1";
var model = new ModelInfo
{
ModelId = modelId,
Alias = "alias1",
Uri = "http://model",
ProviderType = "huggingface",
Runtime = new Runtime { DeviceType = DeviceType.GPU }
};

_mockHttp.When("/openai/models").Respond("application/json", $"[\"{modelId}\"]");
_mockHttp.When(HttpMethod.Get, $"/openai/load/{modelId}*").Respond("application/json", "{}");

typeof(FoundryLocalManager)
.GetField("_catalogModels", BindingFlags.NonPublic | BindingFlags.Instance)!
.SetValue(_manager, new List<ModelInfo> { model });

// Act
var result = await _manager.StartModelAsync(modelId);

// Assert
Assert.NotNull(result);
Assert.Equal(modelId, result.ModelId);
}

[Fact]
public async Task StartModelAsync_Succeeds_WhenModelIsDownloadedButNotInCatalogModelsCache()
{
// Arrange
var modelId = "model1";
var model = new ModelInfo
{
ModelId = modelId,
Alias = "alias1",
Uri = "http://model",
ProviderType = "huggingface",
Runtime = new Runtime { DeviceType = DeviceType.GPU }
};

var mockJsonResponse = "some log text... {\"success\": true, \"errorMessage\": null}";
_mockHttp.When("/openai/download").Respond("application/json", mockJsonResponse);
_mockHttp.When("/openai/models").Respond("application/json", $"[\"{modelId}\"]");
_mockHttp.When(HttpMethod.Get, $"/openai/load/{modelId}*").Respond("application/json", "{}");

var catalogJson = JsonSerializer.Serialize(new List<ModelInfo> { model });
_mockHttp.When(HttpMethod.Get, "/foundry/list").Respond("application/json", catalogJson);

typeof(FoundryLocalManager)
.GetField("_catalogModels", BindingFlags.NonPublic | BindingFlags.Instance)!
.SetValue(_manager, null);

// Act
var result = await _manager.StartModelAsync(modelId);

// Assert
Assert.NotNull(result);
Assert.Equal(modelId, result.ModelId);
}

[Fact]
public async Task LoadModelAsync_ThrowsIfNotInCache()
{