-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
41 changed files
with
3,169 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
# Continuous-integration workflow: restore, build, and run the unit-test
# suite on every push / pull request targeting main. Integration tests are
# excluded because they require a live LM Studio server (see the --filter).
name: Tests

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      # Install every .NET SDK the solution multi-targets.
      # v4 keeps the action on a supported Node runtime and matches checkout@v4
      # (v3 ran on the deprecated Node 16 runtime).
      - name: Setup .NET
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: |
            6.0.x
            7.0.x
            8.0.x

      - name: Restore dependencies
        run: dotnet restore

      - name: Build
        run: dotnet build --no-restore

      # Only unit tests run in CI; anything tagged Category=Integration needs
      # a reachable LM Studio instance and is skipped here.
      - name: Test (Unit Only)
        run: dotnet test --no-build --verbosity normal --filter "Category!=Integration"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
# Contributing to LocalAI.NET | ||
|
||
## Publishing to NuGet | ||
|
||
### Prerequisites | ||
1. Create a NuGet account at https://www.nuget.org | ||
2. Generate an API key: | ||
- Go to https://www.nuget.org/account/apikeys | ||
- Click "Create" | ||
- Name: "LocalAI.NET.KoboldCpp Publishing" (or your preferred name) | ||
- Expiration: 365 days | ||
- Select "Push new packages and package versions" | ||
- Glob Pattern: "LocalAI.NET.KoboldCpp*" | ||
- Save the generated key securely | ||
|
||
### Publishing Process | ||
1. Update version in `LocalAI.NET.KoboldCpp/LocalAI.NET.KoboldCpp.csproj`: | ||
```xml | ||
<Version>1.0.2</Version> <!-- Change this to new version --> | ||
``` | ||
|
||
2. Clean and pack: | ||
```bash | ||
dotnet clean | ||
dotnet pack -c Release | ||
``` | ||
|
||
3. Push to NuGet: | ||
```bash | ||
dotnet nuget push .\LocalAI.NET.KoboldCpp\bin\Release\LocalAI.NET.KoboldCpp.1.0.2.nupkg --api-key YOUR_API_KEY --source https://api.nuget.org/v3/index.json | ||
``` | ||
Replace: | ||
- `1.0.2` with your new version number | ||
- `YOUR_API_KEY` with your NuGet API key | ||
|
||
4. Wait 15-30 minutes for the package to appear on NuGet.org | ||
|
||
### Version Guidelines | ||
- Use [Semantic Versioning](https://semver.org/): | ||
- MAJOR version for incompatible API changes | ||
- MINOR version for backwards-compatible functionality | ||
- PATCH version for backwards-compatible bug fixes | ||
|
||
## Development Guidelines | ||
|
||
### Code Style | ||
- Use C# latest features and best practices | ||
- Follow Microsoft's [C# Coding Conventions](https://learn.microsoft.com/en-us/dotnet/csharp/fundamentals/coding-style/coding-conventions)
- Use meaningful names for variables, methods, and classes | ||
- Add XML documentation comments for public APIs | ||
|
||
### Testing | ||
1. Write unit tests for new features | ||
2. Ensure all tests pass before submitting PR: | ||
```bash | ||
dotnet test | ||
``` | ||
|
||
### Pull Request Process | ||
1. Fork the repository | ||
2. Create a feature branch | ||
3. Make your changes | ||
4. Update documentation and tests | ||
5. Submit a pull request | ||
|
||
## Questions? | ||
Open an issue on GitHub if you have questions or need help. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
using Xunit;

// Run all test classes serially. NOTE(review): presumably because the
// integration tests in this assembly share one local LM Studio server and
// would race each other if parallelized — confirm against the test base class.
[assembly: CollectionBehavior(DisableTestParallelization = true)]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,27 @@ | ||
using Microsoft.Extensions.Logging; | ||
using WireMock.Server; | ||
using Xunit.Abstractions; | ||
|
||
namespace LocalAI.NET.LMStudio.Tests.Common
{
    /// <summary>
    /// Base class for LM Studio tests. Starts an in-process WireMock server
    /// so tests can stub HTTP endpoints, and wires an <see cref="ILogger"/>
    /// that forwards to xunit's output window.
    /// </summary>
    public abstract class LmStudioTestBase : IDisposable
    {
        protected readonly WireMockServer Server;
        protected readonly ILogger Logger;
        protected readonly string BaseUrl;

        // Guards against double-dispose; xunit calls Dispose once per test,
        // but derived classes may also chain into this method.
        private bool _disposed;

        protected LmStudioTestBase(ITestOutputHelper output)
        {
            Server = WireMockServer.Start();
            // Urls[0] is the single dynamically-assigned listener address.
            BaseUrl = Server.Urls[0];
            Logger = LoggerFactory
                .Create(builder => builder.AddXUnit(output))
                .CreateLogger(GetType());
        }

        /// <summary>
        /// Stops the mock server. Idempotent, and suppresses finalization per
        /// the standard dispose pattern (CA1816). Overriders should call
        /// <c>base.Dispose()</c>.
        /// </summary>
        public virtual void Dispose()
        {
            if (_disposed)
            {
                return;
            }

            _disposed = true;
            Server.Dispose();
            GC.SuppressFinalize(this);
        }
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
namespace LocalAI.NET.LMStudio.Tests.Common
{
    /// <summary>
    /// Central endpoint configuration for the integration tests. Each URL can
    /// be overridden through an environment variable so CI (or a developer)
    /// can point the suite at a non-default LM Studio host.
    /// </summary>
    public static class TestConfig
    {
        // Address LM Studio listens on out of the box.
        private const string FallbackHost = "http://localhost:1234";

        // Environment lookup with a fixed fallback when the variable is unset.
        private static string FromEnvironment(string variable, string fallback) =>
            Environment.GetEnvironmentVariable(variable) ?? fallback;

        /// <summary>Base URL of the native LM Studio REST API.</summary>
        public static string NativeApiBaseUrl =>
            FromEnvironment("LMSTUDIO_BASE_URL", $"{FallbackHost}/api");

        /// <summary>Base URL of the OpenAI-compatible API surface.</summary>
        public static string OpenAiApiBaseUrl =>
            FromEnvironment("LMSTUDIO_OPENAI_BASE_URL", $"{FallbackHost}/v1");

        /// <summary>Request timeout; generous because large models respond slowly.</summary>
        public static int TimeoutSeconds => 120;
    }
}
174 changes: 174 additions & 0 deletions
174
LocalAI.NET.LMStudio.Tests/Integration/Providers/Native/NativeGenerationTests.cs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,174 @@ | ||
using FluentAssertions; | ||
using LocalAI.NET.LMStudio.Models.Completion; | ||
using LocalAI.NET.LMStudio.Models.Chat; | ||
using Xunit; | ||
using Xunit.Abstractions; | ||
|
||
namespace LocalAI.NET.LMStudio.Tests.Integration.Providers.Native
{
    /// <summary>
    /// Integration tests for text generation through the native LM Studio API.
    /// Every test is skipped unless a live server is reachable — see the
    /// <c>ServerAvailable</c> flag and <c>Provider</c> supplied by
    /// <c>NativeTestBase</c> (defined elsewhere in this project).
    /// </summary>
    [Trait("Category", "Integration")]
    [Trait("API", "Native")]
    public class NativeGenerationTests : NativeTestBase
    {
        public NativeGenerationTests(ITestOutputHelper output) : base(output) { }

        /// <summary>Basic completion round-trip: prompt in, non-empty text out.</summary>
        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task Complete_WithSimplePrompt_ShouldReturnResponse()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Arrange
            var request = new LmStudioCompletionRequest
            {
                Model = "test-model",
                Prompt = "Once upon a time",
                MaxTokens = 20,
                Temperature = 0.7f,
                TopP = 0.9f
            };

            // Act
            var response = await Provider.CompleteAsync(request);

            // Assert
            response.Should().NotBeNull();
            response.Choices.Should().NotBeEmpty();
            response.Choices[0].Text.Should().NotBeNullOrEmpty();
        }

        /// <summary>
        /// Streaming completion: collects tokens until MaxTokens are seen or a
        /// 30-second timeout fires. A timeout is tolerated (logged, not failed)
        /// as long as at least one token arrived; any other error is rethrown.
        /// </summary>
        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task StreamCompletion_ShouldStreamTokens()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            var request = new LmStudioCompletionRequest
            {
                Model = "test-model",
                Prompt = "Write a short story about",
                MaxTokens = 20,
                Temperature = 0.7f,
                TopP = 0.9f,
                Stream = true
            };

            var tokens = new List<string>();
            // Hard cap so a stalled stream cannot hang the test run.
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

            try
            {
                await foreach (var token in Provider.StreamCompletionAsync(request, cts.Token))
                {
                    tokens.Add(token);
                    Output.WriteLine($"Received token: {token}");

                    // Stop early once we have as many tokens as we asked for;
                    // the server may keep streaming otherwise.
                    if (tokens.Count >= request.MaxTokens)
                    {
                        Output.WriteLine("Reached max length, breaking");
                        break;
                    }
                }
            }
            // Filter: only swallow cancellation caused by OUR timeout token.
            catch (OperationCanceledException) when (cts.Token.IsCancellationRequested)
            {
                Output.WriteLine($"Stream timed out after receiving {tokens.Count} tokens");
            }
            catch (Exception ex)
            {
                Output.WriteLine($"Error during streaming: {ex}");
                throw;
            }

            tokens.Should().NotBeEmpty("No tokens were received from the stream");
            string.Concat(tokens).Should().NotBeNullOrEmpty("Combined token text should not be empty");
        }

        /// <summary>
        /// Completion with a stop sequence ("."). Only checks that a non-empty
        /// response comes back — it does not verify the stop was honored.
        /// </summary>
        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task Complete_WithStopSequence_ShouldReturnResponse()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Arrange
            var request = new LmStudioCompletionRequest
            {
                Model = "test-model",
                Prompt = "Write a short story",
                MaxTokens = 20,
                Temperature = 0.7f,
                TopP = 0.9f,
                Stop = new[] { "." }
            };

            // Act
            var response = await Provider.CompleteAsync(request);

            // Assert
            response.Should().NotBeNull();
            response.Choices.Should().NotBeEmpty();
            response.Choices[0].Text.Should().NotBeNullOrEmpty();
        }

        /// <summary>
        /// Sweeps low/medium/high temperatures and asserts each request yields
        /// a non-empty completion. Sequential, with a short delay between
        /// requests — presumably to avoid hammering the local server; confirm.
        /// </summary>
        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task Complete_WithDifferentTemperatures_ShouldWork()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Test various temperature settings
            var temperatures = new[] { 0.1f, 0.7f, 1.5f };
            foreach (var temp in temperatures)
            {
                // Arrange
                var request = new LmStudioCompletionRequest
                {
                    Model = "test-model",
                    Prompt = "The quick brown fox",
                    MaxTokens = 20,
                    Temperature = temp,
                    TopP = 0.9f
                };

                // Act
                var response = await Provider.CompleteAsync(request);

                // Assert
                response.Should().NotBeNull();
                response.Choices.Should().NotBeEmpty();
                response.Choices[0].Text.Should().NotBeNullOrEmpty();
                Output.WriteLine($"Temperature {temp} response: {response.Choices[0].Text}");

                await Task.Delay(500);
            }
        }

        /// <summary>
        /// Chat completion round-trip with a system + user message. Unlike the
        /// completion tests above, this uses the real loaded model's id
        /// (DefaultModel, provided by the base class) rather than "test-model".
        /// </summary>
        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task ChatComplete_ShouldReturnResponse()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Arrange
            var request = new LmStudioChatRequest
            {
                Model = DefaultModel.Id, // Make sure to use the actual model ID
                Messages = new List<LmStudioChatMessage>
                {
                    new() { Role = "system", Content = "Always answer in rhymes." },
                    new() { Role = "user", Content = "Introduce yourself." }
                },
                Temperature = 0.7f
            };

            // Act
            var response = await Provider.ChatCompleteAsync(request);

            // Assert
            response.Should().NotBeNull();
            response.Choices.Should().NotBeEmpty();
            response.Choices[0].Message.Should().NotBeNull();
            response.Choices[0].Message!.Content.Should().NotBeNullOrEmpty();
        }
    }
}
Oops, something went wrong.