Commit 59942a7: initial commit
uxmond committed Dec 31, 2024 (1 parent: 683ef03)

Showing 41 changed files with 3,169 additions and 4 deletions.
31 changes: 31 additions & 0 deletions .github/workflows/test.yml
@@ -0,0 +1,31 @@
name: Tests

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Setup .NET
        uses: actions/setup-dotnet@v3
        with:
          dotnet-version: |
            6.0.x
            7.0.x
            8.0.x

      - name: Restore dependencies
        run: dotnet restore

      - name: Build
        run: dotnet build --no-restore

      - name: Test (Unit Only)
        run: dotnet test --no-build --verbosity normal --filter "Category!=Integration"
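The unit-only filter in the last step matches the `[Trait("Category", "Integration")]` attributes on the test classes below, so CI never needs a live LM Studio server. To run the integration suite locally instead, a sketch (assumes LM Studio is already listening on its default port):

```bash
# Run only the tests CI skips; requires a running LM Studio server
dotnet test --filter "Category=Integration"
```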
5 changes: 2 additions & 3 deletions .gitignore
@@ -82,8 +82,6 @@ StyleCopReport.xml
 *.pgc
 *.pgd
 *.rsp
-# but not Directory.Build.rsp, as it configures directory-level build defaults
-!Directory.Build.rsp
 *.sbr
 *.tlb
 *.tli
@@ -397,4 +395,5 @@ FodyWeavers.xsd
 *.msp
 
 # JetBrains Rider
-*.sln.iml
+.idea/
+*.sln.iml
67 changes: 67 additions & 0 deletions CONTRIBUTING.md
@@ -0,0 +1,67 @@
# Contributing to LocalAI.NET

## Publishing to NuGet

### Prerequisites
1. Create a NuGet account at https://www.nuget.org
2. Generate an API key:
   - Go to https://www.nuget.org/account/apikeys
   - Click "Create"
   - Name: "LocalAI.NET.KoboldCpp Publishing" (or your preferred name)
   - Expiration: 365 days
   - Select "Push new packages and package versions"
   - Glob Pattern: "LocalAI.NET.KoboldCpp*"
   - Save the generated key securely

### Publishing Process
1. Update version in `LocalAI.NET.KoboldCpp/LocalAI.NET.KoboldCpp.csproj`:
```xml
<Version>1.0.2</Version> <!-- Change this to new version -->
```

2. Clean and pack:
```bash
dotnet clean
dotnet pack -c Release
```

3. Push to NuGet:
```bash
dotnet nuget push .\LocalAI.NET.KoboldCpp\bin\Release\LocalAI.NET.KoboldCpp.1.0.2.nupkg --api-key YOUR_API_KEY --source https://api.nuget.org/v3/index.json
```
Replace:
- `1.0.2` with your new version number
- `YOUR_API_KEY` with your NuGet API key

4. Wait 15-30 minutes for the package to appear on NuGet.org

### Version Guidelines
- Use [Semantic Versioning](https://semver.org/):
  - MAJOR version for incompatible API changes
  - MINOR version for backwards-compatible feature additions
  - PATCH version for backwards-compatible bug fixes

## Development Guidelines

### Code Style
- Use the latest C# features and best practices
- Follow Microsoft's [C# Coding Conventions](https://docs.microsoft.com/en-us/dotnet/csharp/fundamentals/coding-style/coding-conventions)
- Use meaningful names for variables, methods, and classes
- Add XML documentation comments for public APIs, as sketched below
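
A minimal sketch of the expected XML documentation style (the method and types here are illustrative, not the library's actual API):

```csharp
/// <summary>
/// Sends a completion request to the configured LM Studio endpoint.
/// </summary>
/// <param name="request">The prompt and sampling settings to send.</param>
/// <param name="token">Cancels the in-flight HTTP call.</param>
/// <returns>The server's completion response.</returns>
public Task<LmStudioCompletionResponse> CompleteAsync(
    LmStudioCompletionRequest request,
    CancellationToken token = default) =>
    throw new NotImplementedException(); // body elided in this sketch
```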

### Testing
1. Write unit tests for new features (see the example below)
2. Ensure all tests pass before submitting a PR:
```bash
dotnet test
```
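
For example, a minimal unit test in this repo's style; it carries no `Integration` trait, so the CI workflow's `Category!=Integration` filter will include it (class name and assertion are illustrative):

```csharp
using FluentAssertions;
using LocalAI.NET.LMStudio.Tests.Common;
using Xunit;

namespace LocalAI.NET.LMStudio.Tests.Unit
{
    public class TestConfigTests
    {
        [Fact]
        public void NativeApiBaseUrl_DefaultsToLocalhost()
        {
            // Assumes LMSTUDIO_BASE_URL is not set in the environment.
            TestConfig.NativeApiBaseUrl.Should().StartWith("http://localhost:1234");
        }
    }
}
```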

### Pull Request Process
1. Fork the repository
2. Create a feature branch
3. Make your changes
4. Update documentation and tests
5. Submit a pull request

## Questions?
Open an issue on GitHub if you have questions or need help.
3 changes: 3 additions & 0 deletions LocalAI.NET.LMStudio.Tests/Common/AssemblyInfo.cs
@@ -0,0 +1,3 @@
using Xunit;

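// Run test collections sequentially; the integration tests presumably share one
// LM Studio instance, so parallel runs could interfere with each other.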
[assembly: CollectionBehavior(DisableTestParallelization = true)]
27 changes: 27 additions & 0 deletions LocalAI.NET.LMStudio.Tests/Common/LmStudioTestBase.cs
@@ -0,0 +1,27 @@
using Microsoft.Extensions.Logging;
using WireMock.Server;
using Xunit.Abstractions;

namespace LocalAI.NET.LMStudio.Tests.Common
{
    public abstract class LmStudioTestBase : IDisposable
    {
        protected readonly WireMockServer Server;
        protected readonly ILogger Logger;
        protected readonly string BaseUrl;

        protected LmStudioTestBase(ITestOutputHelper output)
        {
            Server = WireMockServer.Start();
            BaseUrl = Server.Urls[0];
            Logger = LoggerFactory
                .Create(builder => builder.AddXUnit(output))
                .CreateLogger(GetType());
        }

        public virtual void Dispose()
        {
            Server.Dispose();
        }
    }
}
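The base class gives every derived test an in-memory WireMock server plus an xUnit-aware logger. A sketch of how a derived test might stub an LM Studio endpoint (the path and payload shape are assumptions, not the provider's actual contract):

```csharp
using FluentAssertions;
using WireMock.RequestBuilders;
using WireMock.ResponseBuilders;
using Xunit;
using Xunit.Abstractions;

namespace LocalAI.NET.LMStudio.Tests.Common
{
    public class StubbedEndpointTests : LmStudioTestBase
    {
        public StubbedEndpointTests(ITestOutputHelper output) : base(output) { }

        [Fact]
        public void Server_CanStubCompletionEndpoint()
        {
            // Any HTTP client pointed at BaseUrl now receives this canned payload.
            Server
                .Given(Request.Create().WithPath("/v1/completions").UsingPost())
                .RespondWith(Response.Create()
                    .WithStatusCode(200)
                    .WithBodyAsJson(new { choices = new[] { new { text = "Hello" } } }));

            Server.Mappings.Should().NotBeEmpty();
        }
    }
}
```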
16 changes: 16 additions & 0 deletions LocalAI.NET.LMStudio.Tests/Common/TestConfig.cs
@@ -0,0 +1,16 @@
namespace LocalAI.NET.LMStudio.Tests.Common
{
    public static class TestConfig
    {
        private const string DefaultHost = "http://localhost:1234";

        public static string NativeApiBaseUrl =>
            Environment.GetEnvironmentVariable("LMSTUDIO_BASE_URL") ?? $"{DefaultHost}/api";

        public static string OpenAiApiBaseUrl =>
            Environment.GetEnvironmentVariable("LMSTUDIO_OPENAI_BASE_URL") ?? $"{DefaultHost}/v1";

        // Extended timeout for large models
        public static int TimeoutSeconds => 120;
    }
}
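Because both URLs fall back to the default host only when the environment variables are unset, pointing the integration tests at a different LM Studio instance needs no code change (host below is illustrative):

```bash
LMSTUDIO_BASE_URL="http://192.168.1.50:1234/api" \
LMSTUDIO_OPENAI_BASE_URL="http://192.168.1.50:1234/v1" \
dotnet test --filter "Category=Integration"
```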
174 changes: 174 additions & 0 deletions LocalAI.NET.LMStudio.Tests/Integration/Providers/Native/NativeGenerationTests.cs
@@ -0,0 +1,174 @@
using FluentAssertions;
using LocalAI.NET.LMStudio.Models.Completion;
using LocalAI.NET.LMStudio.Models.Chat;
using Xunit;
using Xunit.Abstractions;

namespace LocalAI.NET.LMStudio.Tests.Integration.Providers.Native
{
    [Trait("Category", "Integration")]
    [Trait("API", "Native")]
    public class NativeGenerationTests : NativeTestBase
    {
        public NativeGenerationTests(ITestOutputHelper output) : base(output) { }

        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task Complete_WithSimplePrompt_ShouldReturnResponse()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Arrange
            var request = new LmStudioCompletionRequest
            {
                Model = "test-model",
                Prompt = "Once upon a time",
                MaxTokens = 20,
                Temperature = 0.7f,
                TopP = 0.9f
            };

            // Act
            var response = await Provider.CompleteAsync(request);

            // Assert
            response.Should().NotBeNull();
            response.Choices.Should().NotBeEmpty();
            response.Choices[0].Text.Should().NotBeNullOrEmpty();
        }

        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task StreamCompletion_ShouldStreamTokens()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            var request = new LmStudioCompletionRequest
            {
                Model = "test-model",
                Prompt = "Write a short story about",
                MaxTokens = 20,
                Temperature = 0.7f,
                TopP = 0.9f,
                Stream = true
            };

            var tokens = new List<string>();
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

            try
            {
                await foreach (var token in Provider.StreamCompletionAsync(request, cts.Token))
                {
                    tokens.Add(token);
                    Output.WriteLine($"Received token: {token}");

                    if (tokens.Count >= request.MaxTokens)
                    {
                        Output.WriteLine("Reached max length, breaking");
                        break;
                    }
                }
            }
            catch (OperationCanceledException) when (cts.Token.IsCancellationRequested)
            {
                Output.WriteLine($"Stream timed out after receiving {tokens.Count} tokens");
            }
            catch (Exception ex)
            {
                Output.WriteLine($"Error during streaming: {ex}");
                throw;
            }

            tokens.Should().NotBeEmpty("No tokens were received from the stream");
            string.Concat(tokens).Should().NotBeNullOrEmpty("Combined token text should not be empty");
        }

        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task Complete_WithStopSequence_ShouldReturnResponse()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Arrange
            var request = new LmStudioCompletionRequest
            {
                Model = "test-model",
                Prompt = "Write a short story",
                MaxTokens = 20,
                Temperature = 0.7f,
                TopP = 0.9f,
                Stop = new[] { "." }
            };

            // Act
            var response = await Provider.CompleteAsync(request);

            // Assert
            response.Should().NotBeNull();
            response.Choices.Should().NotBeEmpty();
            response.Choices[0].Text.Should().NotBeNullOrEmpty();
        }

        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task Complete_WithDifferentTemperatures_ShouldWork()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Test various temperature settings
            var temperatures = new[] { 0.1f, 0.7f, 1.5f };
            foreach (var temp in temperatures)
            {
                // Arrange
                var request = new LmStudioCompletionRequest
                {
                    Model = "test-model",
                    Prompt = "The quick brown fox",
                    MaxTokens = 20,
                    Temperature = temp,
                    TopP = 0.9f
                };

                // Act
                var response = await Provider.CompleteAsync(request);

                // Assert
                response.Should().NotBeNull();
                response.Choices.Should().NotBeEmpty();
                response.Choices[0].Text.Should().NotBeNullOrEmpty();
                Output.WriteLine($"Temperature {temp} response: {response.Choices[0].Text}");

                await Task.Delay(500);
            }
        }

        [SkippableFact]
        [Trait("Category", "Integration")]
        public async Task ChatComplete_ShouldReturnResponse()
        {
            Skip.If(!ServerAvailable, "LM Studio server is not available");

            // Arrange
            var request = new LmStudioChatRequest
            {
                Model = DefaultModel.Id, // Make sure to use the actual model ID
                Messages = new List<LmStudioChatMessage>
                {
                    new() { Role = "system", Content = "Always answer in rhymes." },
                    new() { Role = "user", Content = "Introduce yourself." }
                },
                Temperature = 0.7f
            };

            // Act
            var response = await Provider.ChatCompleteAsync(request);

            // Assert
            response.Should().NotBeNull();
            response.Choices.Should().NotBeEmpty();
            response.Choices[0].Message.Should().NotBeNull();
            response.Choices[0].Message!.Content.Should().NotBeNullOrEmpty();
        }
    }
}
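These tests carry both `Category=Integration` and `API=Native` traits, so the native suite can be run in isolation (assumes a live LM Studio server):

```bash
dotnet test --filter "Category=Integration&API=Native"
```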