Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

The log levels defined on llama.cpp and LlamaSharp side were not aligned anymore (issue #995) #997

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 63 additions & 0 deletions LLama.Unittest/LLamaContextWithCustomLoggerTests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
using LLama.Common;
using LLama.Native;
using Microsoft.Extensions.Logging;

namespace LLama.Unittest
{
    /// <summary>
    /// Regression tests for https://github.com/SciSharp/LLamaSharp/issues/995:
    /// loading weights used to crash when a custom <see cref="ILogger"/> was
    /// installed via <see cref="NativeLogConfig.llama_log_set(ILogger)"/>.
    /// </summary>
    public sealed class LLamaContextWithCustomLoggerTests
        : IDisposable
    {
        /// <summary>
        /// A deliberately naive logger: accepts every level and discards every message.
        /// </summary>
        private sealed class CustomLogger : ILogger
        {
            public IDisposable? BeginScope<TState>(TState state) where TState : notnull => default;

            // Note: the interface contract is Func<TState, Exception?, string> —
            // matching it exactly avoids nullability-mismatch warning CS8767.
            public void Log<TState>(
                LogLevel logLevel,
                EventId eventId,
                TState state,
                Exception? exception,
                Func<TState, Exception?, string> formatter)
            {
            }

            public bool IsEnabled(LogLevel logLevel) => true;
        }

        private readonly LLamaWeights _weights;
        private readonly LLamaContext _context;

        public LLamaContextWithCustomLoggerTests()
        {
            var @params = new ModelParams(Constants.GenerativeModelPath)
            {
                ContextSize = 128,
                GpuLayerCount = Constants.CIGpuLayerCount,
            };

            // This unit test used to fail when loading the weights with such a naive logger set.
            //
            // See https://github.com/SciSharp/LLamaSharp/issues/995
            //
            // So the unit test here doesn't check that the logger is actually used
            // but at least that setting one doesn't crash the weights load.
            NativeLogConfig.llama_log_set(new CustomLogger());

            _weights = LLamaWeights.LoadFromFile(@params);
            _context = _weights.CreateContext(@params);
        }

        public void Dispose()
        {
            // Dispose the context before the weights it was created from,
            // i.e. release dependents before their dependency.
            _context.Dispose();
            _weights.Dispose();
        }

        [Fact]
        public void CheckProperties()
        {
            Assert.Equal(128u, _context.ContextSize);
            Assert.Equal(2048, _context.EmbeddingSize);
            Assert.Equal(128256, _context.VocabCount);
        }
    }
}
44 changes: 32 additions & 12 deletions LLama/Native/LLamaLogLevel.cs
Original file line number Diff line number Diff line change
Expand Up @@ -4,43 +4,63 @@
namespace LLama.Native
{
/// <summary>
/// Severity level of a log message
/// Severity level of a log message. This enum should always be aligned with
/// the one defined on llama.cpp side at
/// https://github.com/ggerganov/llama.cpp/blob/0eb4e12beebabae46d37b78742f4c5d4dbe52dc1/ggml/include/ggml.h#L559
/// </summary>
public enum LLamaLogLevel
{
/// <summary>
/// Logs that highlight when the current flow of execution is stopped due to a failure.
/// Logs are never written.
/// </summary>
None = 0,

/// <summary>
/// Logs that are used for interactive investigation during development.
/// </summary>
Error = 2,
Debug = 1,

/// <summary>
/// Logs that track the general flow of the application.
/// </summary>
Info = 2,

/// <summary>
/// Logs that highlight an abnormal or unexpected event in the application flow, but do not otherwise cause the application execution to stop.
/// </summary>
Warning = 3,

/// <summary>
/// Logs that track the general flow of the application.
/// Logs that highlight when the current flow of execution is stopped due to a failure.
/// </summary>
Info = 4,
Error = 4,

/// <summary>
/// Logs that are used for interactive investigation during development.
/// Continue log level is equivalent to None in the way it is used in llama.cpp.
/// </summary>
Debug = 5,
Continue = 5,
}

internal static class LLamaLogLevelExtensions
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
internal static class LLamaLogLevelExtensions
internal static class LLamaLogLevelExtensions
{
[ThreadStatic] private static LogLevel _previous;
public static LogLevel ToLogLevel(this LLamaLogLevel llama)
{
_previous = (llama) switch
{
LLamaLogLevel.None => LogLevel.None,
LLamaLogLevel.Debug => LogLevel.Debug,
LLamaLogLevel.Info => LogLevel.Information,
LLamaLogLevel.Warning => LogLevel.Warning,
LLamaLogLevel.Error => LogLevel.Error,
LLamaLogLevel.Continue => _previous,
_ => throw new ArgumentOutOfRangeException(nameof(llama), llama, null)
};
return _previous;
}

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(Note that I haven't tested this, but hopefully it illustrates what I mean)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Got it, good idea

{
/// <summary>
/// Keeps track of the previous log level to be able to handle the log level <see cref="LLamaLogLevel.Continue"/>.
/// </summary>
[ThreadStatic] private static LogLevel _previous;

public static LogLevel ToLogLevel(this LLamaLogLevel llama)
{
return (llama) switch
_previous = llama switch
{
LLamaLogLevel.Error => LogLevel.Error,
LLamaLogLevel.Warning => LogLevel.Warning,
LLamaLogLevel.Info => LogLevel.Information,
LLamaLogLevel.None => LogLevel.None,
LLamaLogLevel.Debug => LogLevel.Debug,
LLamaLogLevel.Info => LogLevel.Information,
LLamaLogLevel.Warning => LogLevel.Warning,
LLamaLogLevel.Error => LogLevel.Error,
LLamaLogLevel.Continue => _previous,
_ => throw new ArgumentOutOfRangeException(nameof(llama), llama, null)
};
return _previous;
}
}
}
}
Loading