Mirror of https://github.com/nomic-ai/gpt4all.git, last synced 2024-10-01 01:06:10 -04:00.
797891c995
* Initial Library Loader * Load library as part of Model factory * Dynamically search and find the dlls * Update tests to use locally built runtimes * Fix dylib loading, add macos runtime support for sample/tests * Bypass automatic loading by default. * Only set CMAKE_OSX_ARCHITECTURES if not already set, allow cross-compile * Switch Loading again * Update build scripts for mac/linux * Update bindings to support newest breaking changes * Fix build * Use llmodel for Windows * Actually, it does need to be libllmodel * Name * Remove TFMs, bypass loading by default * Fix script * Delete mac script --------- Co-authored-by: Tim Miller <innerlogic4321@ghmail.com>
32 lines
1.4 KiB
C#
32 lines
1.4 KiB
C#
namespace Gpt4All;

/// <summary>
/// Defines a service that produces text completions for a prompt.
/// </summary>
public interface ITextPrediction
{
    /// <summary>
    /// Generates a completion for <paramref name="text"/> with the supplied options,
    /// returning the result once generation has finished.
    /// </summary>
    /// <param name="text">The text to complete.</param>
    /// <param name="opts">The prediction settings.</param>
    /// <param name="cancellation">The <see cref="CancellationToken"/> used to observe cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
    /// <returns>The prediction result generated by the model.</returns>
    // NOTE(review): this parameter is named "cancellation" while the streaming overload
    // uses "cancellationToken"; renaming either would break named-argument callers,
    // so the inconsistency is flagged here rather than fixed.
    Task<ITextPredictionResult> GetPredictionAsync(
        string text,
        PredictRequestOptions opts,
        CancellationToken cancellation = default);

    /// <summary>
    /// Generates a completion for <paramref name="text"/> with the supplied options,
    /// exposing the result as a stream while the model produces it.
    /// </summary>
    /// <param name="text">The text to complete.</param>
    /// <param name="opts">The prediction settings.</param>
    /// <param name="cancellationToken">The <see cref="CancellationToken"/> used to observe cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
    /// <returns>The streaming prediction result generated by the model.</returns>
    Task<ITextPredictionStreamingResult> GetStreamingPredictionAsync(
        string text,
        PredictRequestOptions opts,
        CancellationToken cancellationToken = default);
}