RSCG – SKPromptGenerator
| name | SKPromptGenerator |
| nuget | https://www.nuget.org/packages/SKPromptGenerator/ |
| link | https://github.com/CharlieDigital/SKPromptGenerator |
| author | Charlie Chen |
SKPromptGenerator generates typed prompt classes for Semantic Kernel from const string templates.
This is how you can use SKPromptGenerator.
The code that you start with is
[code lang="xml"]
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.SemanticKernel" Version="1.64.0" />
    <PackageReference Include="SKPromptGenerator" Version="0.5.1">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
  </ItemGroup>

  <PropertyGroup>
    <EmitCompilerGeneratedFiles>true</EmitCompilerGeneratedFiles>
    <CompilerGeneratedFilesOutputPath>$(BaseIntermediateOutputPath)\GX</CompilerGeneratedFilesOutputPath>
  </PropertyGroup>

</Project>
[/code]
The code that you will use is
[code lang="csharp"]
// See https://aka.ms/new-console-template for more information
using SKPromptGenerator; // <-- add the SKPromptGenerator namespace (using directives must come before the top-level statements)

Console.WriteLine("Hello, World!");

// WeatherPrompt is the class generated from the Weather template below
var weather = new DemoAI.WeatherPrompt("Bucuresti");

namespace DemoAI
{
  public static partial class MyPrompts
  {
    [PromptTemplate] // <-- no namespace prefix needed, thanks to the using above
    public const string Weather = """
      What is the weather in the city {{$city}} ?
      Respond directly in a single line
      """;
  }
}
[/code]
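So far the prompt is only constructed, not executed. Below is a minimal sketch of running the generated WeatherPrompt against a Kernel; the model id (gpt-4o-mini) and the OPENAI_API_KEY environment variable are illustrative assumptions, not part of the original sample.

[code lang="csharp"]
// Minimal sketch (assumptions: an OpenAI chat model id and an OPENAI_API_KEY environment variable).
using Microsoft.SemanticKernel;
using DemoAI;

// Register a chat completion service with the kernel.
var kernel = Kernel.CreateBuilder()
  .AddOpenAIChatCompletion("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY")!)
  .Build();

// ExecuteAsync comes from the generated PromptTemplateBase (see below):
// it adds the rendered template as a user message and returns the model's reply.
var answer = await new WeatherPrompt("Bucuresti").ExecuteAsync(kernel);
Console.WriteLine(answer);
[/code]

ExecuteAsync resolves the chat service with GetRequiredService&lt;IChatCompletionService&gt;(serviceId), so you can pass a serviceId when more than one chat service is registered.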
The code that is generated (three separate files, shown one after another) is
[code lang="csharp"]
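// ---------- generated file 1 of 3: the WeatherPrompt class built from the Weather constant ----------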
using System;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using SKPromptGenerator;

namespace DemoAI;

/// <summary>
/// Generated prompt for `Weather`
/// </summary>
public partial class WeatherPrompt(
  string city
) : PromptTemplateBase
{
  /// <summary>
  /// The base prompt template string for `Weather`
  /// </summary>
  public override string Text => $$"""
    What is the weather in the city {{city}} ?
    Respond directly in a single line
    """;

  /// <summary>
  /// Settings for the prompt `Weather`:
  ///   MaxTokens = 500
  ///   Temperature = 0.5d
  ///   TopP = 0d
  /// </summary>
  public override OpenAIPromptExecutionSettings Settings => new OpenAIPromptExecutionSettings
  {
    MaxTokens = 500,
    Temperature = 0.5d,
    TopP = 0d,
  };
}
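
// ---------- generated file 2 of 3: the [PromptTemplate] attributes ----------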
using System;
using System.Reflection;

namespace SKPromptGenerator;

/// <summary>
/// Attribute applied to `const string` class fields to generate a prompt class.
/// Use this when specifying a custom base class for executing the prompt.
/// </summary>
/// <param name="maxTokens">The maximum number of tokens; default is 500</param>
/// <param name="temperature">The temperature; default is 0.5</param>
/// <param name="topP">The Top P parameter; default is 0</param>
/// <typeparam name="T">The base type for the template inheriting from `PromptTemplateBase`</typeparam>
[AttributeUsage(AttributeTargets.Field | AttributeTargets.Property, Inherited = false, AllowMultiple = false)]
public class PromptTemplateAttribute<T>(
  int maxTokens = 500,
  double temperature = 0.5,
  double topP = 0
) : Attribute where T : PromptTemplateBase {
  public int MaxTokens => maxTokens;
  public double Temperature => temperature;
  public double TopP => topP;
}

/// <summary>
/// Attribute applied to `const string` class fields to generate a prompt class.
/// </summary>
/// <param name="maxTokens">The maximum number of tokens; default is 500</param>
/// <param name="temperature">The temperature; default is 0.5</param>
/// <param name="topP">The Top P parameter; default is 0</param>
[AttributeUsage(AttributeTargets.Field | AttributeTargets.Property, Inherited = false, AllowMultiple = false)]
public class PromptTemplateAttribute(
  int maxTokens = 500,
  double temperature = 0.5,
  double topP = 0
) : PromptTemplateAttribute<PromptTemplateBase>(maxTokens, temperature, topP) {
}
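
// ---------- generated file 3 of 3: PromptTemplateBase, the default execution logic ----------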
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;

namespace SKPromptGenerator;

/// <summary>
/// Abstract base class for executing the prompt. Override this class to
/// provide custom execution of the prompt.
/// </summary>
public abstract class PromptTemplateBase
{
  protected static readonly JsonSerializerOptions SerializerOptions = new() {
    PropertyNameCaseInsensitive = true
  };

  /// <summary>
  /// The execution settings for this prompt.
  /// </summary>
  public abstract OpenAIPromptExecutionSettings Settings { get; }

  /// <summary>
  /// The text of this prompt.
  /// </summary>
  public abstract string Text { get; }

  /// <summary>
  /// Executes the prompt using the default execution. Override this method
  /// to provide custom execution logic (e.g. logging, telemetry, etc.)
  /// </summary>
  /// <param name="kernel">The Semantic Kernel instance.</param>
  /// <param name="serviceId">An optional service ID to specify for execution.</param>
  /// <param name="historyBuilder">An optional builder for the chat history.</param>
  /// <param name="cancellation">An optional cancellation token.</param>
  /// <returns>A string with the results of execution.</returns>
  public virtual async Task<string> ExecuteAsync(
    Kernel kernel,
#nullable enable
    string? serviceId = null,
    Action<ChatHistory>? historyBuilder = null,
#nullable disable
    CancellationToken cancellation = default
  )
  {
    var chat = kernel.GetRequiredService<IChatCompletionService>(serviceId);

    var history = new ChatHistory();

    if (historyBuilder != null)
    {
      historyBuilder(history);
    }

    history.AddUserMessage(Text);

    var result = await chat.GetChatMessageContentAsync(history, Settings, kernel, cancellation);

    return result.ToString();
  }

  /// <summary>
  /// Executes the prompt and expects a JSON response that will be deserialized
  /// to the type `T`.
  /// </summary>
  /// <param name="kernel">The Semantic Kernel instance.</param>
  /// <param name="serviceId">An optional service ID to specify for execution.</param>
  /// <param name="historyBuilder">An optional builder for the chat history.</param>
  /// <param name="cancellation">An optional cancellation token.</param>
  /// <typeparam name="T">The type `T` of the response object.</typeparam>
  /// <returns>An instance of type `T` deserialized from the JSON response.</returns>
#nullable enable
  public virtual async Task<T?> ExecuteAsync<T>(
    Kernel kernel,
#nullable enable
    string? serviceId = null,
    Action<ChatHistory>? historyBuilder = null,
#nullable disable
    CancellationToken cancellation = default
  ) {
    var (result, _) = await ExecuteWithJsonAsync<T>(kernel, serviceId, historyBuilder, cancellation);
    return result;
  }
#nullable disable

  /// <summary>
  /// Executes the prompt and expects a JSON response that will be deserialized
  /// to the type `T`. This call includes the JSON result as part of the tuple.
  /// This method call will perform trimming of JSON fences if present using
  /// regular string find/replace.
  /// </summary>
  /// <param name="kernel">The Semantic Kernel instance.</param>
  /// <param name="serviceId">An optional service ID to specify for execution.</param>
  /// <param name="historyBuilder">An optional builder for the chat history.</param>
  /// <param name="cancellation">An optional cancellation token.</param>
  /// <typeparam name="T">The type `T` of the response object.</typeparam>
  /// <returns>An instance of type `T` deserialized from the JSON response, in a tuple with the full JSON response as well.</returns>
#nullable enable
  public virtual async Task<(T? Result, string Json)> ExecuteWithJsonAsync<T>(
    Kernel kernel,
#nullable enable
    string? serviceId = null,
    Action<ChatHistory>? historyBuilder = null,
#nullable disable
    CancellationToken cancellation = default
  ) {
    var json = await ExecuteAsync(kernel, serviceId, historyBuilder, cancellation);

    json = json.Trim().Replace("```json", "").Replace("```", "");

    return (JsonSerializer.Deserialize<T>(json, SerializerOptions), json);
  }
#nullable disable
}
[/code]
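Besides plain strings, the generated base class can return typed results: ExecuteAsync&lt;T&gt; and ExecuteWithJsonAsync&lt;T&gt; trim any ```json fences from the reply and deserialize it with case-insensitive property names. A sketch of that path, assuming a prompt that asks the model to answer with JSON and a hypothetical WeatherReport record (not part of the original sample):

[code lang="csharp"]
// Sketch only: assumes a prompt that instructs the model to reply with JSON
// matching the hypothetical WeatherReport record below.
using Microsoft.SemanticKernel;
using DemoAI;

var kernel = Kernel.CreateBuilder()
  .AddOpenAIChatCompletion("gpt-4o-mini", Environment.GetEnvironmentVariable("OPENAI_API_KEY")!)
  .Build();

// Returns both the deserialized object and the raw JSON (after fence trimming).
var (report, rawJson) = await new WeatherPrompt("Bucuresti")
  .ExecuteWithJsonAsync<WeatherReport>(kernel);

Console.WriteLine($"{report?.City}: {report?.Summary}");
Console.WriteLine(rawJson);

// Hypothetical response shape for illustration.
public sealed record WeatherReport(string City, string Summary);
[/code]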
Code and pdf at
https://ignatandrei.github.io/RSCG_Examples/v2/docs/SKPromptGenerator