Added an explicit OpenAI moderation call to Advanced Paste

Craig Loewen 2024-11-22 00:00:08 -06:00
parent 40acdea356
commit c78aade96f
3 changed files with 19 additions and 1 deletion


@@ -57,6 +57,7 @@
     <PackageVersion Include="NLog" Version="5.0.4" />
     <PackageVersion Include="NLog.Extensions.Logging" Version="5.3.8" />
     <PackageVersion Include="NLog.Schema" Version="5.2.8" />
+    <PackageVersion Include="OpenAI" Version="2.0.0" />
     <PackageVersion Include="ReverseMarkdown" Version="4.1.0" />
     <PackageVersion Include="ScipBe.Common.Office.OneNote" Version="3.0.1" />
     <PackageVersion Include="SharpCompress" Version="0.37.2" />


@@ -62,6 +62,7 @@
     <PackageReference Include="Microsoft.Windows.Compatibility" />
     <PackageReference Include="Microsoft.Windows.CsWin32" />
     <PackageReference Include="Microsoft.Windows.SDK.BuildTools" />
+    <PackageReference Include="OpenAI" />
     <PackageReference Include="ReverseMarkdown" />
     <PackageReference Include="StreamJsonRpc" />
     <PackageReference Include="WinUIEx" />


@@ -12,6 +12,7 @@ using Azure.AI.OpenAI;
 using ManagedCommon;
 using Microsoft.PowerToys.Settings.UI.Library;
 using Microsoft.PowerToys.Telemetry;
+using OpenAI.Moderations;
 using Windows.Security.Credentials;
 
 namespace AdvancedPaste.Helpers
@@ -76,13 +77,28 @@ namespace AdvancedPaste.Helpers
         {
             OpenAIClient azureAIClient = new OpenAIClient(_openAIKey);
 
+            string inputString = systemInstructions + "\n\n" + userMessage;
+
+            ModerationClient moderationClient = new("omni-moderation-latest", _openAIKey);
+
+            // TODO: Run this as async along with the chat completion result to maintain speed
+            ModerationResult moderationResult = moderationClient.ClassifyText(inputString);
+
+            if (moderationResult.Flagged)
+            {
+#pragma warning disable CA2201 // Use explicit type
+                // TODO: Use a more explicit type and handle the error more gracefully
+                throw new Exception("Flagged by moderation");
+#pragma warning restore CA2201 // Use explicit type
+            }
+
             var response = azureAIClient.GetCompletions(
                 new CompletionsOptions()
                 {
                     DeploymentName = _modelName,
                     Prompts =
                     {
-                        systemInstructions + "\n\n" + userMessage,
+                        inputString,
                     },
                     Temperature = 0.01F,
                     MaxTokens = 2000,
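
The first TODO above notes that the moderation call currently runs synchronously before the completion request. Below is a minimal sketch of one way to overlap the two calls; it is not part of this commit. The method name GetAICompletionAsync is illustrative, ClassifyTextAsync and GetCompletionsAsync are assumed to be the async counterparts of the calls used in the diff, and InvalidOperationException stands in for the "more explicit type" the second TODO asks for. The sketch assumes the same usings as the file above plus System.Threading.Tasks.

// Sketch only: start moderation and completion concurrently, then gate the
// completion on the moderation verdict before returning it.
private async Task<Completions> GetAICompletionAsync(string systemInstructions, string userMessage)
{
    OpenAIClient azureAIClient = new OpenAIClient(_openAIKey);
    ModerationClient moderationClient = new("omni-moderation-latest", _openAIKey);

    string inputString = systemInstructions + "\n\n" + userMessage;

    // Kick off both requests without awaiting either one.
    var moderationTask = moderationClient.ClassifyTextAsync(inputString);
    var completionTask = azureAIClient.GetCompletionsAsync(
        new CompletionsOptions()
        {
            DeploymentName = _modelName,
            Prompts = { inputString },
            Temperature = 0.01F,
            MaxTokens = 2000,
        });

    // Refuse to return a completion whose input was flagged by moderation.
    ModerationResult moderationResult = (await moderationTask).Value;
    if (moderationResult.Flagged)
    {
        throw new InvalidOperationException("Flagged by moderation");
    }

    return (await completionTask).Value;
}

The trade-off in this shape is that a flagged prompt is still sent to the completion endpoint and its result is simply discarded, which is what buys back the moderation round-trip latency; if a flagged prompt must never reach the model at all, the sequential order used in the commit is the safer choice.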