add: Added support for any OpenAI-compatible API for the chatterbot feature

change: Changed games.yml to allow input of the apiUrl (which needs to be OpenAI compatible) and the modelName as a string.
Kwoth
2024-07-13 15:26:00 +00:00
parent ae1ddd82d0
commit db66264bc6
15 changed files with 260 additions and 170 deletions
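
For reference, "OpenAI compatible" here means an endpoint that accepts the standard chat-completions request at /v1/chat/completions and returns the usual choices/usage shape. A minimal sketch of such a call outside the bot (the base URL, API key and model below are placeholders, not values taken from this commit):

// Standalone sketch of the request/response shape the new chatterbot backend relies on.
// baseUrl, apiKey and model are placeholders - substitute your own values.
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json;

var baseUrl = "https://api.openai.com"; // or any OpenAI-compatible server
var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY");
var model = "gpt-3.5-turbo";

using var http = new HttpClient();
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", apiKey);

// Same endpoint the bot builds from the configured apiUrl: "{apiUrl}/v1/chat/completions"
var response = await http.PostAsJsonAsync($"{baseUrl.TrimEnd('/')}/v1/chat/completions", new
{
    model,
    messages = new[]
    {
        new { role = "system", content = "You are a helpful chat bot." },
        new { role = "user", content = "Say hello." }
    },
    max_tokens = 50,
    temperature = 1
});

var body = await response.Content.ReadAsStringAsync();
using var doc = JsonDocument.Parse(body);
Console.WriteLine(doc.RootElement
    .GetProperty("choices")[0]
    .GetProperty("message")
    .GetProperty("content")
    .GetString());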

View File

@@ -225,5 +225,19 @@ public partial class Administration
         if (!enabled)
             await Response().Pending(strs.greetdmmsg_enable($"`{prefix}greetdm`")).SendAsync();
     }
+
+    [Cmd]
+    [RequireContext(ContextType.Guild)]
+    [UserPerm(GuildPerm.ManageGuild)]
+    [Ratelimit(5)]
+    public async Task BoostTest([Leftover] IGuildUser? user = null)
+    {
+        user ??= (IGuildUser)ctx.User;
+
+        await _service.BoostTest((ITextChannel)ctx.Channel, user);
+        var enabled = _service.GetBoostEnabled(ctx.Guild.Id);
+        if (!enabled)
+            await Response().Pending(strs.boostmsg_enable($"`{prefix}boost`")).SendAsync();
+    }
 }
 }

View File

@@ -242,7 +242,7 @@ public class GreetService : INService, IReadyExecutor
             guild: channel.Guild,
             channel: channel,
             users: users.ToArray());
 
         var text = SmartText.CreateFrom(conf.ChannelGreetMessageText);
         text = await _repSvc.ReplaceAsync(text, repCtx);
         try
@@ -630,6 +630,13 @@ public class GreetService : INService, IReadyExecutor
         return conf.SendChannelByeMessage;
     }
 
+    public bool GetBoostEnabled(ulong guildId)
+    {
+        using var uow = _db.GetDbContext();
+        var conf = uow.GuildConfigsForId(guildId, set => set);
+        return conf.SendBoostMessage;
+    }
+
     #endregion
 
     #region Test Messages

View File

@@ -1,6 +1,5 @@
 #nullable disable
 using NadekoBot.Common.ModuleBehaviors;
-using NadekoBot.Db.Models;
 using NadekoBot.Modules.Games.Common;
 using NadekoBot.Modules.Games.Common.ChatterBot;
 using NadekoBot.Modules.Patronage;
@@ -58,18 +57,21 @@ public class ChatterBotService : IExecOnMessage
Log.Information("Cleverbot will not work as the api key is missing"); Log.Information("Cleverbot will not work as the api key is missing");
return null; return null;
case ChatBotImplementation.Gpt: case ChatBotImplementation.OpenAi:
var data = _gcs.Data;
if (!string.IsNullOrWhiteSpace(_creds.Gpt3ApiKey)) if (!string.IsNullOrWhiteSpace(_creds.Gpt3ApiKey))
return new OfficialGptSession(_creds.Gpt3ApiKey, return new OpenAiApiSession(
_gcs.Data.ChatGpt.ModelName, data.ChatGpt.ApiUrl,
_gcs.Data.ChatGpt.ChatHistory, _creds.Gpt3ApiKey,
_gcs.Data.ChatGpt.MaxTokens, data.ChatGpt.ModelName,
_gcs.Data.ChatGpt.MinTokens, data.ChatGpt.ChatHistory,
_gcs.Data.ChatGpt.PersonalityPrompt, data.ChatGpt.MaxTokens,
data.ChatGpt.MinTokens,
data.ChatGpt.PersonalityPrompt,
_client.CurrentUser.Username, _client.CurrentUser.Username,
_httpFactory); _httpFactory);
Log.Information("Gpt3 will not work as the api key is missing"); Log.Information("Openai Api will likely not work as the api key is missing");
return null; return null;
default: default:
return null; return null;

View File

@@ -0,0 +1,9 @@
+using System.Text.Json.Serialization;
+
+namespace NadekoBot.Modules.Games.Common.ChatterBot;
+
+public class Choice
+{
+    [JsonPropertyName("message")]
+    public Message Message { get; init; }
+}

View File

@@ -1,61 +0,0 @@
-#nullable disable
-using System.Text.Json.Serialization;
-
-namespace NadekoBot.Modules.Games.Common.ChatterBot;
-
-public class OpenAiCompletionResponse
-{
-    [JsonPropertyName("choices")]
-    public Choice[] Choices { get; set; }
-
-    [JsonPropertyName("usage")]
-    public OpenAiUsageData Usage { get; set; }
-}
-
-public class OpenAiUsageData
-{
-    [JsonPropertyName("prompt_tokens")]
-    public int PromptTokens { get; set; }
-
-    [JsonPropertyName("completion_tokens")]
-    public int CompletionTokens { get; set; }
-
-    [JsonPropertyName("total_tokens")]
-    public int TotalTokens { get; set; }
-}
-
-public class Choice
-{
-    [JsonPropertyName("message")]
-    public Message Message { get; init; }
-}
-
-public class Message {
-    [JsonPropertyName("content")]
-    public string Content { get; init; }
-}
-
-public class Gpt3ApiRequest
-{
-    [JsonPropertyName("model")]
-    public string Model { get; init; }
-
-    [JsonPropertyName("messages")]
-    public List<GPTMessage> Messages { get; init; }
-
-    [JsonPropertyName("temperature")]
-    public int Temperature { get; init; }
-
-    [JsonPropertyName("max_tokens")]
-    public int MaxTokens { get; init; }
-}
-
-public class GPTMessage
-{
-    [JsonPropertyName("role")]
-    public string Role {get; init;}
-    [JsonPropertyName("content")]
-    public string Content {get; init;}
-    [JsonPropertyName("name")]
-    public string Name {get; init;}
-}

View File

@@ -0,0 +1,8 @@
+using System.Text.Json.Serialization;
+
+namespace NadekoBot.Modules.Games.Common.ChatterBot;
+
+public class Message {
+    [JsonPropertyName("content")]
+    public string Content { get; init; }
+}

View File

@@ -0,0 +1,15 @@
+using System.Text.Json.Serialization;
+
+namespace NadekoBot.Modules.Games.Common.ChatterBot;
+
+public class OpenAiApiMessage
+{
+    [JsonPropertyName("role")]
+    public string Role { get; init; }
+
+    [JsonPropertyName("content")]
+    public string Content { get; init; }
+
+    [JsonPropertyName("name")]
+    public string Name { get; init; }
+}

View File

@@ -0,0 +1,18 @@
+using System.Text.Json.Serialization;
+
+namespace NadekoBot.Modules.Games.Common.ChatterBot;
+
+public class OpenAiApiRequest
+{
+    [JsonPropertyName("model")]
+    public string Model { get; init; }
+
+    [JsonPropertyName("messages")]
+    public List<OpenAiApiMessage> Messages { get; init; }
+
+    [JsonPropertyName("temperature")]
+    public int Temperature { get; init; }
+
+    [JsonPropertyName("max_tokens")]
+    public int MaxTokens { get; init; }
+}

View File

@@ -0,0 +1,15 @@
+using System.Text.Json.Serialization;
+
+namespace NadekoBot.Modules.Games.Common.ChatterBot;
+
+public class OpenAiApiUsageData
+{
+    [JsonPropertyName("prompt_tokens")]
+    public int PromptTokens { get; set; }
+
+    [JsonPropertyName("completion_tokens")]
+    public int CompletionTokens { get; set; }
+
+    [JsonPropertyName("total_tokens")]
+    public int TotalTokens { get; set; }
+}

View File

@@ -0,0 +1,13 @@
+#nullable disable
+using System.Text.Json.Serialization;
+
+namespace NadekoBot.Modules.Games.Common.ChatterBot;
+
+public class OpenAiCompletionResponse
+{
+    [JsonPropertyName("choices")]
+    public Choice[] Choices { get; set; }
+
+    [JsonPropertyName("usage")]
+    public OpenAiApiUsageData Usage { get; set; }
+}
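
Not part of the commit, but for reference: these DTOs map the standard chat-completions response shape, and their [JsonPropertyName] attributes come from System.Text.Json. A small sketch deserializing a hand-written sample payload (illustrative JSON, not captured from a real server); note that inside the bot, OpenAiApiSession runs the raw body through Newtonsoft's JsonConvert instead:

// Deserializing a typical OpenAI-style completion payload into the DTOs above.
using System;
using System.Text.Json;
using NadekoBot.Modules.Games.Common.ChatterBot;

const string sample = """
{
  "choices": [ { "message": { "content": "Hello!" } } ],
  "usage": { "prompt_tokens": 12, "completion_tokens": 3, "total_tokens": 15 }
}
""";

var parsed = JsonSerializer.Deserialize<OpenAiCompletionResponse>(sample);
Console.WriteLine(parsed?.Choices?[0].Message.Content); // -> Hello!
Console.WriteLine(parsed?.Usage.TotalTokens);           // -> 15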

View File

@@ -1,18 +1,15 @@
 #nullable disable
 using Newtonsoft.Json;
 using OneOf.Types;
-using System.Net.Http.Json;
 using SharpToken;
-using System.CodeDom;
+using System.Net.Http.Json;
 using System.Text.RegularExpressions;
 
 namespace NadekoBot.Modules.Games.Common.ChatterBot;
 
-public partial class OfficialGptSession : IChatterBotSession
+public partial class OpenAiApiSession : IChatterBotSession
 {
-    private string Uri
-        => $"https://api.openai.com/v1/chat/completions";
+    private readonly string _baseUrl;
     private readonly string _apiKey;
     private readonly string _model;
     private readonly int _maxHistory;
@@ -20,13 +17,14 @@ public partial class OfficialGptSession : IChatterBotSession
     private readonly int _minTokens;
     private readonly string _nadekoUsername;
     private readonly GptEncoding _encoding;
-    private List<GPTMessage> messages = new();
+    private List<OpenAiApiMessage> messages = new();
     private readonly IHttpClientFactory _httpFactory;
 
-    public OfficialGptSession(
+    public OpenAiApiSession(
+        string url,
         string apiKey,
-        ChatGptModel model,
+        string model,
         int chatHistory,
         int maxTokens,
         int minTokens,
@@ -34,44 +32,47 @@ public partial class OfficialGptSession : IChatterBotSession
         string nadekoUsername,
         IHttpClientFactory factory)
     {
-        _apiKey = apiKey;
-        _httpFactory = factory;
-        _model = model switch
-        {
-            ChatGptModel.Gpt35Turbo => "gpt-3.5-turbo",
-            ChatGptModel.Gpt4o => "gpt-4o",
-            _ => throw new ArgumentException("Unknown, unsupported or obsolete model", nameof(model))
-        };
+        if (string.IsNullOrWhiteSpace(url) || !Uri.TryCreate(url, UriKind.Absolute, out _))
+        {
+            throw new ArgumentException("Invalid OpenAi api url provided", nameof(url));
+        }
+
+        _baseUrl = url.TrimEnd('/');
+        _apiKey = apiKey;
+        _model = model;
+        _httpFactory = factory;
         _maxHistory = chatHistory;
         _maxTokens = maxTokens;
         _minTokens = minTokens;
         _nadekoUsername = UsernameCleaner().Replace(nadekoUsername, "");
-        _encoding = GptEncoding.GetEncodingForModel(_model);
+        _encoding = GptEncoding.GetEncodingForModel("gpt-4o");
 
-        messages.Add(new()
-        {
-            Role = "system",
-            Content = personality,
-            Name = _nadekoUsername
-        });
+        if (!string.IsNullOrWhiteSpace(personality))
+        {
+            messages.Add(new()
+            {
+                Role = "system",
+                Content = personality,
+                Name = _nadekoUsername
+            });
+        }
     }
 
     [GeneratedRegex("[^a-zA-Z0-9_-]")]
     private static partial Regex UsernameCleaner();
 
     public async Task<OneOf.OneOf<ThinkResult, Error<string>>> Think(string input, string username)
     {
         username = UsernameCleaner().Replace(username, "");
         messages.Add(new()
         {
             Role = "user",
             Content = input,
             Name = username
         });
 
         while (messages.Count > _maxHistory + 2)
         {
             messages.RemoveAt(1);
@@ -92,28 +93,29 @@ public partial class OfficialGptSession : IChatterBotSession
             }
             else
             {
-                return new Error<string>("Token count exceeded, please increase the number of tokens in the bot config and restart.");
+                return new Error<string>(
+                    "Token count exceeded, please increase the number of tokens in the bot config and restart.");
             }
         }
 
         using var http = _httpFactory.CreateClient();
         http.DefaultRequestHeaders.Authorization = new("Bearer", _apiKey);
 
-        var data = await http.PostAsJsonAsync(Uri,
-            new Gpt3ApiRequest()
+        var data = await http.PostAsJsonAsync($"{_baseUrl}/v1/chat/completions",
+            new OpenAiApiRequest()
             {
                 Model = _model,
                 Messages = messages,
                 MaxTokens = _maxTokens - tokensUsed,
                 Temperature = 1,
             });
 
         var dataString = await data.Content.ReadAsStringAsync();
         try
         {
             var response = JsonConvert.DeserializeObject<OpenAiCompletionResponse>(dataString);
-            Log.Information("Received response: {response} ", dataString);
+            // Log.Information("Received response: {Response} ", dataString);
 
             var res = response?.Choices?[0];
             var message = res?.Message?.Content;
@@ -121,14 +123,14 @@ public partial class OfficialGptSession : IChatterBotSession
             {
                 return new Error<string>("ChatGpt: Received no response.");
             }
 
             messages.Add(new()
             {
                 Role = "assistant",
                 Content = message,
                 Name = _nadekoUsername
             });
 
             return new ThinkResult()
             {
                 Text = message,
@@ -142,11 +144,4 @@ public partial class OfficialGptSession : IChatterBotSession
return new Error<string>("Unexpected response received"); return new Error<string>("Unexpected response received");
} }
} }
}
public sealed class ThinkResult
{
public string Text { get; set; }
public int TokensIn { get; set; }
public int TokensOut { get; set; }
} }
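
A rough usage sketch for the renamed session, not part of the commit: it assumes the surrounding NadekoBot types, builds a throwaway service provider only to obtain an IHttpClientFactory, and uses placeholder URL, key, model and username values (inside the bot this wiring is done by ChatterBotService):

using System;
using System.Net.Http;
using Microsoft.Extensions.DependencyInjection;
using NadekoBot.Modules.Games.Common.ChatterBot;

// Throwaway DI container, just to get an IHttpClientFactory for the sketch.
var httpFactory = new ServiceCollection()
    .AddHttpClient()
    .BuildServiceProvider()
    .GetRequiredService<IHttpClientFactory>();

var session = new OpenAiApiSession(
    "http://localhost:11434",    // base url of any OpenAI-compatible server (placeholder), no /v1/chat/completions suffix
    "sk-placeholder",            // api key
    "gpt-3.5-turbo",             // model name
    5,                           // chat history
    100,                         // max tokens
    30,                          // min tokens
    "You are a chat bot willing to have a conversation with anyone about anything.", // personality prompt
    "Nadeko",                    // bot username (placeholder)
    httpFactory);

var result = await session.Think("Hello there!", "SomeUser");
result.Switch(
    ok => Console.WriteLine(ok.Text),
    err => Console.WriteLine($"Error: {err.Value}"));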

View File

@@ -0,0 +1,11 @@
+#nullable disable
+using System.CodeDom;
+
+namespace NadekoBot.Modules.Games.Common.ChatterBot;
+
+public sealed class ThinkResult
+{
+    public string Text { get; set; }
+    public int TokensIn { get; set; }
+    public int TokensOut { get; set; }
+}

View File

@@ -8,7 +8,7 @@ namespace NadekoBot.Modules.Games.Common;
 public sealed partial class GamesConfig : ICloneable<GamesConfig>
 {
     [Comment("DO NOT CHANGE")]
-    public int Version { get; set; } = 4;
+    public int Version { get; set; } = 5;
 
     [Comment("Hangman related settings (.hangman command)")]
     public HangmanConfig Hangman { get; set; } = new()
@@ -103,10 +103,13 @@ public sealed partial class GamesConfig : ICloneable<GamesConfig>
     }
     ];
 
-    [Comment(@"Which chatbot API should bot use.
-'cleverbot' - bot will use Cleverbot API.
-'gpt' - bot will use GPT API")]
-    public ChatBotImplementation ChatBot { get; set; } = ChatBotImplementation.Gpt;
+    [Comment(
+        """
+        Which chatbot API should bot use.
+        'cleverbot' - bot will use Cleverbot API.
+        'openai' - bot will use OpenAi API
+        """)]
+    public ChatBotImplementation ChatBot { get; set; } = ChatBotImplementation.OpenAi;
 
     public ChatGptConfig ChatGpt { get; set; } = new();
 }
@@ -114,19 +117,38 @@ public sealed partial class GamesConfig : ICloneable<GamesConfig>
 [Cloneable]
 public sealed partial class ChatGptConfig
 {
-    [Comment(@"Which GPT Model should bot use.
-gpt35turbo - cheapest
-gpt4o - more expensive, higher quality
-")]
-    public ChatGptModel ModelName { get; set; } = ChatGptModel.Gpt35Turbo;
+    [Comment("""
+        Url to any openai api compatible url.
+        Make sure to modify the modelName appropriately
+        DO NOT add /v1/chat/completions suffix to the url
+        """)]
+    public string ApiUrl { get; set; } = "https://api.openai.com";
 
-    [Comment(@"How should the chat bot behave, what's its personality? (Usage of this counts towards the max tokens)")]
-    public string PersonalityPrompt { get; set; } = "You are a chat bot willing to have a conversation with anyone about anything.";
+    [Comment("""
+        Which GPT Model should bot use.
+        gpt-3.5-turbo - cheapest
+        gpt-4o - more expensive, higher quality
+
+        If you are using another openai compatible api, you may use any of the models supported by that api
+        """)]
+    public string ModelName { get; set; } = "gpt-3.5-turbo";
 
-    [Comment(@"The maximum number of messages in a conversation that can be remembered. (This will increase the number of tokens used)")]
+    [Comment("""
+        How should the chatbot behave, what's its personality?
+        This will be sent as a system message.
+        Usage of this counts towards the max tokens.
+        """)]
+    public string PersonalityPrompt { get; set; } =
+        "You are a chat bot willing to have a conversation with anyone about anything.";
+
+    [Comment(
+        """
+        The maximum number of messages in a conversation that can be remembered.
+        This will increase the number of tokens used.
+        """)]
     public int ChatHistory { get; set; } = 5;
 
-    [Comment(@"The maximum number of tokens to use per GPT API call")]
+    [Comment(@"The maximum number of tokens to use per OpenAi API call")]
     public int MaxTokens { get; set; } = 100;
 
     [Comment(@"The minimum number of tokens to use per GPT API call, such that chat history is removed to make room.")]
@@ -147,9 +169,9 @@ public sealed partial class TriviaConfig
     public long CurrencyReward { get; set; }
 
     [Comment("""
         Users won't be able to start trivia games which have
         a smaller win requirement than the one specified by this setting.
         """)]
     public int MinimumWinReq { get; set; } = 1;
 }
@@ -163,18 +185,11 @@ public sealed partial class RaceAnimal
 public enum ChatBotImplementation
 {
     Cleverbot,
+    OpenAi = 1,
+
+    [Obsolete]
     Gpt = 1,
+
     [Obsolete]
     Gpt3 = 1,
-}
-
-public enum ChatGptModel
-{
-    [Obsolete]
-    Gpt4,
-    [Obsolete]
-    Gpt432k,
-    Gpt35Turbo,
-    Gpt4o,
 }

View File

@@ -32,29 +32,21 @@ public sealed class GamesConfigService : ConfigServiceBase<GamesConfig>
             gs => gs.ChatBot,
             ConfigParsers.InsensitiveEnum,
             ConfigPrinters.ToString);
 
+        AddParsedProp("gpt.apiUrl",
+            gs => gs.ChatGpt.ApiUrl,
+            ConfigParsers.String,
+            ConfigPrinters.ToString);
+
         AddParsedProp("gpt.modelName",
             gs => gs.ChatGpt.ModelName,
-            ConfigParsers.InsensitiveEnum,
+            ConfigParsers.String,
             ConfigPrinters.ToString);
 
         AddParsedProp("gpt.personality",
             gs => gs.ChatGpt.PersonalityPrompt,
             ConfigParsers.String,
             ConfigPrinters.ToString);
 
-        AddParsedProp("gpt.chathistory",
-            gs => gs.ChatGpt.ChatHistory,
-            int.TryParse,
-            ConfigPrinters.ToString,
-            val => val > 0);
-
-        AddParsedProp("gpt.max_tokens",
-            gs => gs.ChatGpt.MaxTokens,
-            int.TryParse,
-            ConfigPrinters.ToString,
-            val => val > 0);
-
-        AddParsedProp("gpt.min_tokens",
-            gs => gs.ChatGpt.MinTokens,
-            int.TryParse,
-            ConfigPrinters.ToString,
-            val => val > 0);
-
         Migrate();
     }
@@ -78,7 +70,7 @@ public sealed class GamesConfigService : ConfigServiceBase<GamesConfig>
             ModifyConfig(c =>
             {
                 c.Version = 3;
-                c.ChatGpt.ModelName = ChatGptModel.Gpt35Turbo;
+                c.ChatGpt.ModelName = "gpt35turbo";
             });
         }
@@ -89,11 +81,40 @@ public sealed class GamesConfigService : ConfigServiceBase<GamesConfig>
                 c.Version = 4;
 #pragma warning disable CS0612 // Type or member is obsolete
                 c.ChatGpt.ModelName =
-                    c.ChatGpt.ModelName == ChatGptModel.Gpt4 || c.ChatGpt.ModelName == ChatGptModel.Gpt432k
-                        ? ChatGptModel.Gpt4o
-                        : c.ChatGpt.ModelName;
+                    c.ChatGpt.ModelName.Equals("gpt4", StringComparison.OrdinalIgnoreCase)
+                    || c.ChatGpt.ModelName.Equals("gpt432k", StringComparison.OrdinalIgnoreCase)
+                        ? "gpt-4o"
+                        : "gpt-3.5-turbo";
 #pragma warning restore CS0612 // Type or member is obsolete
             });
         }
+
+        if (data.Version < 5)
+        {
+            ModifyConfig(c =>
+            {
+                c.Version = 5;
+
+                c.ChatBot = c.ChatBot == ChatBotImplementation.OpenAi
+                    ? ChatBotImplementation.OpenAi
+                    : c.ChatBot;
+
+                if (c.ChatGpt.ModelName.Equals("gpt4o", StringComparison.OrdinalIgnoreCase))
+                {
+                    c.ChatGpt.ModelName = "gpt-4o";
+                }
+                else if (c.ChatGpt.ModelName.Equals("gpt35turbo", StringComparison.OrdinalIgnoreCase))
+                {
+                    c.ChatGpt.ModelName = "gpt-3.5-turbo";
+                }
+                else
+                {
+                    Log.Warning(
+                        "Unknown OpenAI api model name: {ModelName}. "
+                        + "It will be reset to 'gpt-3.5-turbo' only this time",
+                        c.ChatGpt.ModelName);
+                    c.ChatGpt.ModelName = "gpt-3.5-turbo";
+                }
+            });
+        }
     }
 }

View File

@@ -1,5 +1,5 @@
 # DO NOT CHANGE
-version: 4
+version: 5
 # Hangman related settings (.hangman command)
 hangman:
   # The amount of currency awarded to the winner of a hangman game
@@ -56,19 +56,27 @@ raceAnimals:
     name: Unicorn
 # Which chatbot API should bot use.
 # 'cleverbot' - bot will use Cleverbot API.
-# 'gpt' - bot will use GPT API
-chatBot: Gpt
+# 'openai' - bot will use OpenAi API
+chatBot: OpenAi
 chatGpt:
+  # Url to any openai api compatible url.
+  # Make sure to modify the modelName appropriately
+  # DO NOT add /v1/chat/completions suffix to the url
+  apiUrl: https://api.openai.com
   # Which GPT Model should bot use.
-  # gpt35turbo - cheapest
-  # gpt4o - more expensive, higher quality
+  # gpt-3.5-turbo - cheapest
+  # gpt-4o - more expensive, higher quality
   #
-  modelName: Gpt35Turbo
-  # How should the chat bot behave, what's its personality? (Usage of this counts towards the max tokens)
+  # If you are using another openai compatible api, you may use any of the models supported by that api
+  modelName: gpt-3.5-turbo
+  # How should the chatbot behave, what's its personality?
+  # This will be sent as a system message.
+  # Usage of this counts towards the max tokens.
   personalityPrompt: You are a chat bot willing to have a conversation with anyone about anything.
-  # The maximum number of messages in a conversation that can be remembered. (This will increase the number of tokens used)
+  # The maximum number of messages in a conversation that can be remembered.
+  # This will increase the number of tokens used.
   chatHistory: 5
-  # The maximum number of tokens to use per GPT API call
+  # The maximum number of tokens to use per OpenAi API call
   maxTokens: 100
   # The minimum number of tokens to use per GPT API call, such that chat history is removed to make room.
   minTokens: 30
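
To illustrate the apiUrl note above: the configured value is only trimmed of a trailing slash before /v1/chat/completions is appended (mirroring what OpenAiApiSession does), which is why the suffix must be left off. A quick sketch with placeholder URLs:

// Mirrors how OpenAiApiSession turns the configured apiUrl into the request URL.
using System;

static string ToCompletionsUrl(string apiUrl)
    => $"{apiUrl.TrimEnd('/')}/v1/chat/completions";

Console.WriteLine(ToCompletionsUrl("https://api.openai.com"));
// -> https://api.openai.com/v1/chat/completions

Console.WriteLine(ToCompletionsUrl("http://localhost:11434/")); // placeholder for a local compatible server
// -> http://localhost:11434/v1/chat/completions

Console.WriteLine(ToCompletionsUrl("https://api.openai.com/v1/chat/completions"));
// -> https://api.openai.com/v1/chat/completions/v1/chat/completions - doubled, so leave the suffix off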