From afba004d850f6fdcf6c813f4fc1570eed6176900 Mon Sep 17 00:00:00 2001 From: Alexandra Date: Tue, 16 Jan 2024 09:12:14 +0000 Subject: [PATCH 1/5] Update GPT ChatterBot * Update endpoint from v1/completions to v1/chat/completions * Add SharpToken as a library to calculate input token usage * Subtract input tokens from max_tokens to ensure the API tokens don't exceed the max specified * Add Chat history support since this API supports it * Add a personality prompt to tweak the way the bot behaves * Add a min_tokens config to increase the quality of chat messages when history is enabled * Adjust the response function to throw an exception so that a null message isn't added to the list. --- .../Games/ChatterBot/ChatterbotService.cs | 8 ++- .../Games/ChatterBot/_Common/Gpt3Response.cs | 22 +++++- .../ChatterBot/_Common/IChatterBotSession.cs | 2 +- .../_Common/OfficialCleverbotSession.cs | 2 +- .../ChatterBot/_Common/OfficialGpt3Session.cs | 72 ++++++++++++++----- src/NadekoBot/Modules/Games/GamesConfig.cs | 29 +++++--- .../Modules/Games/GamesConfigService.cs | 30 ++++++-- src/NadekoBot/NadekoBot.csproj | 1 + src/NadekoBot/data/games.yml | 21 +++--- 9 files changed, 140 insertions(+), 47 deletions(-) diff --git a/src/NadekoBot/Modules/Games/ChatterBot/ChatterbotService.cs b/src/NadekoBot/Modules/Games/ChatterBot/ChatterbotService.cs index 422925f4b..5962a27d2 100644 --- a/src/NadekoBot/Modules/Games/ChatterBot/ChatterbotService.cs +++ b/src/NadekoBot/Modules/Games/ChatterBot/ChatterbotService.cs @@ -79,8 +79,12 @@ public class ChatterBotService : IExecOnMessage case ChatBotImplementation.Gpt3: if (!string.IsNullOrWhiteSpace(_creds.Gpt3ApiKey)) return new OfficialGpt3Session(_creds.Gpt3ApiKey, - _gcs.Data.ChatGpt.Model, + _gcs.Data.ChatGpt.ModelName, + _gcs.Data.ChatGpt.ChatHistory, _gcs.Data.ChatGpt.MaxTokens, + _gcs.Data.ChatGpt.MinTokens, + _gcs.Data.ChatGpt.PersonalityPrompt, + _client.CurrentUser.Username, _httpFactory); Log.Information("Gpt3 will not work as the api key is missing."); @@ -199,7 +203,7 @@ public class ChatterBotService : IExecOnMessage } _ = channel.TriggerTypingAsync(); - var response = await cbs.Think(message); + var response = await cbs.Think(message, usrMsg.Author.ToString()); await channel.SendConfirmAsync(_eb, title: null, response.SanitizeMentions(true) diff --git a/src/NadekoBot/Modules/Games/ChatterBot/_Common/Gpt3Response.cs b/src/NadekoBot/Modules/Games/ChatterBot/_Common/Gpt3Response.cs index 7ec0c6186..80c24c1d3 100644 --- a/src/NadekoBot/Modules/Games/ChatterBot/_Common/Gpt3Response.cs +++ b/src/NadekoBot/Modules/Games/ChatterBot/_Common/Gpt3Response.cs @@ -11,7 +11,13 @@ public class Gpt3Response public class Choice { - public string Text { get; set; } + [JsonPropertyName("message")] + public Message Message { get; init; } +} + +public class Message { + [JsonPropertyName("content")] + public string Content { get; init; } } public class Gpt3ApiRequest { [JsonPropertyName("model")] public string Model { get; init; } - [JsonPropertyName("prompt")] - public string Prompt { get; init; } + [JsonPropertyName("messages")] + public List<GPTMessage> Messages { get; init; } [JsonPropertyName("temperature")] public int Temperature { get; init; } [JsonPropertyName("max_tokens")] public int MaxTokens { get; init; } +} + +public class GPTMessage +{ + [JsonPropertyName("role")] + public string Role {get; init;} + [JsonPropertyName("content")] + public string Content {get; init;} + [JsonPropertyName("name")] + public string Name {get; init;}
} \ No newline at end of file diff --git a/src/NadekoBot/Modules/Games/ChatterBot/_Common/IChatterBotSession.cs b/src/NadekoBot/Modules/Games/ChatterBot/_Common/IChatterBotSession.cs index 5fff26978..15a93406d 100644 --- a/src/NadekoBot/Modules/Games/ChatterBot/_Common/IChatterBotSession.cs +++ b/src/NadekoBot/Modules/Games/ChatterBot/_Common/IChatterBotSession.cs @@ -3,5 +3,5 @@ namespace NadekoBot.Modules.Games.Common.ChatterBot; public interface IChatterBotSession { - Task<string> Think(string input); + Task<string> Think(string input, string username); } \ No newline at end of file diff --git a/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialCleverbotSession.cs b/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialCleverbotSession.cs index 73ed93bd2..71979d2f0 100644 --- a/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialCleverbotSession.cs +++ b/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialCleverbotSession.cs @@ -18,7 +18,7 @@ public class OfficialCleverbotSession : IChatterBotSession _httpFactory = factory; } - public async Task<string> Think(string input) + public async Task<string> Think(string input, string username) { using var http = _httpFactory.CreateClient(); var dataString = await http.GetStringAsync(string.Format(QueryString, input, cs ?? "")); diff --git a/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialGpt3Session.cs b/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialGpt3Session.cs index 24eb2db98..1fa28d4a1 100644 --- a/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialGpt3Session.cs +++ b/src/NadekoBot/Modules/Games/ChatterBot/_Common/OfficialGpt3Session.cs @@ -1,63 +1,101 @@ #nullable disable using Newtonsoft.Json; using System.Net.Http.Json; +using SharpToken; +using Antlr.Runtime; +using Microsoft.CodeAnalysis.CSharp.Syntax; namespace NadekoBot.Modules.Games.Common.ChatterBot; public class OfficialGpt3Session : IChatterBotSession { private string Uri - => $"https://api.openai.com/v1/completions"; + => $"https://api.openai.com/v1/chat/completions"; private readonly string _apiKey; private readonly string _model; + private readonly int _maxHistory; private readonly int _maxTokens; + private readonly int _minTokens; + private readonly string _nadekoUsername; + private readonly GptEncoding _encoding; + private List<GPTMessage> messages = new(); private readonly IHttpClientFactory _httpFactory; + + public OfficialGpt3Session( string apiKey, - Gpt3Model model, + ChatGptModel model, + int chatHistory, int maxTokens, + int minTokens, + string personality, + string nadekoUsername, IHttpClientFactory factory) { _apiKey = apiKey; _httpFactory = factory; switch (model) { - case Gpt3Model.Ada001: - _model = "text-ada-001"; + case ChatGptModel.Gpt35Turbo: - _model = "gpt-3.5-turbo"; break; - case Gpt3Model.Babbage001: - _model = "text-babbage-001"; + case ChatGptModel.Gpt4: + _model = "gpt-4"; break; - case Gpt3Model.Curie001: - _model = "text-curie-001"; - break; - case Gpt3Model.Davinci003: - _model = "text-davinci-003"; + case ChatGptModel.Gpt432k: + _model = "gpt-4-32k"; break; } - + _maxHistory = chatHistory; _maxTokens = maxTokens; + _minTokens = minTokens; + _nadekoUsername = nadekoUsername; + _encoding = GptEncoding.GetEncodingForModel(_model); + messages.Add(new GPTMessage(){Role = "user", Content = personality, Name = _nadekoUsername}); } - public async Task<string> Think(string input) + public async Task<string> Think(string input, string username) { + messages.Add(new GPTMessage(){Role = "user", Content = input, Name = username}); + while(messages.Count > _maxHistory + 2){
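+ //messages[0] holds the personality prompt added in the constructor, so drop the oldest chat message after it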
messages.RemoveAt(1); + } + int tokensUsed = 0; + foreach(GPTMessage message in messages){ + tokensUsed += _encoding.Encode(message.Content).Count; + } + tokensUsed *= 2; //Unsure why this is the case, but the token count chatgpt reports back is double what I calculate. + //check if we have the minimum number of tokens available to use. Remove messages until we have enough, otherwise exit out and inform the user why. + while(_maxTokens - tokensUsed <= _minTokens){ + if(messages.Count > 2){ + int tokens = _encoding.Encode(messages[1].Content).Count * 2; + tokensUsed -= tokens; + messages.RemoveAt(1); + } + else{ + return "Token count exceeded, please increase the number of tokens in the bot config and restart."; + } + } using var http = _httpFactory.CreateClient(); http.DefaultRequestHeaders.Authorization = new("Bearer", _apiKey); var data = await http.PostAsJsonAsync(Uri, new Gpt3ApiRequest() { Model = _model, - Prompt = input, - MaxTokens = _maxTokens, + Messages = messages, + MaxTokens = _maxTokens - tokensUsed, Temperature = 1, }); var dataString = await data.Content.ReadAsStringAsync(); try { var response = JsonConvert.DeserializeObject<Gpt3Response>(dataString); - - return response?.Choices[0]?.Text; + string message = response?.Choices[0]?.Message?.Content; + //Can't just return the value anymore; throw so that a null message is never added to the messages list. + _ = message ?? throw new ArgumentNullException(nameof(message)); + messages.Add(new GPTMessage(){Role = "assistant", Content = message, Name = _nadekoUsername}); + return message; } catch { diff --git a/src/NadekoBot/Modules/Games/GamesConfig.cs b/src/NadekoBot/Modules/Games/GamesConfig.cs index db8144648..5537f7c8f 100644 --- a/src/NadekoBot/Modules/Games/GamesConfig.cs +++ b/src/NadekoBot/Modules/Games/GamesConfig.cs @@ -8,7 +8,7 @@ namespace NadekoBot.Modules.Games.Common; public sealed partial class GamesConfig : ICloneable { [Comment("DO NOT CHANGE")] - public int Version { get; set; } = 2; + public int Version { get; set; } = 3; [Comment("Hangman related settings (.hangman command)")] public HangmanConfig Hangman { get; set; } = new() @@ -108,14 +108,22 @@ public sealed partial class GamesConfig : ICloneable public sealed partial class ChatGptConfig { [Comment(@"Which GPT-3 Model should bot use. -'ada001' - cheapest and fastest -'babbage001' - 2nd option -'curie001' - 3rd option -'davinci003' - Most expensive, slowest")] - public Gpt3Model Model { get; set; } = Gpt3Model.Ada001; + gpt35turbo - cheapest + gpt4 - 30x more expensive, higher quality + gpt432k - same model as above, but with a 32k token limit")] + public ChatGptModel ModelName { get; set; } = ChatGptModel.Gpt35Turbo; + + [Comment(@"How should the chat bot behave, what's its personality? (Usage of this counts towards the max tokens)")] + public string PersonalityPrompt { get; set; } = "You are a chat bot willing to have a conversation with anyone about anything."; + + [Comment(@"The maximum number of messages in a conversation that can be remembered.
(This will increase the number of tokens used)")] + public int ChatHistory { get; set; } = 5; [Comment(@"The maximum number of tokens to use per GPT-3 API call")] public int MaxTokens { get; set; } = 100; + + [Comment(@"The minimum number of tokens to use per GPT-3 API call, such that chat history is removed to make room.")] + public int MinTokens { get; set; } = 30; } [Cloneable] @@ -149,10 +157,9 @@ public enum ChatBotImplementation Gpt3 } -public enum Gpt3Model +public enum ChatGptModel { - Ada001, - Babbage001, - Curie001, - Davinci003 + Gpt35Turbo, + Gpt4, + Gpt432k } \ No newline at end of file diff --git a/src/NadekoBot/Modules/Games/GamesConfigService.cs b/src/NadekoBot/Modules/Games/GamesConfigService.cs index 690a92e0b..4f08b6106 100644 --- a/src/NadekoBot/Modules/Games/GamesConfigService.cs +++ b/src/NadekoBot/Modules/Games/GamesConfigService.cs @@ -28,20 +28,33 @@ public sealed class GamesConfigService : ConfigServiceBase long.TryParse, ConfigPrinters.ToString, val => val >= 0); - AddParsedProp("chatbot", gs => gs.ChatBot, ConfigParsers.InsensitiveEnum, ConfigPrinters.ToString); - AddParsedProp("gpt.model", - gs => gs.ChatGpt.Model, + AddParsedProp("gpt.modelName", + gs => gs.ChatGpt.ModelName, ConfigParsers.InsensitiveEnum, ConfigPrinters.ToString); + AddParsedProp("gpt.personality", + gs => gs.ChatGpt.PersonalityPrompt, + ConfigParsers.String, + ConfigPrinters.ToString); + AddParsedProp("gpt.chathistory", + gs => gs.ChatGpt.ChatHistory, + int.TryParse, + ConfigPrinters.ToString, + val => val > 0); AddParsedProp("gpt.max_tokens", gs => gs.ChatGpt.MaxTokens, int.TryParse, ConfigPrinters.ToString, val => val > 0); + AddParsedProp("gpt.min_tokens", + gs => gs.ChatGpt.MinTokens, + int.TryParse, + ConfigPrinters.ToString, + val => val > 0); Migrate(); } @@ -65,7 +78,16 @@ public sealed class GamesConfigService : ConfigServiceBase ModifyConfig(c => { c.Version = 2; - c.ChatBot = ChatBotImplementation.Cleverbot; + c.ChatBot = ChatBotImplementation.Cleverbot; + }); + } + + if (data.Version < 3) + { + ModifyConfig(c => + { + c.Version = 3; + c.ChatGpt.ModelName = ChatGptModel.Gpt35Turbo; }); } } diff --git a/src/NadekoBot/NadekoBot.csproj b/src/NadekoBot/NadekoBot.csproj index ea32dd82c..fdd220e1a 100644 --- a/src/NadekoBot/NadekoBot.csproj +++ b/src/NadekoBot/NadekoBot.csproj @@ -58,6 +58,7 @@ + diff --git a/src/NadekoBot/data/games.yml b/src/NadekoBot/data/games.yml index eefbc952d..5d0c369f8 100644 --- a/src/NadekoBot/data/games.yml +++ b/src/NadekoBot/data/games.yml @@ -1,5 +1,5 @@ # DO NOT CHANGE -version: 2 +version: 3 # Hangman related settings (.hangman command) hangman: # The amount of currency awarded to the winner of a hangman game @@ -57,14 +57,19 @@ raceAnimals: # Which chatbot API should bot use. # 'cleverbot' - bot will use Cleverbot API. # 'gpt3' - bot will use GPT-3 API -chatBot: gpt3 +chatBot: Gpt3 chatGpt: - # Which GPT-3 Model should bot use. - # 'ada001' - cheapest and fastest - # 'babbage001' - 2nd option - # 'curie001' - 3rd option - # 'davinci003' - Most expensive, slowest - model: davinci003 +# Which GPT-3 Model should bot use. + # gpt35turbo - cheapest + # gpt4 - 30x more expensive, higher quality + # gp432k - same model as above, but with a 32k token limit + modelName: Gpt35Turbo + # How should the chat bot behave, whats its personality? (Usage of this counts towards the max tokens) + personalityPrompt: You are a chat bot willing to have a conversation with anyone about anything. + # The maximum number of messages in a conversation that can be remembered. 
(This will increase the number of tokens used) + chatHistory: 5 # The maximum number of tokens to use per GPT-3 API call maxTokens: 100 + # The minimum number of tokens to use per GPT-3 API call, such that chat history is removed to make room. + minTokens: 30 \ No newline at end of file From f69f8548b0d9c77475072bf51ed8a6a60de80673 Mon Sep 17 00:00:00 2001 From: Kwoth Date: Sat, 20 Jan 2024 14:05:20 +0000 Subject: [PATCH 2/5] Added followedStreams.maxCount to searches configx --- .../StreamNotificationService.cs | 150 +++++++++--------- .../Searches/_Common/Config/SearchesConfig.cs | 15 +- .../_Common/Config/SearchesConfigService.cs | 17 +- 3 files changed, 104 insertions(+), 78 deletions(-) diff --git a/src/NadekoBot/Modules/Searches/StreamNotification/StreamNotificationService.cs b/src/NadekoBot/Modules/Searches/StreamNotification/StreamNotificationService.cs index ed7896e82..15807146f 100644 --- a/src/NadekoBot/Modules/Searches/StreamNotification/StreamNotificationService.cs +++ b/src/NadekoBot/Modules/Searches/StreamNotification/StreamNotificationService.cs @@ -28,6 +28,7 @@ public sealed class StreamNotificationService : INService, IReadyExecutor private readonly IPubSub _pubSub; private readonly IEmbedBuilderService _eb; + private readonly SearchesConfigService _config; public TypedKey> StreamsOnlineKey { get; } public TypedKey> StreamsOfflineKey { get; } @@ -49,14 +50,16 @@ public sealed class StreamNotificationService : INService, IReadyExecutor IHttpClientFactory httpFactory, Bot bot, IPubSub pubSub, - IEmbedBuilderService eb) + IEmbedBuilderService eb, + SearchesConfigService config) { _db = db; _client = client; _strings = strings; _pubSub = pubSub; _eb = eb; - + _config = config; + _streamTracker = new(httpFactory, creds); StreamsOnlineKey = new("streams.online"); @@ -69,34 +72,34 @@ public sealed class StreamNotificationService : INService, IReadyExecutor { var ids = client.GetGuildIds(); var guildConfigs = uow.Set() - .AsQueryable() - .Include(x => x.FollowedStreams) - .Where(x => ids.Contains(x.GuildId)) - .ToList(); + .AsQueryable() + .Include(x => x.FollowedStreams) + .Where(x => ids.Contains(x.GuildId)) + .ToList(); _offlineNotificationServers = new(guildConfigs - .Where(gc => gc.NotifyStreamOffline) - .Select(x => x.GuildId) - .ToList()); - + .Where(gc => gc.NotifyStreamOffline) + .Select(x => x.GuildId) + .ToList()); + _deleteOnOfflineServers = new(guildConfigs - .Where(gc => gc.DeleteStreamOnlineMessage) - .Select(x => x.GuildId) - .ToList()); + .Where(gc => gc.DeleteStreamOnlineMessage) + .Select(x => x.GuildId) + .ToList()); var followedStreams = guildConfigs.SelectMany(x => x.FollowedStreams).ToList(); _shardTrackedStreams = followedStreams.GroupBy(x => new - { - x.Type, - Name = x.Username.ToLower() - }) - .ToList() - .ToDictionary( - x => new StreamDataKey(x.Key.Type, x.Key.Name.ToLower()), - x => x.GroupBy(y => y.GuildId) - .ToDictionary(y => y.Key, - y => y.AsEnumerable().ToHashSet())); + { + x.Type, + Name = x.Username.ToLower() + }) + .ToList() + .ToDictionary( + x => new StreamDataKey(x.Key.Type, x.Key.Name.ToLower()), + x => x.GroupBy(y => y.GuildId) + .ToDictionary(y => y.Key, + y => y.AsEnumerable().ToHashSet())); // shard 0 will keep track of when there are no more guilds which track a stream if (client.ShardId == 0) @@ -107,12 +110,12 @@ public sealed class StreamNotificationService : INService, IReadyExecutor _streamTracker.AddLastData(fs.CreateKey(), null, false); _trackCounter = allFollowedStreams.GroupBy(x => new - { - x.Type, - Name = 
x.Username.ToLower() - }) - .ToDictionary(x => new StreamDataKey(x.Key.Type, x.Key.Name), - x => x.Select(fs => fs.GuildId).ToHashSet()); + { + x.Type, + Name = x.Username.ToLower() + }) + .ToDictionary(x => new StreamDataKey(x.Key.Type, x.Key.Name), + x => x.Select(fs => fs.GuildId).ToHashSet()); } } @@ -152,7 +155,7 @@ public sealed class StreamNotificationService : INService, IReadyExecutor continue; var deleteGroups = failingStreams.GroupBy(x => x.Type) - .ToDictionary(x => x.Key, x => x.Select(y => y.Name).ToList()); + .ToDictionary(x => x.Key, x => x.Select(y => y.Name).ToList()); await using var uow = _db.GetDbContext(); foreach (var kvp in deleteGroups) @@ -165,9 +168,9 @@ public sealed class StreamNotificationService : INService, IReadyExecutor string.Join(", ", kvp.Value)); var toDelete = uow.Set() - .AsQueryable() - .Where(x => x.Type == kvp.Key && kvp.Value.Contains(x.Username)) - .ToList(); + .AsQueryable() + .Where(x => x.Type == kvp.Key && kvp.Value.Contains(x.Username)) + .ToList(); uow.RemoveRange(toDelete); await uow.SaveChangesAsync(); @@ -246,17 +249,17 @@ public sealed class StreamNotificationService : INService, IReadyExecutor if (_shardTrackedStreams.TryGetValue(key, out var fss)) { await fss - // send offline stream notifications only to guilds which enable it with .stoff - .SelectMany(x => x.Value) - .Where(x => _offlineNotificationServers.Contains(x.GuildId)) - .Select(fs => _client.GetGuild(fs.GuildId) - ?.GetTextChannel(fs.ChannelId) - ?.EmbedAsync(GetEmbed(fs.GuildId, stream))) - .WhenAll(); + // send offline stream notifications only to guilds which enable it with .stoff + .SelectMany(x => x.Value) + .Where(x => _offlineNotificationServers.Contains(x.GuildId)) + .Select(fs => _client.GetGuild(fs.GuildId) + ?.GetTextChannel(fs.ChannelId) + ?.EmbedAsync(GetEmbed(fs.GuildId, stream))) + .WhenAll(); } } } - + private async ValueTask HandleStreamsOnline(List onlineStreams) { @@ -266,30 +269,30 @@ public sealed class StreamNotificationService : INService, IReadyExecutor if (_shardTrackedStreams.TryGetValue(key, out var fss)) { var messages = await fss.SelectMany(x => x.Value) - .Select(async fs => - { - var textChannel = _client.GetGuild(fs.GuildId)?.GetTextChannel(fs.ChannelId); + .Select(async fs => + { + var textChannel = _client.GetGuild(fs.GuildId)?.GetTextChannel(fs.ChannelId); - if (textChannel is null) - return default; + if (textChannel is null) + return default; - var rep = new ReplacementBuilder().WithOverride("%user%", () => fs.Username) - .WithOverride("%platform%", () => fs.Type.ToString()) - .Build(); + var rep = new ReplacementBuilder().WithOverride("%user%", () => fs.Username) + .WithOverride("%platform%", () => fs.Type.ToString()) + .Build(); - var message = string.IsNullOrWhiteSpace(fs.Message) ? "" : rep.Replace(fs.Message); + var message = string.IsNullOrWhiteSpace(fs.Message) ? 
"" : rep.Replace(fs.Message); - var msg = await textChannel.EmbedAsync(GetEmbed(fs.GuildId, stream, false), message); + var msg = await textChannel.EmbedAsync(GetEmbed(fs.GuildId, stream, false), message); + + // only cache the ids of channel/message pairs + if (_deleteOnOfflineServers.Contains(fs.GuildId)) + return (textChannel.Id, msg.Id); + else + return default; + }) + .WhenAll(); - // only cache the ids of channel/message pairs - if(_deleteOnOfflineServers.Contains(fs.GuildId)) - return (textChannel.Id, msg.Id); - else - return default; - }) - .WhenAll(); - // push online stream messages to redis // when streams go offline, any server which // has the online stream message deletion feature @@ -297,16 +300,15 @@ public sealed class StreamNotificationService : INService, IReadyExecutor try { var pairs = messages - .Where(x => x != default) - .Select(x => (x.Item1, x.Item2)) - .ToList(); + .Where(x => x != default) + .Select(x => (x.Item1, x.Item2)) + .ToList(); if (pairs.Count > 0) await OnlineMessagesSent(key.Type, key.Name, pairs); } catch { - } } } @@ -384,10 +386,10 @@ public sealed class StreamNotificationService : INService, IReadyExecutor await using (var uow = _db.GetDbContext()) { var fss = uow.Set() - .AsQueryable() - .Where(x => x.GuildId == guildId) - .OrderBy(x => x.Id) - .ToList(); + .AsQueryable() + .Where(x => x.GuildId == guildId) + .OrderBy(x => x.Id) + .ToList(); // out of range if (fss.Count <= index) @@ -450,7 +452,9 @@ public sealed class StreamNotificationService : INService, IReadyExecutor GuildId = guildId }; - if (gc.FollowedStreams.Count >= 10) + var config = _config.Data; + if (config.FollowedStreams.MaxCount is not -1 + && gc.FollowedStreams.Count >= config.FollowedStreams.MaxCount) return null; gc.FollowedStreams.Add(fs); @@ -475,10 +479,10 @@ public sealed class StreamNotificationService : INService, IReadyExecutor public IEmbedBuilder GetEmbed(ulong guildId, StreamData status, bool showViewers = true) { var embed = _eb.Create() - .WithTitle(status.Name) - .WithUrl(status.StreamUrl) - .WithDescription(status.StreamUrl) - .AddField(GetText(guildId, strs.status), status.IsLive ? "🟢 Online" : "🔴 Offline", true); + .WithTitle(status.Name) + .WithUrl(status.StreamUrl) + .WithDescription(status.StreamUrl) + .AddField(GetText(guildId, strs.status), status.IsLive ? "🟢 Online" : "🔴 Offline", true); if (showViewers) { @@ -527,7 +531,7 @@ public sealed class StreamNotificationService : INService, IReadyExecutor return newValue; } - + public bool ToggleStreamOnlineDelete(ulong guildId) { using var uow = _db.GetDbContext(); diff --git a/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfig.cs b/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfig.cs index f2147e547..4bab0d8b8 100644 --- a/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfig.cs +++ b/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfig.cs @@ -8,18 +8,18 @@ public partial class SearchesConfig : ICloneable { [Comment("DO NOT CHANGE")] public int Version { get; set; } = 0; - + [Comment(@"Which engine should .search command 'google_scrape' - default. Scrapes the webpage for results. May break. Requires no api keys. 'google' - official google api. Requires googleApiKey and google.searchId set in creds.yml 'searx' - requires at least one searx instance specified in the 'searxInstances' property below")] public WebSearchEngine WebSearchEngine { get; set; } = WebSearchEngine.Google_Scrape; - + [Comment(@"Which engine should .image command use 'google'- official google api. 
googleApiKey and google.imageSearchId set in creds.yml 'searx' requires at least one searx instance specified in the 'searxInstances' property below")] public ImgSearchEngine ImgSearchEngine { get; set; } = ImgSearchEngine.Google; - + [Comment(@"Which search provider will be used for the `.youtube` command. @@ -55,6 +55,15 @@ Use a fully qualified url. Example: https://my-invidious-instance.mydomain.com Instances specified must have api available. You check that by opening an api endpoint in your browser. For example: https://my-invidious-instance.mydomain.com/api/v1/trending")] public List InvidiousInstances { get; set; } = new List(); + + [Comment("Maximum number of followed streams per server")] + public FollowedStreamConfig FollowedStreams { get; set; } = new FollowedStreamConfig(); +} + +public sealed class FollowedStreamConfig +{ + [Comment("Maximum number of streams that each server can follow. -1 for infinite")] + public int MaxCount { get; set; } = 10; } public enum YoutubeSearcher diff --git a/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs b/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs index 4d58098b4..87de6ec0e 100644 --- a/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs +++ b/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs @@ -17,17 +17,22 @@ public class SearchesConfigService : ConfigServiceBase sc => sc.WebSearchEngine, ConfigParsers.InsensitiveEnum, ConfigPrinters.ToString); - + AddParsedProp("imgEngine", sc => sc.ImgSearchEngine, ConfigParsers.InsensitiveEnum, ConfigPrinters.ToString); - + AddParsedProp("ytProvider", sc => sc.YtProvider, ConfigParsers.InsensitiveEnum, ConfigPrinters.ToString); + AddParsedProp("followedStreams.maxCount", + sc => sc.FollowedStreams.MaxCount, + ConfigParsers.InsensitiveEnum, + ConfigPrinters.ToString); + Migrate(); } @@ -41,5 +46,13 @@ public class SearchesConfigService : ConfigServiceBase c.WebSearchEngine = WebSearchEngine.Google_Scrape; }); } + + if (data.Version < 2) + { + ModifyConfig(c => + { + c.Version = 2; + }); + } } } \ No newline at end of file From b0ac35b82eb1a1b0e5b0fc7f11f711efa0c79f9e Mon Sep 17 00:00:00 2001 From: Kwoth Date: Sat, 20 Jan 2024 14:15:30 +0000 Subject: [PATCH 3/5] Updated changelog. Version upped to 4.3.19 --- CHANGELOG.md | 13 +++++++++++++ src/NadekoBot/Services/Impl/StatsService.cs | 2 +- src/NadekoBot/data/searches.yml | 6 +++++- 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 581080202..cd1040ce3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,19 @@ Experimental changelog. Mostly based on [keepachangelog](https://keepachangelog.com/en/1.0.0/) except date format. 
a-c-f-r-o + +## [4.3.19] - 20.01.2024 + +### Added +- Added `followedStreams.maxCount` to `searches.yml` which lets bot owners change the default of 10 per server + +### Changed +- Improvements to GPT ChatterBot (thx alexandra) +- Add a personality prompt to tweak the way chatgpt bot behaves +- Added Chat history support to chatgpt ChatterBot +- Chatgpt token usage now correctly calculated +- More chatgpt configs in `games.yml` + ## [4.3.18] - 26.12.2023 ### Added diff --git a/src/NadekoBot/Services/Impl/StatsService.cs b/src/NadekoBot/Services/Impl/StatsService.cs index a69d6503c..393d78cab 100644 --- a/src/NadekoBot/Services/Impl/StatsService.cs +++ b/src/NadekoBot/Services/Impl/StatsService.cs @@ -7,7 +7,7 @@ namespace NadekoBot.Services; public sealed class StatsService : IStatsService, IReadyExecutor, INService { - public const string BOT_VERSION = "4.3.18"; + public const string BOT_VERSION = "4.3.19"; public string Author => "Kwoth#2452"; diff --git a/src/NadekoBot/data/searches.yml b/src/NadekoBot/data/searches.yml index 0c2625bbb..e3b8d2b38 100644 --- a/src/NadekoBot/data/searches.yml +++ b/src/NadekoBot/data/searches.yml @@ -1,5 +1,5 @@ # DO NOT CHANGE -version: 1 +version: 2 # Which engine should .search command # 'google_scrape' - default. Scrapes the webpage for results. May break. Requires no api keys. # 'google' - official google api. Requires googleApiKey and google.searchId set in creds.yml @@ -41,3 +41,7 @@ searxInstances: [] # Instances specified must have api available. # You check that by opening an api endpoint in your browser. For example: https://my-invidious-instance.mydomain.com/api/v1/trending invidiousInstances: [] +# Maximum number of followed streams per server +followedStreams: +# Maximum number of streams that each server can follow. -1 for infinite + maxCount: 10 From a7e1e8a98263414bbc1b98ccce0668218940e97f Mon Sep 17 00:00:00 2001 From: Kwoth Date: Sat, 20 Jan 2024 15:06:45 +0000 Subject: [PATCH 4/5] Fixed .config searches followedStreams.maxCount. Version upped to 4.3.20 --- CHANGELOG.md | 5 ++++- .../Modules/Searches/_Common/Config/SearchesConfigService.cs | 2 +- src/NadekoBot/Services/Impl/StatsService.cs | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cd1040ce3..37ba26692 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,10 @@ Experimental changelog. Mostly based on [keepachangelog](https://keepachangelog.com/en/1.0.0/) except date format. a-c-f-r-o +## [4.3.20] - 20.01.2024 + +### Fixed +- Fixed `.config searches followedStreams.maxCount` not working ## [4.3.19] - 20.01.2024 @@ -36,7 +40,6 @@ Experimental changelog. Mostly based on [keepachangelog](https://keepachangelog. 
### Removed - `.revimg` and `.revav` as google removed reverse image search -- ## [4.3.17] - 06.09.2023 diff --git a/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs b/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs index 87de6ec0e..378c46a65 100644 --- a/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs +++ b/src/NadekoBot/Modules/Searches/_Common/Config/SearchesConfigService.cs @@ -30,7 +30,7 @@ public class SearchesConfigService : ConfigServiceBase AddParsedProp("followedStreams.maxCount", sc => sc.FollowedStreams.MaxCount, - ConfigParsers.InsensitiveEnum, + int.TryParse, ConfigPrinters.ToString); Migrate(); diff --git a/src/NadekoBot/Services/Impl/StatsService.cs b/src/NadekoBot/Services/Impl/StatsService.cs index 393d78cab..970a004fc 100644 --- a/src/NadekoBot/Services/Impl/StatsService.cs +++ b/src/NadekoBot/Services/Impl/StatsService.cs @@ -7,7 +7,7 @@ namespace NadekoBot.Services; public sealed class StatsService : IStatsService, IReadyExecutor, INService { - public const string BOT_VERSION = "4.3.19"; + public const string BOT_VERSION = "4.3.20"; public string Author => "Kwoth#2452"; From 127a46a9b802e647e576bf0558c0909aa76f4f43 Mon Sep 17 00:00:00 2001 From: Kwoth Date: Fri, 9 Feb 2024 02:32:34 +0000 Subject: [PATCH 5/5] Possible fix for duplicate .bank take in .h .bank --- src/NadekoBot/Modules/Help/Help.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/NadekoBot/Modules/Help/Help.cs b/src/NadekoBot/Modules/Help/Help.cs index 49bd46c7d..92dfeb829 100644 --- a/src/NadekoBot/Modules/Help/Help.cs +++ b/src/NadekoBot/Modules/Help/Help.cs @@ -292,7 +292,7 @@ public partial class Help : NadekoModule .WithTitle(GetText(strs.cmd_group_commands(group.Name))) .WithOkColor(); - foreach (var cmd in group.Commands) + foreach (var cmd in group.Commands.DistinctBy(x => x.Aliases[0])) { eb.AddField(prefix + cmd.Aliases.First(), cmd.RealSummary(_strings, _medusae, Culture, prefix)); }