Merge branch 'v3-dev' into 'v3'

backport of public nsfw module

See merge request Kwoth/nadekobot!176
This commit is contained in:
Kwoth
2021-10-21 23:35:58 +00:00
44 changed files with 4284 additions and 713 deletions

View File

@@ -54,7 +54,6 @@ namespace NadekoBot.Db
.Include(gc => gc.CommandCooldowns)
.Include(gc => gc.FollowedStreams)
.Include(gc => gc.StreamRole)
.Include(gc => gc.NsfwBlacklistedTags)
.Include(gc => gc.XpSettings)
.ThenInclude(x => x.ExclusionList)
.Include(gc => gc.DelMsgOnCmdChannels)

View File

@@ -91,7 +91,6 @@ namespace NadekoBot.Services.Database.Models
public bool WarningsInitialized { get; set; }
public HashSet<SlowmodeIgnoredUser> SlowmodeIgnoredUsers { get; set; }
public HashSet<SlowmodeIgnoredRole> SlowmodeIgnoredRoles { get; set; }
public HashSet<NsfwBlacklitedTag> NsfwBlacklistedTags { get; set; } = new HashSet<NsfwBlacklitedTag>();
public List<ShopEntry> ShopEntries { get; set; }
public ulong? GameVoiceChannel { get; set; } = null;

View File

@@ -0,0 +1,16 @@
using System;
namespace NadekoBot.Services.Database.Models
{
public class NsfwBlacklistedTag : DbEntity
{
public ulong GuildId { get; set; }
public string Tag { get; set; }
public override int GetHashCode()
=> Tag.GetHashCode(StringComparison.InvariantCulture);
public override bool Equals(object obj)
=> obj is NsfwBlacklistedTag x && x.Tag == Tag;
}
}

View File

@@ -1,21 +0,0 @@
using System;
namespace NadekoBot.Services.Database.Models
{
public class NsfwBlacklitedTag : DbEntity
{
public string Tag { get; set; }
public override int GetHashCode()
{
return Tag.GetHashCode(StringComparison.InvariantCulture);
}
public override bool Equals(object obj)
{
return obj is NsfwBlacklitedTag x
? x.Tag == Tag
: false;
}
}
}

View File

@@ -59,6 +59,7 @@ namespace NadekoBot.Services.Database
public DbSet<Poll> Poll { get; set; }
public DbSet<WaifuInfo> WaifuInfo { get; set; }
public DbSet<ImageOnlyChannel> ImageOnlyChannels { get; set; }
public DbSet<NsfwBlacklistedTag> NsfwBlacklistedTags { get; set; }
public NadekoContext(DbContextOptions<NadekoContext> options) : base(options)
{
@@ -357,6 +358,10 @@ namespace NadekoBot.Services.Database
modelBuilder.Entity<ImageOnlyChannel>(ioc => ioc
.HasIndex(x => x.ChannelId)
.IsUnique());
modelBuilder.Entity<NsfwBlacklistedTag>(nbt => nbt
.HasIndex(x => x.GuildId)
.IsUnique(false));
}
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,76 @@
using System;
using Microsoft.EntityFrameworkCore.Migrations;
namespace NadekoBot.Migrations
{
public partial class nsfwblacklisttags : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "NsfwBlacklistedTags",
columns: table => new
{
Id = table.Column<int>(type: "INTEGER", nullable: false)
.Annotation("Sqlite:Autoincrement", true),
GuildId = table.Column<ulong>(type: "INTEGER", nullable: false),
Tag = table.Column<string>(type: "TEXT", nullable: true),
DateAdded = table.Column<DateTime>(type: "TEXT", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_NsfwBlacklistedTags", x => x.Id);
});
migrationBuilder.CreateIndex(
name: "IX_NsfwBlacklistedTags_GuildId",
table: "NsfwBlacklistedTags",
column: "GuildId");
migrationBuilder.Sql(@"INSERT INTO NsfwBlacklistedTags(Id, GuildId, Tag, DateAdded)
SELECT
Id,
(SELECT GuildId From GuildConfigs WHERE Id=GuildConfigId),
Tag,
DateAdded
FROM NsfwBlacklitedTag
WHERE GuildConfigId in (SELECT Id from GuildConfigs);");
migrationBuilder.DropTable(
name: "NsfwBlacklitedTag");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "NsfwBlacklistedTags");
migrationBuilder.CreateTable(
name: "NsfwBlacklitedTag",
columns: table => new
{
Id = table.Column<int>(type: "INTEGER", nullable: false)
.Annotation("Sqlite:Autoincrement", true),
DateAdded = table.Column<DateTime>(type: "TEXT", nullable: true),
GuildConfigId = table.Column<int>(type: "INTEGER", nullable: true),
Tag = table.Column<string>(type: "TEXT", nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_NsfwBlacklitedTag", x => x.Id);
table.ForeignKey(
name: "FK_NsfwBlacklitedTag_GuildConfigs_GuildConfigId",
column: x => x.GuildConfigId,
principalTable: "GuildConfigs",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
});
migrationBuilder.CreateIndex(
name: "IX_NsfwBlacklitedTag_GuildConfigId",
table: "NsfwBlacklitedTag",
column: "GuildConfigId");
}
}
}

View File

@@ -1062,7 +1062,7 @@ namespace NadekoBot.Migrations
b.ToTable("MutedUserId");
});
modelBuilder.Entity("NadekoBot.Services.Database.Models.NsfwBlacklitedTag", b =>
modelBuilder.Entity("NadekoBot.Services.Database.Models.NsfwBlacklistedTag", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
@@ -1071,7 +1071,7 @@ namespace NadekoBot.Migrations
b.Property<DateTime?>("DateAdded")
.HasColumnType("TEXT");
b.Property<int?>("GuildConfigId")
b.Property<ulong>("GuildId")
.HasColumnType("INTEGER");
b.Property<string>("Tag")
@@ -1079,9 +1079,9 @@ namespace NadekoBot.Migrations
b.HasKey("Id");
b.HasIndex("GuildConfigId");
b.HasIndex("GuildId");
b.ToTable("NsfwBlacklitedTag");
b.ToTable("NsfwBlacklistedTags");
});
modelBuilder.Entity("NadekoBot.Services.Database.Models.Permissionv2", b =>
@@ -2300,13 +2300,6 @@ namespace NadekoBot.Migrations
.HasForeignKey("GuildConfigId");
});
modelBuilder.Entity("NadekoBot.Services.Database.Models.NsfwBlacklitedTag", b =>
{
b.HasOne("NadekoBot.Services.Database.Models.GuildConfig", null)
.WithMany("NsfwBlacklistedTags")
.HasForeignKey("GuildConfigId");
});
modelBuilder.Entity("NadekoBot.Services.Database.Models.Permissionv2", b =>
{
b.HasOne("NadekoBot.Services.Database.Models.GuildConfig", null)
@@ -2573,8 +2566,6 @@ namespace NadekoBot.Migrations
b.Navigation("MutedUsers");
b.Navigation("NsfwBlacklistedTags");
b.Navigation("Permissions");
b.Navigation("ReactionRoleMessages");

View File

@@ -0,0 +1,15 @@
namespace NadekoBot.Modules.Nsfw.Common
{
public enum Booru
{
Safebooru,
E621,
Derpibooru,
Rule34,
Gelbooru,
Konachan,
Yandere,
Danbooru,
Sankaku
}
}

View File

@@ -0,0 +1,18 @@
using System.Text.Json.Serialization;
namespace NadekoBot.Modules.Nsfw.Common
{
public class DapiImageObject : IImageData
{
[JsonPropertyName("File_Url")]
public string FileUrl { get; set; }
public string Tags { get; set; }
[JsonPropertyName("Tag_String")]
public string TagString { get; set; }
public int Score { get; set; }
public string Rating { get; set; }
public ImageData ToCachedImageData(Booru type)
=> new ImageData(this.FileUrl, type, this.Tags?.Split(' ') ?? this.TagString?.Split(' '), Score.ToString() ?? Rating);
}
}

View File

@@ -0,0 +1,15 @@
using System.Text.Json.Serialization;
namespace NadekoBot.Modules.Nsfw.Common
{
public readonly struct DapiTag
{
public string Name { get; }
[JsonConstructor]
public DapiTag(string name)
{
Name = name;
}
}
}

View File

@@ -0,0 +1,19 @@
using System.Text.Json.Serialization;
namespace NadekoBot.Modules.Nsfw.Common
{
public class DerpiContainer
{
public DerpiImageObject[] Images { get; set; }
}
public class DerpiImageObject : IImageData
{
[JsonPropertyName("view_url")]
public string ViewUrl { get; set; }
public string[] Tags { get; set; }
public int Score { get; set; }
public ImageData ToCachedImageData(Booru type)
=> new(ViewUrl, type, Tags, Score.ToString("F1"));
}
}

View File

@@ -0,0 +1,41 @@
using System.Collections.Concurrent;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public sealed class DanbooruImageDownloader : DapiImageDownloader
{
// using them as concurrent hashsets, value doesn't matter
private static readonly ConcurrentDictionary<string, bool> _existentTags = new();
private static readonly ConcurrentDictionary<string, bool> _nonexistentTags = new();
public override async Task<bool> IsTagValid(string tag, CancellationToken cancel = default)
{
if (_existentTags.ContainsKey(tag))
return true;
if (_nonexistentTags.ContainsKey(tag))
return false;
var tags = await _http.GetFromJsonAsync<DapiTag[]>(_baseUrl +
"/tags.json" +
$"?search[name_or_alias_matches]={tag}",
options: this._serializerOptions,
cancellationToken: cancel);
if (tags is {Length: > 0})
{
return _existentTags[tag] = true;
}
return _nonexistentTags[tag] = false;
}
public DanbooruImageDownloader(HttpClient http)
: base(Booru.Danbooru, http, "http://danbooru.donmai.us")
{
}
}
}

View File

@@ -0,0 +1,56 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public abstract class DapiImageDownloader : ImageDownloader<DapiImageObject>
{
protected readonly string _baseUrl;
public DapiImageDownloader(Booru booru, HttpClient http, string baseUrl) : base(booru, http)
{
_baseUrl = baseUrl;
}
public abstract Task<bool> IsTagValid(string tag, CancellationToken cancel = default);
protected async Task<bool> AllTagsValid(string[] tags, CancellationToken cancel = default)
{
var results = await Task.WhenAll(tags.Select(tag => IsTagValid(tag, cancel)));
// if any of the tags is not valid, the query is not valid
foreach (var result in results)
{
if (!result)
return false;
}
return true;
}
public override async Task<List<DapiImageObject>> DownloadImagesAsync(string[] tags, int page,
bool isExplicit = false, CancellationToken cancel = default)
{
// up to 2 tags allowed on danbooru
if (tags.Length > 2)
return new();
if (!await AllTagsValid(tags, cancel).ConfigureAwait(false))
return new();
var tagString = ImageDownloaderHelper.GetTagString(tags, isExplicit);
var uri = $"{_baseUrl}/posts.json?limit=200&tags={tagString}&page={page}";
var imageObjects = await _http.GetFromJsonAsync<DapiImageObject[]>(uri, _serializerOptions, cancel)
.ConfigureAwait(false);
if (imageObjects is null)
return new();
return imageObjects
.Where(x => x.FileUrl is not null)
.ToList();
}
}
}

View File

@@ -0,0 +1,35 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
using NadekoBot.Extensions;
namespace NadekoBot.Modules.Nsfw.Common
{
public class DerpibooruImageDownloader : ImageDownloader<DerpiImageObject>
{
public DerpibooruImageDownloader(HttpClient http) : base(Booru.Derpibooru, http)
{
}
public override async Task<List<DerpiImageObject>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
var tagString = ImageDownloaderHelper.GetTagString(tags, isExplicit);
var uri = $"https://www.derpibooru.org/api/v1/json/search/images?q={tagString.Replace('+', ',')}&per_page=49&page={page}";
using var req = new HttpRequestMessage(HttpMethod.Get, uri);
req.Headers.AddFakeHeaders();
using var res = await _http.SendAsync(req, cancel).ConfigureAwait(false);
res.EnsureSuccessStatusCode();
var container = await res.Content.ReadFromJsonAsync<DerpiContainer>(_serializerOptions, cancel).ConfigureAwait(false);
if (container?.Images is null)
return new();
return container.Images
.Where(x => !string.IsNullOrWhiteSpace(x.ViewUrl))
.ToList();
}
}
}

View File

@@ -0,0 +1,35 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
using NadekoBot.Extensions;
namespace NadekoBot.Modules.Nsfw.Common
{
public class E621ImageDownloader : ImageDownloader<E621Object>
{
public E621ImageDownloader(HttpClient http) : base(Booru.E621, http)
{
}
public override async Task<List<E621Object>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
var tagString = ImageDownloaderHelper.GetTagString(tags, isExplicit: isExplicit);
var uri = $"https://e621.net/posts.json?limit=32&tags={tagString}&page={page}";
using var req = new HttpRequestMessage(HttpMethod.Get, uri);
req.Headers.AddFakeHeaders();
using var res = await _http.SendAsync(req, cancel).ConfigureAwait(false);
res.EnsureSuccessStatusCode();
var data = await res.Content.ReadFromJsonAsync<E621Response>(_serializerOptions, cancel).ConfigureAwait(false);
if (data?.Posts is null)
return new();
return data.Posts
.Where(x => !string.IsNullOrWhiteSpace(x.File?.Url))
.ToList();
}
}
}

View File

@@ -0,0 +1,9 @@
using System.Collections.Generic;
namespace NadekoBot.Modules.Nsfw.Common
{
public class E621Response
{
public List<E621Object> Posts { get; set; }
}
}

View File

@@ -0,0 +1,35 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public class GelbooruImageDownloader : ImageDownloader<DapiImageObject>
{
public GelbooruImageDownloader(HttpClient http) : base(Booru.Gelbooru, http)
{
}
public override async Task<List<DapiImageObject>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
var tagString = ImageDownloaderHelper.GetTagString(tags, isExplicit);
var uri = $"http://gelbooru.com/index.php?page=dapi&s=post&json=1&q=index&limit=100" +
$"&tags={tagString}&pid={page}";
using var req = new HttpRequestMessage(HttpMethod.Get, uri);
using var res = await _http.SendAsync(req, cancel).ConfigureAwait(false);
res.EnsureSuccessStatusCode();
var resString = await res.Content.ReadAsStringAsync(cancel);
if (string.IsNullOrWhiteSpace(resString))
return new();
var images = JsonSerializer.Deserialize<List<DapiImageObject>>(resString, _serializerOptions);
if (images is null)
return new();
return images.Where(x => x.FileUrl is not null).ToList();
}
}
}

View File

@@ -0,0 +1,12 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public interface IImageDownloader
{
Task<List<ImageData>> DownloadImageDataAsync(string[] tags, int page = 0,
bool isExplicit = false, CancellationToken cancel = default);
}
}

View File

@@ -0,0 +1,40 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public abstract class ImageDownloader<T> : IImageDownloader
where T : IImageData
{
protected readonly HttpClient _http;
protected JsonSerializerOptions _serializerOptions = new JsonSerializerOptions()
{
PropertyNameCaseInsensitive = true,
NumberHandling = JsonNumberHandling.WriteAsString | JsonNumberHandling.AllowReadingFromString,
};
public Booru Booru { get; }
public ImageDownloader(Booru booru, HttpClient http)
{
_http = http;
this.Booru = booru;
}
public abstract Task<List<T>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default);
public async Task<List<ImageData>> DownloadImageDataAsync(string[] tags, int page, bool isExplicit = false,
CancellationToken cancel = default)
{
var images = await DownloadImagesAsync(tags, page, isExplicit, cancel).ConfigureAwait(false);
return images.Select(x => x.ToCachedImageData(Booru)).ToList();
}
}
}

View File

@@ -0,0 +1,17 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace NadekoBot.Modules.Nsfw.Common
{
public static class ImageDownloaderHelper
{
public static string GetTagString(IEnumerable<string> tags, bool isExplicit = false)
{
if (isExplicit)
tags = tags.Append("rating:explicit");
return string.Join('+', tags.Select(x => x.ToLowerInvariant()));
}
}
}
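For reference, a minimal sketch of what GetTagString above produces, not part of this diff; the tag values are made-up examples and the snippet assumes the NadekoBot.Modules.Nsfw.Common namespace plus the usual System usings:
// Hypothetical usage of ImageDownloaderHelper.GetTagString (example tags are invented).
var tagString = ImageDownloaderHelper.GetTagString(new[] { "Landscape", "Sunset" }, isExplicit: true);
// Tags are lower-cased and joined with '+', with "rating:explicit" appended when isExplicit is true.
Console.WriteLine(tagString); // landscape+sunset+rating:explicit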

View File

@@ -0,0 +1,33 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public sealed class KonachanImageDownloader : ImageDownloader<DapiImageObject>
{
private readonly string _baseUrl;
public KonachanImageDownloader(HttpClient http)
: base(Booru.Konachan, http)
{
_baseUrl = "https://konachan.com";
}
public override async Task<List<DapiImageObject>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
var tagString = ImageDownloaderHelper.GetTagString(tags, isExplicit);
var uri = $"{_baseUrl}/post.json?s=post&q=index&limit=200&tags={tagString}&page={page}";
var imageObjects = await _http.GetFromJsonAsync<DapiImageObject[]>(uri, _serializerOptions, cancel)
.ConfigureAwait(false);
if (imageObjects is null)
return new();
return imageObjects
.Where(x => x.FileUrl is not null)
.ToList();
}
}
}

View File

@@ -0,0 +1,31 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public class Rule34ImageDownloader : ImageDownloader<Rule34Object>
{
public Rule34ImageDownloader(HttpClient http) : base(Booru.Rule34, http)
{
}
public override async Task<List<Rule34Object>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
var tagString = ImageDownloaderHelper.GetTagString(tags);
var uri = $"https://rule34.xxx/index.php?page=dapi&s=post&q=index&json=1&limit=100" +
$"&tags={tagString}&pid={page}";
var images = await _http.GetFromJsonAsync<List<Rule34Object>>(uri, _serializerOptions, cancel).ConfigureAwait(false);
if (images is null)
return new();
return images
.Where(img => !string.IsNullOrWhiteSpace(img.Directory) && !string.IsNullOrWhiteSpace(img.Image))
.ToList();
}
}
}

View File

@@ -0,0 +1,26 @@
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
namespace NadekoBot.Modules.Nsfw.Common
{
public class SafebooruImageDownloader : ImageDownloader<SafebooruElement>
{
public SafebooruImageDownloader(HttpClient http) : base(Booru.Safebooru, http)
{
}
public override async Task<List<SafebooruElement>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
var tagString = ImageDownloaderHelper.GetTagString(tags, isExplicit: false);
var uri = $"https://safebooru.org/index.php?page=dapi&s=post&q=index&limit=200&tags={tagString}&json=1&pid={page}";
var images = await _http.GetFromJsonAsync<List<SafebooruElement>>(uri, _serializerOptions, cancellationToken: cancel);
if (images is null)
return new();
return images;
}
}
}

View File

@@ -0,0 +1,34 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using NadekoBot.Extensions;
namespace NadekoBot.Modules.Nsfw.Common
{
public sealed class SankakuImageDownloader : ImageDownloader<SankakuImageObject>
{
private readonly string _baseUrl;
public SankakuImageDownloader(HttpClient http)
: base(Booru.Sankaku, http)
{
_baseUrl = "https://capi-v2.sankakucomplex.com";
_http.AddFakeHeaders();
}
public override async Task<List<SankakuImageObject>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
// explicit probably not supported
var tagString = ImageDownloaderHelper.GetTagString(tags, false);
var uri = $"{_baseUrl}/posts?tags={tagString}&limit=50";
var data = await _http.GetStringAsync(uri).ConfigureAwait(false);
return JsonSerializer.Deserialize<SankakuImageObject[]>(data, _serializerOptions)
.Where(x => !string.IsNullOrWhiteSpace(x.FileUrl) && x.FileType.StartsWith("image"))
.ToList();
}
}
}

View File

@@ -0,0 +1,34 @@
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
using NadekoBot.Extensions;
namespace NadekoBot.Modules.Nsfw.Common
{
public sealed class YandereImageDownloader : ImageDownloader<DapiImageObject>
{
private readonly string _baseUrl;
public YandereImageDownloader(HttpClient http)
: base(Booru.Yandere, http)
{
_baseUrl = "https://yande.re";
}
public override async Task<List<DapiImageObject>> DownloadImagesAsync(string[] tags, int page, bool isExplicit = false, CancellationToken cancel = default)
{
var tagString = ImageDownloaderHelper.GetTagString(tags, isExplicit);
var uri = $"{_baseUrl}/post.json?limit=200&tags={tagString}&page={page}";
var imageObjects = await _http.GetFromJsonAsync<DapiImageObject[]>(uri, _serializerOptions, cancel)
.ConfigureAwait(false);
if (imageObjects is null)
return new();
return imageObjects
.Where(x => x.FileUrl is not null)
.ToList();
}
}
}

View File

@@ -0,0 +1,27 @@
namespace NadekoBot.Modules.Nsfw.Common
{
public class E621Object : IImageData
{
public class FileData
{
public string Url { get; set; }
}
public class TagData
{
public string[] General { get; set; }
}
public class ScoreData
{
public int Total { get; set; }
}
public FileData File { get; set; }
public TagData Tags { get; set; }
public ScoreData Score { get; set; }
public ImageData ToCachedImageData(Booru type)
=> new(File.Url, Booru.E621, Tags.General, Score.Total.ToString());
}
}

View File

@@ -0,0 +1,7 @@
namespace NadekoBot.Modules.Nsfw.Common
{
public interface IImageData
{
ImageData ToCachedImageData(Booru type);
}
}

View File

@@ -0,0 +1,43 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace NadekoBot.Modules.Nsfw.Common
{
public class ImageData : IComparable<ImageData>
{
public Booru SearchType { get; }
public string FileUrl { get; }
public HashSet<string> Tags { get; }
public string Rating { get; }
public ImageData(string url, Booru type, string[] tags, string rating)
{
if (type == Booru.Danbooru && !Uri.IsWellFormedUriString(url, UriKind.Absolute))
{
this.FileUrl = "https://danbooru.donmai.us" + url;
}
else
{
this.FileUrl = url.StartsWith("http", StringComparison.InvariantCulture) ? url : "https:" + url;
}
this.SearchType = type;
this.Tags = tags.ToHashSet();
this.Rating = rating;
}
public override string ToString()
{
return FileUrl;
}
public override int GetHashCode() => FileUrl.GetHashCode();
public override bool Equals(object obj)
=> obj is ImageData ico && ico.FileUrl == this.FileUrl;
public int CompareTo(ImageData other)
=> string.Compare(FileUrl, other.FileUrl, StringComparison.InvariantCulture);
}
}
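A small illustration, not part of this commit, of how the FileUrl-based Equals/GetHashCode above let a HashSet<ImageData> deduplicate results by URL; the URLs, tags and rating are placeholders and the snippet assumes the same namespace and the usual usings:
// Two hypothetical entries sharing a FileUrl collapse to a single set element.
var first = new ImageData("https://example.com/a.png", Booru.Gelbooru, new[] { "tag1" }, "e");
var second = new ImageData("https://example.com/a.png", Booru.Gelbooru, new[] { "tag2" }, "e");
var set = new HashSet<ImageData> { first, second };
Console.WriteLine(set.Count); // 1 - equality and hashing look only at FileUrl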

View File

@@ -0,0 +1,19 @@
namespace NadekoBot.Modules.Nsfw.Common
{
public class Rule34Object : IImageData
{
public string Image { get; init; }
public string Directory { get; init; }
public string Tags { get; init; }
public int Score { get; init; }
public ImageData ToCachedImageData(Booru type)
{
return new ImageData(
$"https://img.rule34.xxx//images/{Directory}/{Image}",
Booru.Rule34,
Tags.Split(' '),
Score.ToString());
}
}
}

View File

@@ -0,0 +1,15 @@
namespace NadekoBot.Modules.Nsfw.Common
{
public class SafebooruElement : IImageData
{
public string Directory { get; set; }
public string Image { get; set; }
public string FileUrl => $"https://safebooru.org/images/{Directory}/{Image}";
public string Rating { get; set; }
public string Tags { get; set; }
public ImageData ToCachedImageData(Booru type)
=> new ImageData(FileUrl, Booru.Safebooru, this.Tags.Split(' '), Rating);
}
}

View File

@@ -0,0 +1,27 @@
using System.Linq;
using System.Text.Json.Serialization;
namespace NadekoBot.Modules.Nsfw.Common
{
public class SankakuImageObject : IImageData
{
public class Tag
{
public string Name { get; set; }
}
[JsonPropertyName("file_url")]
public string FileUrl { get; set; }
[JsonPropertyName("file_type")]
public string FileType { get; set; }
public Tag[] Tags { get; set; }
[JsonPropertyName("total_score")]
public int Score { get; set; }
public ImageData ToCachedImageData(Nsfw.Common.Booru type)
=> new(FileUrl, Nsfw.Common.Booru.Sankaku, Tags.Select(x => x.Name).ToArray(), Score.ToString());
}
}

View File

@@ -1,3 +0,0 @@
namespace NadekoBot.Modules.NSFW.Exceptions
{
}

View File

@@ -0,0 +1,31 @@
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;
using NadekoBot.Modules.Nsfw.Common;
using NadekoBot.Modules.Searches.Common;
namespace NadekoBot.Modules.Nsfw
{
public interface ISearchImagesService
{
Task<UrlReply> Gelbooru(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> Danbooru(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> Konachan(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> Yandere(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> Rule34(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> E621(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> DerpiBooru(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> Sankaku(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> SafeBooru(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> Hentai(ulong? guildId, bool forceExplicit, string[] tags);
Task<UrlReply> Boobs();
ValueTask<bool> ToggleBlacklistTag(ulong guildId, string tag);
ValueTask<string[]> GetBlacklistedTags(ulong guildId);
Task<UrlReply> Butts();
Task<Gallery> GetNhentaiByIdAsync(uint id);
Task<Gallery> GetNhentaiBySearchAsync(string search);
ConcurrentDictionary<ulong, Timer> AutoHentaiTimers { get; }
ConcurrentDictionary<ulong, Timer> AutoBoobTimers { get; }
ConcurrentDictionary<ulong, Timer> AutoButtTimers { get; }
}
}
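A hedged consumer sketch, not taken from this diff, showing the intended call pattern for the interface above; per the UrlReply record added later in SearchImagesService, Error is an empty string on success and set on failure. The method name, guild id and tags below are placeholders, and the snippet assumes an injected ISearchImagesService and the usual usings:
// Hypothetical caller illustrating the UrlReply error-vs-url contract.
public async Task ShowDanbooruImageAsync(ISearchImagesService service, ulong guildId)
{
    var reply = await service.Danbooru(guildId, forceExplicit: false, new[] { "landscape" });
    if (reply is null || !string.IsNullOrWhiteSpace(reply.Error))
    {
        Console.WriteLine("No results.");
        return;
    }
    Console.WriteLine($"{reply.Url} ({reply.Provider}, rating: {reply.Rating})");
}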

View File

@@ -5,68 +5,28 @@ using NadekoBot.Common.Attributes;
using NadekoBot.Common.Collections;
using NadekoBot.Extensions;
using NadekoBot.Modules.Searches.Common;
using NadekoBot.Modules.Searches.Services;
using Newtonsoft.Json.Linq;
using System;
using System.Linq;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using CommandLine;
using Serilog;
namespace NadekoBot.Modules.NSFW
namespace NadekoBot.Modules.Nsfw
{
// thanks to halitalf for adding autoboob and autobutt features :D
public class NSFW : NadekoModule<SearchesService>
[NoPublicBot]
public class NSFW : NadekoModule<ISearchImagesService>
{
private static readonly ConcurrentHashSet<ulong> _hentaiBombBlacklist = new ConcurrentHashSet<ulong>();
private readonly IHttpClientFactory _httpFactory;
private readonly NadekoRandom _rng;
public NSFW(IHttpClientFactory factory)
{
_httpFactory = factory;
}
private async Task InternalHentai(IMessageChannel channel, string tag)
{
// create a random number generator
var rng = new NadekoRandom();
// get all of the DAPI search types, except first 3
// which are safebooru (not nsfw), and 2 furry ones 🤢
var listOfProviders = Enum.GetValues(typeof(DapiSearchType))
.Cast<DapiSearchType>()
.Skip(3)
.ToList();
// now try to get an image, if it fails return an error,
// keep trying for each provider until one of them is successful, or until
// we run out of providers. If we run out, then return an error
ImageCacherObject img;
do
{
// random index of the providers
var num = rng.Next(0, listOfProviders.Count);
// get the type
var type = listOfProviders[num];
// remove it
listOfProviders.RemoveAt(num);
// get the image
img = await _service.DapiSearch(tag, type, ctx.Guild?.Id, true).ConfigureAwait(false);
// if i can't find the image, ran out of providers, or tag is blacklisted
// return the error
if (img is null && !listOfProviders.Any())
{
await ReplyErrorLocalizedAsync(strs.no_results).ConfigureAwait(false);
return;
}
} while (img is null);
await channel.EmbedAsync(_eb.Create().WithOkColor()
.WithImageUrl(img.FileUrl)
.WithDescription($"[{GetText(strs.tag)}: {tag}]({img})"))
.ConfigureAwait(false);
_rng = new NadekoRandom();
}
private async Task InternalBoobs()
@@ -76,8 +36,11 @@ namespace NadekoBot.Modules.NSFW
JToken obj;
using (var http = _httpFactory.CreateClient())
{
obj = JArray.Parse(await http.GetStringAsync($"http://api.oboobs.ru/boobs/{new NadekoRandom().Next(0, 10330)}").ConfigureAwait(false))[0];
obj = JArray.Parse(await http
.GetStringAsync($"http://api.oboobs.ru/boobs/{new NadekoRandom().Next(0, 10330)}")
.ConfigureAwait(false))[0];
}
await ctx.Channel.SendMessageAsync($"http://media.oboobs.ru/{obj["preview"]}").ConfigureAwait(false);
}
catch (Exception ex)
@@ -85,6 +48,7 @@ namespace NadekoBot.Modules.NSFW
await SendErrorAsync(ex.Message).ConfigureAwait(false);
}
}
private async Task InternalButts(IMessageChannel Channel)
{
try
@@ -92,8 +56,11 @@ namespace NadekoBot.Modules.NSFW
JToken obj;
using (var http = _httpFactory.CreateClient())
{
obj = JArray.Parse(await http.GetStringAsync($"http://api.obutts.ru/butts/{new NadekoRandom().Next(0, 4335)}").ConfigureAwait(false))[0];
obj = JArray.Parse(await http
.GetStringAsync($"http://api.obutts.ru/butts/{new NadekoRandom().Next(0, 4335)}")
.ConfigureAwait(false))[0];
}
await Channel.SendMessageAsync($"http://media.obutts.ru/{obj["preview"]}").ConfigureAwait(false);
}
catch (Exception ex)
@@ -102,12 +69,11 @@ namespace NadekoBot.Modules.NSFW
}
}
#if !GLOBAL_NADEKO
[NadekoCommand, Aliases]
[RequireNsfw]
[RequireContext(ContextType.Guild)]
[UserPerm(ChannelPerm.ManageMessages)]
public async Task AutoHentai(int interval = 0, string tags = null)
public async Task AutoHentai(int interval = 0, [Leftover] string tags = null)
{
Timer t;
@@ -123,16 +89,18 @@ namespace NadekoBot.Modules.NSFW
if (interval < 20)
return;
var tagsArr = tags?.Split('|');
t = new Timer(async (state) =>
{
try
{
if (tagsArr is null || tagsArr.Length == 0)
await InternalHentai(ctx.Channel, null).ConfigureAwait(false);
if (tags is null || tags.Length == 0)
await InternalDapiCommand(null, true, _service.Hentai).ConfigureAwait(false);
else
await InternalHentai(ctx.Channel, tagsArr[new NadekoRandom().Next(0, tagsArr.Length)]).ConfigureAwait(false);
{
var groups = tags.Split('|');
var group = groups[_rng.Next(0, groups.Length)];
await InternalDapiCommand(group.Split(' '), true, _service.Hentai).ConfigureAwait(false);
}
}
catch
{
@@ -148,7 +116,7 @@ namespace NadekoBot.Modules.NSFW
await ReplyConfirmLocalizedAsync(strs.autohentai_started(
interval,
string.Join(", ", tagsArr)));
string.Join(", ", tags)));
}
[NadekoCommand, Aliases]
@@ -231,25 +199,24 @@ namespace NadekoBot.Modules.NSFW
await ReplyConfirmLocalizedAsync(strs.started(interval));
}
#endif
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Hentai([Leftover] string tag = null) =>
InternalHentai(ctx.Channel, tag);
public Task Hentai(params string[] tags)
=> InternalDapiCommand(tags, true, _service.Hentai);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public async Task HentaiBomb([Leftover] string tag = null)
public async Task HentaiBomb(params string[] tags)
{
if (!_hentaiBombBlacklist.Add(ctx.Guild?.Id ?? ctx.User.Id))
return;
try
{
var images = await Task.WhenAll(_service.DapiSearch(tag, DapiSearchType.Gelbooru, ctx.Guild?.Id, true),
_service.DapiSearch(tag, DapiSearchType.Danbooru, ctx.Guild?.Id, true),
_service.DapiSearch(tag, DapiSearchType.Konachan, ctx.Guild?.Id, true),
_service.DapiSearch(tag, DapiSearchType.Yandere, ctx.Guild?.Id, true)).ConfigureAwait(false);
var images = await Task.WhenAll(_service.Yandere(ctx.Guild?.Id, true, tags),
_service.Danbooru(ctx.Guild?.Id, true, tags),
_service.Konachan(ctx.Guild?.Id, true, tags),
_service.Gelbooru(ctx.Guild?.Id, true, tags));
var linksEnum = images?.Where(l => l != null).ToArray();
if (images is null || !linksEnum.Any())
@@ -258,7 +225,8 @@ namespace NadekoBot.Modules.NSFW
return;
}
await ctx.Channel.SendMessageAsync(string.Join("\n\n", linksEnum.Select(x => x.FileUrl))).ConfigureAwait(false);
await ctx.Channel.SendMessageAsync(string.Join("\n\n", linksEnum.Select(x => x.Url)))
.ConfigureAwait(false);
}
finally
{
@@ -268,43 +236,48 @@ namespace NadekoBot.Modules.NSFW
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Yandere([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Yandere, false);
public Task Yandere(params string[] tags)
=> InternalDapiCommand(tags, false, _service.Yandere);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Konachan([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Konachan, false);
public Task Konachan(params string[] tags)
=> InternalDapiCommand(tags, false, _service.Konachan);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Sankaku([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Sankaku, false);
public Task Sankaku(params string[] tags)
=> InternalDapiCommand(tags, false, _service.Sankaku);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task E621([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.E621, false);
public Task E621(params string[] tags)
=> InternalDapiCommand(tags, false, _service.E621);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Rule34([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Rule34, false);
public Task Rule34(params string[] tags)
=> InternalDapiCommand(tags, false, _service.Rule34);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Danbooru([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Danbooru, false);
public Task Danbooru(params string[] tags)
=> InternalDapiCommand(tags, false, _service.Danbooru);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Gelbooru([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Gelbooru, false);
public Task Gelbooru(params string[] tags)
=> InternalDapiCommand(tags, false, _service.Gelbooru);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Derpibooru([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Derpibooru, false);
public Task Derpibooru(params string[] tags)
=> InternalDapiCommand(tags, false, _service.DerpiBooru);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
public Task Safebooru(params string[] tags)
=> InternalDapiCommand(tags, false, _service.SafeBooru);
[NadekoCommand, Aliases]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
@@ -315,8 +288,11 @@ namespace NadekoBot.Modules.NSFW
JToken obj;
using (var http = _httpFactory.CreateClient())
{
obj = JArray.Parse(await http.GetStringAsync($"http://api.oboobs.ru/boobs/{new NadekoRandom().Next(0, 12000)}").ConfigureAwait(false))[0];
obj = JArray.Parse(await http
.GetStringAsync($"http://api.oboobs.ru/boobs/{new NadekoRandom().Next(0, 12000)}")
.ConfigureAwait(false))[0];
}
await ctx.Channel.SendMessageAsync($"http://media.oboobs.ru/{obj["preview"]}").ConfigureAwait(false);
}
catch (Exception ex)
@@ -334,8 +310,11 @@ namespace NadekoBot.Modules.NSFW
JToken obj;
using (var http = _httpFactory.CreateClient())
{
obj = JArray.Parse(await http.GetStringAsync($"http://api.obutts.ru/butts/{new NadekoRandom().Next(0, 6100)}").ConfigureAwait(false))[0];
obj = JArray.Parse(await http
.GetStringAsync($"http://api.obutts.ru/butts/{new NadekoRandom().Next(0, 6100)}")
.ConfigureAwait(false))[0];
}
await ctx.Channel.SendMessageAsync($"http://media.obutts.ru/{obj["preview"]}").ConfigureAwait(false);
}
catch (Exception ex)
@@ -351,7 +330,7 @@ namespace NadekoBot.Modules.NSFW
{
if (string.IsNullOrWhiteSpace(tag))
{
var blTags = _service.GetBlacklistedTags(ctx.Guild.Id);
var blTags = await _service.GetBlacklistedTags(ctx.Guild.Id);
await SendConfirmAsync(GetText(strs.blacklisted_tag_list),
blTags.Any()
? string.Join(", ", blTags)
@@ -360,7 +339,7 @@ namespace NadekoBot.Modules.NSFW
else
{
tag = tag.Trim().ToLowerInvariant();
var added = _service.ToggleBlacklistedTag(ctx.Guild.Id, tag);
var added = await _service.ToggleBlacklistTag(ctx.Guild.Id, tag);
if (added)
await ReplyPendingLocalizedAsync(strs.blacklisted_tag_add(tag));
@@ -369,15 +348,6 @@ namespace NadekoBot.Modules.NSFW
}
}
[NadekoCommand, Aliases]
[RequireContext(ContextType.Guild)]
[OwnerOnly]
public Task NsfwClearCache()
{
_service.ClearCache();
return ctx.OkAsync();
}
[NadekoCommand, Aliases]
[RequireContext(ContextType.Guild)]
[RequireNsfw(Group = "nsfw_or_dm"), RequireContext(ContextType.DM, Group = "nsfw_or_dm")]
@@ -435,27 +405,23 @@ namespace NadekoBot.Modules.NSFW
await ctx.Channel.EmbedAsync(embed);
}
public async Task InternalDapiCommand(string tag, DapiSearchType type, bool forceExplicit)
private async Task InternalDapiCommand(string[] tags,
bool forceExplicit,
Func<ulong?, bool, string[], Task<UrlReply>> func)
{
ImageCacherObject imgObj;
var data = await func(ctx.Guild?.Id, forceExplicit, tags);
imgObj = await _service.DapiSearch(tag, type, ctx.Guild?.Id, forceExplicit).ConfigureAwait(false);
if (imgObj is null)
await ReplyErrorLocalizedAsync(strs.no_results).ConfigureAwait(false);
else
if (data is null || !string.IsNullOrWhiteSpace(data.Error))
{
var embed = _eb.Create().WithOkColor()
.WithDescription($"{ctx.User} [{tag ?? "url"}]({imgObj}) ")
.WithFooter(type.ToString());
if (Uri.IsWellFormedUriString(imgObj.FileUrl, UriKind.Absolute))
embed.WithImageUrl(imgObj.FileUrl);
else
Log.Error($"Image link from {type} is not a proper Url: {imgObj.FileUrl}");
await ctx.Channel.EmbedAsync(embed).ConfigureAwait(false);
}
await ReplyErrorLocalizedAsync(strs.no_results);
return;
}
await ctx.Channel.EmbedAsync(_eb
.Create(ctx)
.WithOkColor()
.WithImageUrl(data.Url)
.WithDescription($"[link]({data.Url})")
.WithFooter($"{data.Rating} ({data.Provider}) | {string.Join(" | ", data.Tags.Where(x => !string.IsNullOrWhiteSpace(x)).Take(5))}"));
}
}
}

View File

@@ -0,0 +1,12 @@
namespace NadekoBot.Modules.Nsfw
{
public interface INsfwService
{
}
public class NsfwService
{
}
}

View File

@@ -0,0 +1,328 @@
using Serilog;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Memory;
using NadekoBot.Common;
using NadekoBot.Extensions;
using NadekoBot.Services;
namespace NadekoBot.Modules.Nsfw.Common
{
public class SearchImageCacher : INService
{
private readonly IHttpClientFactory _httpFactory;
private readonly SemaphoreSlim _lock = new SemaphoreSlim(1, 1);
private readonly Random _rng;
private static readonly ISet<string> defaultTagBlacklist = new HashSet<string>()
{
"loli",
"lolicon",
"shota",
"shotacon",
"cub"
};
private readonly Dictionary<Booru, object> _typeLocks = new();
private readonly Dictionary<Booru, HashSet<string>> _usedTags = new();
private readonly IMemoryCache _cache;
public SearchImageCacher(IHttpClientFactory httpFactory, IMemoryCache cache)
{
_httpFactory = httpFactory;
_rng = new NadekoRandom();
_cache = cache;
// initialize new cache with empty values
foreach (var type in Enum.GetValues<Booru>())
{
_typeLocks[type] = new();
_usedTags[type] = new();
}
}
private string Key(Booru booru, string tag)
=> $"booru:{booru}__tag:{tag}";
/// <summary>
/// Download images of the specified type, and cache them.
/// </summary>
/// <param name="tags">Required tags</param>
/// <param name="forceExplicit">Whether images will be forced to be explicit</param>
/// <param name="type">Provider type</param>
/// <param name="cancel">Cancellation token</param>
/// <returns>Whether any image is found.</returns>
private async Task<bool> UpdateImagesInternalAsync(string[] tags, bool forceExplicit, Booru type, CancellationToken cancel)
{
var images = await DownloadImagesAsync(tags, forceExplicit, type, cancel).ConfigureAwait(false);
if (images is null || images.Count == 0)
{
// Log.Warning("Got no images for {0}, tags: {1}", type, string.Join(", ", tags));
return false;
}
Log.Information("Updating {0}...", type);
lock (_typeLocks[type])
{
var typeUsedTags = _usedTags[type];
foreach (var tag in tags)
typeUsedTags.Add(tag);
// if the user used no tags for the hentai command and there are no used
// tags at the moment, just select 50 random tags from downloaded images to seed
if (typeUsedTags.Count == 0)
images.SelectMany(x => x.Tags)
.Distinct()
.Shuffle()
.Take(50)
.ForEach(x => typeUsedTags.Add(x));
foreach (var img in images)
{
// if any of the tags is a tag banned by discord
// do not put that image in the cache
if (defaultTagBlacklist.Overlaps(img.Tags))
continue;
// if image doesn't have a proper absolute uri, skip it
if (!Uri.IsWellFormedUriString(img.FileUrl, UriKind.Absolute))
continue;
// I'm appending the current tags because of tag aliasing
// this way, if the user uses a tag alias, for example 'kissing' -
// both 'kiss' (the real tag returned by the image) and 'kissing' will be populated with
// retrieved images
foreach (var tag in img.Tags.Concat(tags).Distinct())
{
if (typeUsedTags.Contains(tag))
{
var set = _cache.GetOrCreate<HashSet<ImageData>>(Key(type, tag), e =>
{
e.AbsoluteExpirationRelativeToNow = TimeSpan.FromMinutes(30);
return new();
});
if(set.Count < 100)
set.Add(img);
}
}
}
}
return true;
}
private ImageData QueryLocal(string[] tags, bool forceExplicit, Booru type, HashSet<string> blacklistedTags)
{
var setList = new List<HashSet<ImageData>>();
// ofc make sure no changes are happening while we're getting a random one
lock (_typeLocks[type])
{
// if no tags are provided, get a random tag
if (tags.Length == 0)
{
// get all tags in the cache
if (_usedTags.TryGetValue(type, out var allTags)
&& allTags.Count > 0)
{
tags = new[] {allTags.ToList()[_rng.Next(0, allTags.Count)]};
}
else
{
return null;
}
}
foreach (var tag in tags)
{
// if any tag is missing from cache, that means there is no result
if (_cache.TryGetValue<HashSet<ImageData>>(Key(type, tag), out var set))
setList.Add(set);
else
return null;
}
if (setList.Count == 0)
return null;
List<ImageData> resultList;
// if multiple tags, we need to intersect sets
if (setList.Count > 1)
{
// now that we have sets, intersect them to find eligible items
// make a copy of the 1st set
var resultSet = new HashSet<ImageData>(setList[0]);
// go through all of the other sets, and
for (var i = 1; i < setList.Count; ++i)
{
// if any of the elements in result set are not present in the current set
// remove it from the result set
resultSet.IntersectWith(setList[i]);
}
resultList = resultSet.ToList();
}
else
{
// if only one tag, use that set
resultList = setList[0].ToList();
}
// return a random one which doesn't have blacklisted tags in it
resultList = resultList.Where(x => !blacklistedTags.Overlaps(x.Tags)).ToList();
// if no items in the set -> not found
if (resultList.Count == 0)
return null;
var toReturn = resultList[_rng.Next(0, resultList.Count)];
// remove from cache
foreach (var tag in tags)
{
if (_cache.TryGetValue<HashSet<ImageData>>(Key(type, tag), out var items))
{
items.Remove(toReturn);
}
}
return toReturn;
}
}
public async Task<ImageData> GetImageNew(string[] tags, bool forceExplicit, Booru type,
HashSet<string> blacklistedTags, CancellationToken cancel)
{
// make sure tags are proper
tags = tags
.Where(x => x is not null)
.Select(tag => tag.ToLowerInvariant().Trim())
.Distinct()
.ToArray();
if (tags.Length > 2 && type == Booru.Danbooru)
tags = tags[..2];
// use both tags banned by discord and tags banned on the server
if (blacklistedTags.Overlaps(tags) || defaultTagBlacklist.Overlaps(tags))
return default;
// query for an image
var image = QueryLocal(tags, forceExplicit, type, blacklistedTags);
if (image is not null)
return image;
var success = false;
try
{
// if image is not found, update the cache and query again
success = await UpdateImagesInternalAsync(tags, forceExplicit, type, cancel).ConfigureAwait(false);
}
catch (HttpRequestException)
{
return default;
}
if (!success)
return default;
image = QueryLocal(tags, forceExplicit, type, blacklistedTags);
return image;
}
private readonly ConcurrentDictionary<(Booru, string), int> maxPages = new();
public async Task<List<ImageData>> DownloadImagesAsync(string[] tags, bool isExplicit, Booru type, CancellationToken cancel)
{
var tagStr = string.Join(' ', tags.OrderByDescending(x => x));
var page = 0;
var attempt = 0;
while (attempt++ <= 10)
{
if (maxPages.TryGetValue((type, tagStr), out var maxPage))
{
if (maxPage == 0)
{
Log.Information("Tag {0} yields no result on {1}, skipping.", tagStr, type);
return new();
}
page = _rng.Next(0, maxPage);
}
else
{
page = _rng.Next(0, 11);
}
var result = await DownloadImagesAsync(tags, isExplicit, type, page, cancel).ConfigureAwait(false);
if (result is null or { Count: 0 })
{
Log.Information("Tag {0}, page {1} has no result on {2}.", string.Join(", ", tags), page, type.ToString());
continue;
}
return result;
}
return new();
}
private IImageDownloader GetImageDownloader(Booru booru, HttpClient http)
=> booru switch
{
Booru.Danbooru => new DanbooruImageDownloader(http),
Booru.Yandere => new YandereImageDownloader(http),
Booru.Konachan => new KonachanImageDownloader(http),
Booru.Safebooru => new SafebooruImageDownloader(http),
Booru.E621 => new E621ImageDownloader(http),
Booru.Derpibooru => new DerpibooruImageDownloader(http),
Booru.Gelbooru => new GelbooruImageDownloader(http),
Booru.Rule34 => new Rule34ImageDownloader(http),
Booru.Sankaku => new SankakuImageDownloader(http),
_ => throw new NotImplementedException($"{booru} downloader not implemented.")
};
private async Task<List<ImageData>> DownloadImagesAsync(string[] tags, bool isExplicit, Booru type, int page, CancellationToken cancel)
{
try
{
Log.Information("Downloading from {0} (page {1})...", type, page);
using var http = _httpFactory.CreateClient();
var downloader = GetImageDownloader(type, http);
var images = await downloader.DownloadImageDataAsync(tags, page, isExplicit, cancel);
if (images.Count == 0)
{
var tagStr = string.Join(' ', tags.OrderByDescending(x => x));
maxPages[(type, tagStr)] = page;
}
return images;
}
catch (OperationCanceledException)
{
throw;
}
catch (Exception ex)
{
Log.Error(ex, "Error downloading an image:\nTags: {0}\nType: {1}\nPage: {2}\nMessage: {3}",
string.Join(", ", tags),
type,
page,
ex.Message);
return new();
}
}
}
}
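A standalone illustration, not from this commit, of the multi-tag intersection step QueryLocal performs above: one cached set per requested tag is intersected so only images carrying every tag survive. The data is invented and the snippet assumes the usual System and System.Collections.Generic usings:
// Hypothetical per-tag caches, intersected the same way QueryLocal intersects its ImageData sets.
var perTagSets = new List<HashSet<string>>
{
    new() { "img1", "img2", "img3" }, // images cached under the first tag
    new() { "img2", "img3", "img4" }  // images cached under the second tag
};
var result = new HashSet<string>(perTagSets[0]); // copy of the first set
for (var i = 1; i < perTagSets.Count; ++i)
    result.IntersectWith(perTagSets[i]); // drop anything missing from any later set
Console.WriteLine(string.Join(", ", result)); // img2, img3 (set order not guaranteed)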

View File

@@ -0,0 +1,393 @@
using Newtonsoft.Json.Linq;
using Serilog;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using System.Threading;
using LinqToDB;
using NadekoBot.Common;
using NadekoBot.Extensions;
using NadekoBot.Modules.Searches.Common;
using NadekoBot.Services;
using NadekoBot.Services.Database.Models;
using Newtonsoft.Json;
using Booru = NadekoBot.Modules.Nsfw.Common.Booru;
using SearchImageCacher = NadekoBot.Modules.Nsfw.Common.SearchImageCacher;
namespace NadekoBot.Modules.Nsfw
{
public record TagRequest(ulong GuildId, bool ForceExplicit, Booru SearchType, params string[] Tags);
public record UrlReply
{
public string Error { get; init; }
public string Url { get; init; }
public string Rating { get; init; }
public string Provider { get; init; }
public List<string> Tags { get; } = new List<string>();
}
public class SearchImagesService : ISearchImagesService, INService
{
private readonly Random _rng;
private readonly HttpClient _http;
private readonly SearchImageCacher _cache;
private readonly IHttpClientFactory _httpFactory;
private readonly DbService _db;
private ConcurrentDictionary<ulong, HashSet<string>> BlacklistedTags { get; }
public ConcurrentDictionary<ulong, Timer> AutoHentaiTimers { get; } = new ConcurrentDictionary<ulong, Timer>();
public ConcurrentDictionary<ulong, Timer> AutoBoobTimers { get; } = new ConcurrentDictionary<ulong, Timer>();
public ConcurrentDictionary<ulong, Timer> AutoButtTimers { get; } = new ConcurrentDictionary<ulong, Timer>();
public SearchImagesService(DbService db,
IHttpClientFactory http,
SearchImageCacher cacher,
IHttpClientFactory httpFactory)
{
_db = db;
_rng = new NadekoRandom();
_http = http.CreateClient();
_http.AddFakeHeaders();
_cache = cacher;
_httpFactory = httpFactory;
using var uow = db.GetDbContext();
BlacklistedTags = new(
uow.NsfwBlacklistedTags
.AsEnumerable()
.GroupBy(x => x.GuildId)
.ToDictionary(
x => x.Key,
x => new HashSet<string>(x.Select(t => t.Tag))));
}
private Task<UrlReply> GetNsfwImageAsync(ulong? guildId, bool forceExplicit, string[] tags, Booru dapi, CancellationToken cancel = default)
{
return GetNsfwImageAsync(guildId ?? 0, tags ?? Array.Empty<string>(), forceExplicit, dapi, cancel);
}
private bool IsValidTag(string tag) => tag.All(x => x != '+' && x != '?' && x != '/'); // tags mustn't contain + or ? or /
private async Task<UrlReply> GetNsfwImageAsync(
ulong guildId,
string[] tags,
bool forceExplicit,
Booru dapi,
CancellationToken cancel)
{
if (!tags.All(x => IsValidTag(x)))
{
return new UrlReply
{
Error = "One or more tags are invalid.",
Url = ""
};
}
Log.Information("Getting {V} image for Guild: {GuildId}...", dapi.ToString(), guildId);
try
{
BlacklistedTags.TryGetValue(guildId, out var blTags);
if (dapi == Booru.E621) {
for (var i = 0; i < tags.Length; ++i)
if (tags[i] == "yuri")
tags[i] = "female/female";
}
if (dapi == Booru.Derpibooru)
{
for (var i = 0; i < tags.Length; ++i)
if (tags[i] == "yuri")
tags[i] = "lesbian";
}
var result = await _cache.GetImageNew(tags, forceExplicit, dapi, blTags ?? new HashSet<string>(), cancel)
.ConfigureAwait(false);
if (result is null)
{
return new UrlReply
{
Error = "Image not found.",
Url = ""
};
}
var reply = new UrlReply
{
Error = "",
Url = result.FileUrl,
Rating = result.Rating,
Provider = result.SearchType.ToString()
};
reply.Tags.AddRange(result.Tags);
return reply;
}
catch (Exception ex)
{
Log.Error(ex, "Failed getting {Dapi} image: {Message}", dapi, ex.Message);
return new UrlReply
{
Error = ex.Message,
Url = ""
};
}
}
public Task<UrlReply> Gelbooru(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Gelbooru);
public Task<UrlReply> Danbooru(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Danbooru);
public Task<UrlReply> Konachan(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Konachan);
public Task<UrlReply> Yandere(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Yandere);
public Task<UrlReply> Rule34(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Rule34);
public Task<UrlReply> E621(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.E621);
public Task<UrlReply> DerpiBooru(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Derpibooru);
public Task<UrlReply> SafeBooru(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Safebooru);
public Task<UrlReply> Sankaku(ulong? guildId, bool forceExplicit, string[] tags)
=> GetNsfwImageAsync(guildId, forceExplicit, tags, Booru.Sankaku);
public async Task<UrlReply> Hentai(ulong? guildId, bool forceExplicit, string[] tags)
{
var providers = new[] {
Booru.Danbooru,
Booru.Konachan,
Booru.Gelbooru,
Booru.Yandere
};
using var cancelSource = new CancellationTokenSource();
// create a task for each type
var tasks = providers.Select(type => GetNsfwImageAsync(guildId, forceExplicit, tags, type)).ToList();
do
{
// wait for any of the tasks to complete
var task = await Task.WhenAny(tasks);
// get its result
var result = task.GetAwaiter().GetResult();
if(result.Error == "")
{
// if we have a non-error result, cancel other searches and return the result
cancelSource.Cancel();
return result;
}
// if the result is an error, remove that task from the waiting list,
// and wait for another task to complete
tasks.Remove(task);
}
while (tasks.Count > 0); // keep looping as long as there is any task remaining to be attempted
// if we ran out of tasks, that means all tasks failed - return an error
return new UrlReply()
{
Error = "No hentai image found."
};
}
public async Task<UrlReply> Boobs()
{
try
{
JToken obj;
obj = JArray.Parse(await _http.GetStringAsync($"http://api.oboobs.ru/boobs/{_rng.Next(0, 12000)}").ConfigureAwait(false))[0];
return new UrlReply
{
Error = "",
Url = $"http://media.oboobs.ru/{obj["preview"]}",
};
}
catch (Exception ex)
{
Log.Error(ex, "Error retreiving boob image: {Message}", ex.Message);
return new UrlReply
{
Error = ex.Message,
Url = "",
};
}
}
private readonly object taglock = new object();
public ValueTask<bool> ToggleBlacklistTag(ulong guildId, string tag)
{
lock (taglock)
{
tag = tag.Trim().ToLowerInvariant();
var blacklistedTags = BlacklistedTags.GetOrAdd(guildId, new HashSet<string>());
var isAdded = blacklistedTags.Add(tag);
using var uow = _db.GetDbContext();
if (!isAdded)
{
blacklistedTags.Remove(tag);
uow.NsfwBlacklistedTags.DeleteAsync(x => x.GuildId == guildId && x.Tag == tag);
uow.SaveChanges();
}
else
{
uow.NsfwBlacklistedTags.Add(new NsfwBlacklistedTag()
{
Tag = tag,
GuildId = guildId
});
uow.SaveChanges();
}
return new(isAdded);
}
}
public ValueTask<string[]> GetBlacklistedTags(ulong guildId)
{
lock (taglock)
{
if (BlacklistedTags.TryGetValue(guildId, out var tags))
{
return new(tags.ToArray());
}
return new(Array.Empty<string>());
}
}
public async Task<UrlReply> Butts()
{
try
{
JToken obj;
obj = JArray.Parse(await _http.GetStringAsync($"http://api.obutts.ru/butts/{_rng.Next(0, 6100)}"))[0];
return new UrlReply
{
Error = "",
Url = $"http://media.obutts.ru/{obj["preview"]}",
};
}
catch (Exception ex)
{
Log.Error(ex, "Error retreiving butt image: {Message}", ex.Message);
return new UrlReply
{
Error = ex.Message,
Url = "",
};
}
}
#region Nhentai
private string GetNhentaiExtensionInternal(string s)
=> s switch
{
"j" => "jpg",
"p" => "png",
"g" => "gif",
_ => "jpg"
};
private Gallery ModelToGallery(NhentaiApiModel.Gallery model)
{
var thumbnail = $"https://t.nhentai.net/galleries/{model.MediaId}/thumb."
+ GetNhentaiExtensionInternal(model.Images.Thumbnail.T);
var url = $"https://nhentai.net/g/{model.Id}";
return new Gallery(
model.Id.ToString(),
url,
model.Title.English,
model.Title.Pretty,
thumbnail,
model.NumPages,
model.NumFavorites,
model.UploadDate.ToUnixTimestamp().UtcDateTime,
model.Tags.Map(x => new Tag()
{
Name = x.Name,
Url = "https://nhentai.com/" + x.Url
}));
}
private async Task<NhentaiApiModel.Gallery> GetNhentaiByIdInternalAsync(uint id)
{
using var http = _httpFactory.CreateClient();
try
{
var res = await http.GetStringAsync("https://nhentai.net/api/gallery/" + id);
return JsonConvert.DeserializeObject<NhentaiApiModel.Gallery>(res);
}
catch (HttpRequestException)
{
Log.Warning("Nhentai with id {NhentaiId} not found", id);
return null;
}
}
private async Task<NhentaiApiModel.Gallery[]> SearchNhentaiInternalAsync(string search)
{
using var http = _httpFactory.CreateClient();
try
{
var res = await http.GetStringAsync("https://nhentai.net/api/galleries/search?query=" + search);
return JsonConvert.DeserializeObject<NhentaiApiModel.SearchResult>(res).Result;
}
catch (HttpRequestException)
{
Log.Warning("Nhentai with search {NhentaiSearch} not found", search);
return null;
}
}
public async Task<Gallery> GetNhentaiByIdAsync(uint id)
{
var model = await GetNhentaiByIdInternalAsync(id);
return ModelToGallery(model);
}
private static readonly string[] _bannedTags =
{
"loli",
"lolicon",
"shota",
"shotacon",
"cub"
};
public async Task<Gallery> GetNhentaiBySearchAsync(string search)
{
var models = await SearchNhentaiInternalAsync(search);
models = models.Where(x => !x.Tags.Any(t => _bannedTags.Contains(t.Name))).ToArray();
if (models.Length == 0)
return null;
return ModelToGallery(models[_rng.Next(0, models.Length)]);
}
#endregion
}
}

View File

@@ -1,18 +1,19 @@
using System;
using System.Collections.Generic;
using NadekoBot.Modules.Nsfw.Common;
namespace NadekoBot.Modules.Searches.Common
{
public class ImageCacherObject : IComparable<ImageCacherObject>
{
public DapiSearchType SearchType { get; }
public Booru SearchType { get; }
public string FileUrl { get; }
public HashSet<string> Tags { get; }
public string Rating { get; }
public ImageCacherObject(DapiImageObject obj, DapiSearchType type)
public ImageCacherObject(DapiImageObject obj, Booru type)
{
if (type == DapiSearchType.Danbooru && !Uri.IsWellFormedUriString(obj.FileUrl, UriKind.Absolute))
if (type == Booru.Danbooru && !Uri.IsWellFormedUriString(obj.FileUrl, UriKind.Absolute))
{
this.FileUrl = "https://danbooru.donmai.us" + obj.FileUrl;
}
@@ -25,7 +26,7 @@ namespace NadekoBot.Modules.Searches.Common
this.Tags = new HashSet<string>((obj.Tags ?? obj.TagString).Split(' '));
}
public ImageCacherObject(string url, DapiSearchType type, string tags, string rating)
public ImageCacherObject(string url, Booru type, string tags, string rating)
{
this.SearchType = type;
this.FileUrl = url;
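A small sketch of the Danbooru relative-URL handling the updated constructor keeps, now keyed by the Booru enum. The DapiImageObject property names match the ones this constructor reads; the sample values are invented.
// illustrative values only - exercises the Danbooru prefixing branch above
var obj = new DapiImageObject
{
FileUrl = "/data/sample.jpg", // relative path, not a well-formed absolute URI
Tags = "yuri kissing",
Rating = "s"
};
var cached = new ImageCacherObject(obj, Booru.Danbooru);
// cached.FileUrl == "https://danbooru.donmai.us/data/sample.jpg"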

View File

@@ -1,312 +0,0 @@
using NadekoBot.Extensions;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using System.Xml;
using Serilog;
namespace NadekoBot.Modules.Searches.Common
{
// note: this is not the code that public nadeko is using
public class SearchImageCacher
{
private readonly SemaphoreSlim _lock = new SemaphoreSlim(1, 1);
private readonly IHttpClientFactory _httpFactory;
private readonly Random _rng;
private readonly SortedSet<ImageCacherObject> _cache;
private static readonly List<string> defaultTagBlacklist = new List<string>() {
"loli",
"lolicon",
"shota"
};
public SearchImageCacher(IHttpClientFactory http)
{
_httpFactory = http;
_rng = new Random();
_cache = new SortedSet<ImageCacherObject>();
}
public async Task<ImageCacherObject> GetImage(string[] tags, bool forceExplicit, DapiSearchType type,
HashSet<string> blacklistedTags = null)
{
tags = tags.Select(tag => tag?.ToLowerInvariant()).ToArray();
blacklistedTags = blacklistedTags ?? new HashSet<string>();
foreach (var item in defaultTagBlacklist)
{
blacklistedTags.Add(item);
}
blacklistedTags = blacklistedTags.Select(t => t.ToLowerInvariant()).ToHashSet();
if (tags.Any(x => blacklistedTags.Contains(x)))
{
throw new Exception("One of the specified tags is blacklisted");
}
if (type == DapiSearchType.E621)
tags = tags.Select(tag => tag?.Replace("yuri", "female/female", StringComparison.InvariantCulture))
.ToArray();
await _lock.WaitAsync().ConfigureAwait(false);
try
{
ImageCacherObject[] imgs;
if (tags.Any())
{
imgs = _cache.Where(x => x.Tags.IsSupersetOf(tags) && x.SearchType == type && (!forceExplicit || x.Rating == "e")).ToArray();
}
else
{
imgs = _cache.Where(x => x.SearchType == type).ToArray();
}
imgs = imgs.Where(x => x.Tags.All(t => !blacklistedTags.Contains(t.ToLowerInvariant()))).ToArray();
ImageCacherObject img;
if (imgs.Length == 0)
img = null;
else
img = imgs[_rng.Next(imgs.Length)];
if (img != null)
{
_cache.Remove(img);
return img;
}
else
{
var images = await DownloadImagesAsync(tags, forceExplicit, type).ConfigureAwait(false);
images = images
.Where(x => x.Tags.All(t => !blacklistedTags.Contains(t.ToLowerInvariant())))
.ToArray();
if (images.Length == 0)
return null;
var toReturn = images[_rng.Next(images.Length)];
foreach (var dledImg in images)
{
if (dledImg != toReturn)
_cache.Add(dledImg);
}
return toReturn;
}
}
finally
{
_lock.Release();
}
}
public async Task<ImageCacherObject[]> DownloadImagesAsync(string[] tags, bool isExplicit, DapiSearchType type)
{
isExplicit = type == DapiSearchType.Safebooru
? false
: isExplicit;
var tag = "";
tag += string.Join('+', tags.Select(x => x.Replace(" ", "_", StringComparison.InvariantCulture).ToLowerInvariant()));
if (isExplicit)
tag = "rating%3Aexplicit+" + tag;
var website = "";
switch (type)
{
case DapiSearchType.Safebooru:
website = $"https://safebooru.org/index.php?page=dapi&s=post&q=index&limit=1000&tags={tag}&json=1";
break;
case DapiSearchType.E621:
website = $"https://e621.net/posts.json?limit=200&tags={tag}";
break;
case DapiSearchType.Danbooru:
website = $"http://danbooru.donmai.us/posts.json?limit=100&tags={tag}";
break;
case DapiSearchType.Gelbooru:
website = $"http://gelbooru.com/index.php?page=dapi&s=post&q=index&limit=100&tags={tag}";
break;
case DapiSearchType.Rule34:
website = $"https://rule34.xxx/index.php?page=dapi&s=post&q=index&limit=100&tags={tag}";
break;
case DapiSearchType.Konachan:
website = $"https://konachan.com/post.json?s=post&q=index&limit=100&tags={tag}";
break;
case DapiSearchType.Yandere:
website = $"https://yande.re/post.json?limit=100&tags={tag}";
break;
case DapiSearchType.Derpibooru:
tag = string.IsNullOrWhiteSpace(tag) ? "safe" : tag;
website = $"https://www.derpibooru.org/api/v1/json/search/images?q={tag?.Replace('+', ',')}&per_page=49";
break;
case DapiSearchType.Sankaku:
website = $"https://capi-v2.sankakucomplex.com/posts?tags={tag}&limit=50";
break;
}
try
{
using (var _http = _httpFactory.CreateClient())
{
_http.AddFakeHeaders();
if (type == DapiSearchType.Konachan || type == DapiSearchType.Yandere || type == DapiSearchType.Danbooru)
{
var data = await _http.GetStringAsync(website).ConfigureAwait(false);
return JsonConvert.DeserializeObject<DapiImageObject[]>(data)
.Where(x => x.FileUrl != null)
.Select(x => new ImageCacherObject(x, type))
.ToArray();
}
if (type == DapiSearchType.Sankaku)
{
var data = await _http.GetStringAsync(website).ConfigureAwait(false);
return JsonConvert.DeserializeObject<SankakuImageObject[]>(data)
.Where(x => !string.IsNullOrWhiteSpace(x.FileUrl) && x.FileType.StartsWith("image"))
.Select(x => new ImageCacherObject(
x.FileUrl,
DapiSearchType.Sankaku,
x.Tags.Select(x => x.Name).JoinWith(','),
x.Score))
.ToArray();
}
if (type == DapiSearchType.E621)
{
var data = await _http.GetStringAsync(website).ConfigureAwait(false);
return JsonConvert.DeserializeAnonymousType(data, new { posts = new List<E621Object>() })
.posts
.Where(x => !string.IsNullOrWhiteSpace(x.File?.Url))
.Select(x => new ImageCacherObject(x.File.Url,
type, string.Join(' ', x.Tags.General), x.Score.Total))
.ToArray();
}
if (type == DapiSearchType.Derpibooru)
{
var data = await _http.GetStringAsync(website).ConfigureAwait(false);
return JsonConvert.DeserializeObject<DerpiContainer>(data)
.Images
.Where(x => !string.IsNullOrWhiteSpace(x.ViewUrl))
.Select(x => new ImageCacherObject(x.ViewUrl,
type, string.Join("\n", x.Tags), x.Score))
.ToArray();
}
if (type == DapiSearchType.Safebooru)
{
var data = await _http.GetStringAsync(website).ConfigureAwait(false);
return JsonConvert.DeserializeObject<SafebooruElement[]>(data)
.Select(x => new ImageCacherObject(x.FileUrl, type, x.Tags, x.Rating))
.ToArray();
}
return (await LoadXmlAsync(website, type).ConfigureAwait(false)).ToArray();
}
}
catch (Exception ex)
{
Log.Warning(ex, "Error downloading an image: {Message}", ex.Message);
return Array.Empty<ImageCacherObject>();
}
}
private async Task<ImageCacherObject[]> LoadXmlAsync(string website, DapiSearchType type)
{
var list = new List<ImageCacherObject>();
using (var http = _httpFactory.CreateClient())
using (var stream = await http.GetStreamAsync(website).ConfigureAwait(false))
using (var reader = XmlReader.Create(stream, new XmlReaderSettings()
{
Async = true,
}))
{
while (await reader.ReadAsync().ConfigureAwait(false))
{
if (reader.NodeType == XmlNodeType.Element &&
reader.Name == "post")
{
list.Add(new ImageCacherObject(new DapiImageObject()
{
FileUrl = reader["file_url"],
Tags = reader["tags"],
Rating = reader["rating"] ?? "e"
}, type));
}
}
}
return list.ToArray();
}
public void Clear()
{
_cache.Clear();
}
}
public class DapiImageObject
{
[JsonProperty("File_Url")]
public string FileUrl { get; set; }
public string Tags { get; set; }
[JsonProperty("Tag_String")]
public string TagString { get; set; }
public string Rating { get; set; }
}
public class DerpiContainer
{
public DerpiImageObject[] Images { get; set; }
}
public class DerpiImageObject
{
[JsonProperty("view_url")]
public string ViewUrl { get; set; }
public string[] Tags { get; set; }
public string Score { get; set; }
}
public class SankakuImageObject
{
public class Tag
{
public string Name { get; set; }
}
[JsonProperty("file_url")]
public string FileUrl { get; set; }
[JsonProperty("file_type")]
public string FileType { get; set; }
public Tag[] Tags { get; set; }
[JsonProperty("total_score")]
public string Score { get; set; }
}
public enum DapiSearchType
{
Safebooru,
E621,
Derpibooru,
Gelbooru,
Konachan,
Rule34,
Yandere,
Danbooru,
Sankaku,
}
public class SafebooruElement
{
public string Directory { get; set; }
public string Image { get; set; }
public string FileUrl => $"https://safebooru.org/images/{Directory}/{Image}";
public string Rating { get; set; }
public string Tags { get; set; }
}
}
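For reference, the per-guild cacher deleted above reduces to the pattern below: serve a matching image from the in-memory set if one exists, otherwise download a batch, drop anything carrying a blacklisted tag, return one image at random and cache the rest. This is a condensed sketch of the removed logic (names simplified), not the new Nsfw module code added elsewhere in this merge.
// condensed sketch of the removed lookup path
private async Task<ImageCacherObject> GetOrDownloadAsync(string[] tags, bool forceExplicit, DapiSearchType type, HashSet<string> blacklisted)
{
// 1. try the cache first: same booru, superset of the requested tags, matching rating, no blacklisted tags
var hits = _cache
.Where(x => x.SearchType == type
&& x.Tags.IsSupersetOf(tags)
&& (!forceExplicit || x.Rating == "e")
&& x.Tags.All(t => !blacklisted.Contains(t)))
.ToArray();
if (hits.Length > 0)
{
var hit = hits[_rng.Next(hits.Length)];
_cache.Remove(hit); // each cached image is served at most once
return hit;
}
// 2. otherwise download a fresh batch, filter blacklisted tags, cache the leftovers
var images = (await DownloadImagesAsync(tags, forceExplicit, type).ConfigureAwait(false))
.Where(x => x.Tags.All(t => !blacklisted.Contains(t)))
.ToArray();
if (images.Length == 0)
return null;
var chosen = images[_rng.Next(images.Length)];
foreach (var img in images)
{
if (img != chosen)
_cache.Add(img);
}
return chosen;
}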

View File

@@ -22,6 +22,7 @@ using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using NadekoBot.Modules.Administration.Services;
using NadekoBot.Modules.Nsfw.Common;
using Serilog;
using Configuration = AngleSharp.Configuration;
@@ -589,10 +590,6 @@ namespace NadekoBot.Modules.Searches
await SendConfirmAsync($"https://images.google.com/searchbyimage?image_url={imageLink}").ConfigureAwait(false);
}
[NadekoCommand, Aliases]
public Task Safebooru([Leftover] string tag = null)
=> InternalDapiCommand(tag, DapiSearchType.Safebooru);
[NadekoCommand, Aliases]
public async Task Wiki([Leftover] string query = null)
{
@@ -760,21 +757,6 @@ namespace NadekoBot.Modules.Searches
await ctx.Channel.SendMessageAsync($"https://store.steampowered.com/app/{appId}").ConfigureAwait(false);
}
public async Task InternalDapiCommand(string tag, DapiSearchType type)
{
tag = tag?.Trim() ?? "";
var imgObj = await _service.DapiSearch(tag, type, ctx.Guild?.Id).ConfigureAwait(false);
if (imgObj is null)
await SendErrorAsync(ctx.User.Mention + " " + GetText(strs.no_results)).ConfigureAwait(false);
else
await ctx.Channel.EmbedAsync(_eb.Create().WithOkColor()
.WithDescription($"{ctx.User.Mention} [{tag ?? "url"}]({imgObj.FileUrl})")
.WithImageUrl(imgObj.FileUrl)
.WithFooter(type.ToString())).ConfigureAwait(false);
}
public async Task<bool> ValidateQuery(IMessageChannel ch, string query)
{
if (!string.IsNullOrWhiteSpace(query))

View File

@@ -50,14 +50,6 @@ namespace NadekoBot.Modules.Searches.Services
public List<WoWJoke> WowJokes { get; } = new List<WoWJoke>();
public List<MagicItem> MagicItems { get; } = new List<MagicItem>();
private readonly ConcurrentDictionary<ulong, SearchImageCacher> _imageCacher = new ConcurrentDictionary<ulong, SearchImageCacher>();
public ConcurrentDictionary<ulong, Timer> AutoHentaiTimers { get; } = new ConcurrentDictionary<ulong, Timer>();
public ConcurrentDictionary<ulong, Timer> AutoBoobTimers { get; } = new ConcurrentDictionary<ulong, Timer>();
public ConcurrentDictionary<ulong, Timer> AutoButtTimers { get; } = new ConcurrentDictionary<ulong, Timer>();
private readonly ConcurrentDictionary<ulong, HashSet<string>> _blacklistedTags = new ConcurrentDictionary<ulong, HashSet<string>>();
private readonly List<string> _yomamaJokes;
public SearchesService(DiscordSocketClient client, IGoogleApiService google,
@@ -75,11 +67,6 @@ namespace NadekoBot.Modules.Searches.Services
_eb = eb;
_rng = new NadekoRandom();
_blacklistedTags = new ConcurrentDictionary<ulong, HashSet<string>>(
bot.AllGuildConfigs.ToDictionary(
x => x.GuildId,
x => new HashSet<string>(x.NsfwBlacklistedTags.Select(y => y.Tag))));
//translate commands
_client.MessageReceived += (msg) =>
{
@@ -366,80 +353,6 @@ namespace NadekoBot.Modules.Searches.Services
return (await _google.Translate(text, from, to).ConfigureAwait(false)).SanitizeMentions(true);
}
public Task<ImageCacherObject> DapiSearch(string tag, DapiSearchType type, ulong? guild, bool isExplicit = false)
{
tag = tag ?? "";
if (string.IsNullOrWhiteSpace(tag)
&& (tag.Contains("loli") || tag.Contains("shota")))
{
return null;
}
var tags = tag
.Split('+')
.Select(x => x.ToLowerInvariant().Replace(' ', '_'))
.ToArray();
if (guild.HasValue)
{
var blacklistedTags = GetBlacklistedTags(guild.Value);
var cacher = _imageCacher.GetOrAdd(guild.Value, (key) => new SearchImageCacher(_httpFactory));
return cacher.GetImage(tags, isExplicit, type, blacklistedTags);
}
else
{
var cacher = _imageCacher.GetOrAdd(guild ?? 0, (key) => new SearchImageCacher(_httpFactory));
return cacher.GetImage(tags, isExplicit, type);
}
}
public HashSet<string> GetBlacklistedTags(ulong guildId)
{
if (_blacklistedTags.TryGetValue(guildId, out var tags))
return tags;
return new HashSet<string>();
}
public bool ToggleBlacklistedTag(ulong guildId, string tag)
{
var tagObj = new NsfwBlacklitedTag
{
Tag = tag
};
bool added;
using (var uow = _db.GetDbContext())
{
var gc = uow.GuildConfigsForId(guildId, set => set.Include(y => y.NsfwBlacklistedTags));
if (gc.NsfwBlacklistedTags.Add(tagObj))
added = true;
else
{
gc.NsfwBlacklistedTags.Remove(tagObj);
var toRemove = gc.NsfwBlacklistedTags.FirstOrDefault(x => x.Equals(tagObj));
if (toRemove != null)
uow.Remove(toRemove);
added = false;
}
var newTags = new HashSet<string>(gc.NsfwBlacklistedTags.Select(x => x.Tag));
_blacklistedTags.AddOrUpdate(guildId, newTags, delegate { return newTags; });
uow.SaveChanges();
}
return added;
}
public void ClearCache()
{
foreach (var c in _imageCacher)
{
c.Value?.Clear();
}
}
private readonly object yomamaLock = new object();
private int yomamaJokeIndex = 0;
public Task<string> GetYomamaJoke()
@@ -838,95 +751,5 @@ namespace NadekoBot.Modules.Searches.Services
fullQueryLink,
"0");
}
#region Nhentai
private string GetNhentaiExtensionInternal(string s)
=> s switch
{
"j" => "jpg",
"p" => "png",
"g" => "gif",
_ => "jpg"
};
private Gallery ModelToGallery(NhentaiApiModel.Gallery model)
{
var thumbnail = $"https://t.nhentai.net/galleries/{model.MediaId}/thumb."
+ GetNhentaiExtensionInternal(model.Images.Thumbnail.T);
var url = $"https://nhentai.net/g/{model.Id}";
return new Gallery(
model.Id.ToString(),
url,
model.Title.English,
model.Title.Pretty,
thumbnail,
model.NumPages,
model.NumFavorites,
model.UploadDate.ToUnixTimestamp().UtcDateTime,
model.Tags.Map(x => new Tag()
{
Name = x.Name,
Url = "https://nhentai.com/" + x.Url
}));
}
public async Task<NhentaiApiModel.Gallery> GetNhentaiByIdInternalAsync(uint id)
{
using var http = _httpFactory.CreateClient();
try
{
var res = await http.GetStringAsync("https://nhentai.net/api/gallery/" + id);
return JsonConvert.DeserializeObject<NhentaiApiModel.Gallery>(res);
}
catch (HttpRequestException)
{
Log.Warning("Nhentai with id {NhentaiId} not found", id);
return null;
}
}
private async Task<NhentaiApiModel.Gallery[]> SearchNhentaiInternalAsync(string search)
{
using var http = _httpFactory.CreateClient();
try
{
var res = await http.GetStringAsync("https://nhentai.net/api/galleries/search?query=" + search);
return JsonConvert.DeserializeObject<NhentaiApiModel.SearchResult>(res).Result;
}
catch (HttpRequestException)
{
Log.Warning("Nhentai with search {NhentaiSearch} not found", search);
return null;
}
}
public async Task<Gallery> GetNhentaiByIdAsync(uint id)
{
var model = await GetNhentaiByIdInternalAsync(id);
return ModelToGallery(model);
}
private static readonly string[] _bannedTags =
{
"loli",
"lolicon",
"shota",
"shotacon",
"cub"
};
public async Task<Gallery> GetNhentaiBySearchAsync(string search)
{
var models = await SearchNhentaiInternalAsync(search);
models = models.Where(x => !x.Tags.Any(t => _bannedTags.Contains(t.Name))).ToArray();
if (models.Length == 0)
return null;
return ModelToGallery(models[_rng.Next(0, models.Length)]);
}
#endregion
}
}

View File

@@ -1132,8 +1132,6 @@ clubbans:
clubleaderboard:
- clublb
- clubs
nsfwclearcache:
- nsfwcc
clubadmin:
- clubadmin
autoboobs:

View File

@@ -1069,10 +1069,6 @@ revimg:
desc: "Returns a Google reverse image search for an image from a link."
args:
- "Image link"
safebooru:
desc: "Shows a random image from safebooru with a given tag. Tag is optional but preferred. (multiple tags are appended with +)"
args:
- "yuri+kissing"
wiki:
desc: "Gives you back a wikipedia link"
args:
@@ -1087,33 +1083,55 @@ avatar:
args:
- "@Someone"
hentai:
desc: "Shows a hentai image from a random website (gelbooru, danbooru, konachan or yandere) with a given tag. Tag is optional but preferred. Only 1 tag allowed."
desc: "Shows a hentai image from a random website (gelbooru, danbooru, konachan or yandere) with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags. Only 1 tag allowed."
args:
- "yuri"
nhentai:
desc: "Shows basic information about a hentai with the specified id, or a valid nhentai search query."
args:
- "273426"
- "cute girl"
autohentai:
desc: "Posts a hentai every X seconds with a random tag from the provided tags. Use `|` to separate tag groups. Random group will be chosen every time the image is sent. Max 2 tags per group. 20 seconds minimum. Provide no parameters to disable."
args:
- "30 yuri kissing|tail long_hair"
- ""
hentaibomb:
desc: "Shows a total 5 images (from gelbooru, danbooru, konachan and yandere). Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri"
yandere:
desc: "Shows a random image from yandere with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri kissing"
danbooru:
desc: "Shows a random hentai image from danbooru with a given tag. Tag is optional but preferred. (multiple tags are appended with +)"
desc: "Shows a random hentai image from danbooru with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri+kissing"
- "yuri kissing"
derpibooru:
desc: "Shows a random image from derpibooru with a given tag. Tag is optional but preferred."
desc: "Shows a random image from derpibooru with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri+kissing"
- "yuri kissing"
gelbooru:
desc: "Shows a random hentai image from gelbooru with a given tag. Tag is optional but preferred. (multiple tags are appended with +)"
desc: "Shows a random hentai image from gelbooru with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri+kissing"
- "yuri kissing"
sankaku:
desc: "Shows a random hentai image from chan.sankakucomplex.com with a given tag. Tag is optional but preferred. (multiple tags are appended with +)"
desc: "Shows a random hentai image from chan.sankakucomplex.com with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri+kiss"
- "yuri kiss"
rule34:
desc: "Shows a random image from rule34.xx with a given tag. Tag is optional but preferred. (multiple tags are appended with +)"
desc: "Shows a random image from rule34.xx with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri+kissing"
- "yuri kissing"
e621:
desc: "Shows a random hentai image from e621.net with a given tag. Tag is optional but preferred. (multiple tags are appended with +)"
desc: "Shows a random hentai image from e621.net with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri+kissing"
- "yuri kissing"
safebooru:
desc: "Shows a random image from safebooru with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri kissing"
boobs:
desc: "Real adult content."
args:
@@ -1229,7 +1247,7 @@ voicemute:
- "1h30m @Someone"
- "1h @Someone silence"
konachan:
desc: "Shows a random hentai image from konachan with a given tag. Tag is optional but preferred."
desc: "Shows a random hentai image from konachan with a given tag. Tag(s) are optional but preferred. Maximum is usually 2 tags."
args:
- "yuri"
muterole:
@@ -1299,10 +1317,6 @@ listservers:
desc: "Lists servers the bot is on with some basic info. 15 per page."
args:
- "3"
hentaibomb:
desc: "Shows a total 5 images (from gelbooru, danbooru, konachan and yandere). Tag is optional but preferred."
args:
- "yuri"
cleverbot:
desc: "Toggles cleverbot session. When enabled, the bot will reply to messages starting with bot mention in the server. Custom reactions starting with %bot.mention% won't work if cleverbot is enabled."
args:
@@ -1316,10 +1330,6 @@ wikia:
args:
- "mtg Vigilance"
- "mlp Dashy"
yandere:
desc: "Shows a random image from yandere with a given tag. Tag is optional but preferred. (multiple tags are appended with +)"
args:
- "tag1+tag2"
magicthegathering:
desc: "Searches for a Magic The Gathering card."
args:
@@ -1368,11 +1378,6 @@ activity:
desc: "Checks for spammers."
args:
- ""
autohentai:
desc: "Posts a hentai every X seconds with a random tag from the provided tags. Use `|` to separate tag groups. Random group will be chosen every time the image is sent. Use `+` for multiple tags (max 2 per group). 20 seconds minimum. Provide no parameters to disable."
args:
- "30 yuri+kissing|tail+long_hair"
- ""
setstatus:
desc: "Sets the bot's status. (Online/Idle/Dnd/Invisible)"
args:
@@ -1899,10 +1904,6 @@ clublb:
desc: "Shows club rankings on the specified page."
args:
- "2"
nsfwcc:
desc: "Clears nsfw cache."
args:
- ""
clubadmin:
desc: "Assigns (or unassigns) staff role to the member of the club. Admins can ban, kick and accept applications."
args:
@@ -2118,11 +2119,6 @@ purgeuser:
desc: "Purge user from the database completely. This includes currency, xp, clubs that user owns, waifu info"
args:
- "@Oblivion"
nhentai:
desc: "Shows basic information about a hentai with the specified id, or a valid nhentai search query."
args:
- "273426"
- "cute girl"
imageonlychannel:
desc: |-
Toggles whether the channel only allows images.