Possible fix for #1523; I'm not testing it, though :D

Master Kwoth 2017-09-10 23:44:24 +02:00
parent 531633b018
commit 4adf85a9eb
3 changed files with 58 additions and 47 deletions

View File

@@ -4,7 +4,6 @@ using Newtonsoft.Json.Linq;
 using System;
 using System.Linq;
 using System.Threading.Tasks;
-using System.Net.Http;
 using NadekoBot.Extensions;
 using System.Threading;
 using System.Collections.Concurrent;
@@ -15,7 +14,6 @@ using NadekoBot.Modules.Searches.Common;
 using NadekoBot.Modules.Searches.Services;
 using NadekoBot.Modules.NSFW.Exceptions;
-//todo static httpclient

 namespace NadekoBot.Modules.NSFW
 {
     public class NSFW : NadekoTopLevelModule<SearchesService>
@@ -160,10 +158,7 @@ namespace NadekoBot.Modules.NSFW
             try
             {
                 JToken obj;
-                using (var http = new HttpClient())
-                {
-                    obj = JArray.Parse(await http.GetStringAsync($"http://api.oboobs.ru/boobs/{new NadekoRandom().Next(0, 10330)}").ConfigureAwait(false))[0];
-                }
+                obj = JArray.Parse(await _service.Http.GetStringAsync($"http://api.oboobs.ru/boobs/{new NadekoRandom().Next(0, 10330)}").ConfigureAwait(false))[0];
                 await Context.Channel.SendMessageAsync($"http://media.oboobs.ru/{obj["preview"]}").ConfigureAwait(false);
             }
             catch (Exception ex)
@@ -178,10 +173,7 @@ namespace NadekoBot.Modules.NSFW
             try
             {
                 JToken obj;
-                using (var http = new HttpClient())
-                {
-                    obj = JArray.Parse(await http.GetStringAsync($"http://api.obutts.ru/butts/{new NadekoRandom().Next(0, 4335)}").ConfigureAwait(false))[0];
-                }
+                obj = JArray.Parse(await _service.Http.GetStringAsync($"http://api.obutts.ru/butts/{new NadekoRandom().Next(0, 4335)}").ConfigureAwait(false))[0];
                 await Context.Channel.SendMessageAsync($"http://media.obutts.ru/{obj["preview"]}").ConfigureAwait(false);
             }
             catch (Exception ex)
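The hunks above swap the per-command using (var http = new HttpClient()) blocks for a single client exposed by the injected service (_service.Http), so each command reuses one HttpClient for its HTTP calls instead of constructing and disposing its own. A minimal sketch of that ownership pattern, with ExampleService and ExampleModule as illustrative names rather than types from the repository:

using System.Net.Http;
using System.Threading.Tasks;

// Sketch only: one HttpClient per service lifetime, reused by every command,
// instead of a new client per invocation.
public class ExampleService
{
    public HttpClient Http { get; } = new HttpClient();
}

public class ExampleModule
{
    private readonly ExampleService _service;

    public ExampleModule(ExampleService service)
    {
        _service = service;
    }

    public async Task<string> FetchAsync(string url)
    {
        // Reuses the long-lived client rather than newing one per call.
        return await _service.Http.GetStringAsync(url).ConfigureAwait(false);
    }
}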

View File

@@ -20,18 +20,25 @@ namespace NadekoBot.Modules.Searches.Common
         private readonly SortedSet<ImageCacherObject> _cache;
         private readonly Logger _log;
+        private readonly HttpClient _http;

         public SearchImageCacher()
         {
+            _http = new HttpClient();
+            _http.AddFakeHeaders();

             _log = LogManager.GetCurrentClassLogger();
             _rng = new NadekoRandom();
             _cache = new SortedSet<ImageCacherObject>();
         }

-        public async Task<ImageCacherObject> GetImage(string tag, bool forceExplicit, DapiSearchType type)
+        public async Task<ImageCacherObject> GetImage(string tag, bool forceExplicit, DapiSearchType type,
+            HashSet<string> blacklistedTags = null)
         {
             tag = tag?.ToLowerInvariant();
+            blacklistedTags = blacklistedTags ?? new HashSet<string>();

             if (type == DapiSearchType.E621)
                 tag = tag?.Replace("yuri", "female/female");
@@ -63,6 +70,9 @@ namespace NadekoBot.Modules.Searches.Common
             else
             {
                 var images = await DownloadImages(tag, forceExplicit, type).ConfigureAwait(false);
+                images = images
+                    .Where(x => x.Tags.All(t => !blacklistedTags.Contains(t)))
+                    .ToArray();
                 if (images.Length == 0)
                     return null;
                 var toReturn = images[_rng.Next(images.Length)];
@@ -116,48 +126,40 @@ namespace NadekoBot.Modules.Searches.Common
                     website = $"https://yande.re/post.json?limit=100&tags={tag}";
                     break;
             }

-            using (var http = new HttpClient())
-            {
-                http.AddFakeHeaders();
-                if (type == DapiSearchType.Konachan || type == DapiSearchType.Yandere ||
-                    type == DapiSearchType.E621 || type == DapiSearchType.Danbooru)
-                {
-                    var data = await http.GetStringAsync(website).ConfigureAwait(false);
-                    return JsonConvert.DeserializeObject<DapiImageObject[]>(data)
-                        .Where(x => x.File_Url != null)
-                        .Select(x => new ImageCacherObject(x, type))
-                        .ToArray();
-                }
-                return (await LoadXmlAsync(website, type)).ToArray();
-            }
+            if (type == DapiSearchType.Konachan || type == DapiSearchType.Yandere ||
+                type == DapiSearchType.E621 || type == DapiSearchType.Danbooru)
+            {
+                var data = await _http.GetStringAsync(website).ConfigureAwait(false);
+                return JsonConvert.DeserializeObject<DapiImageObject[]>(data)
+                    .Where(x => x.File_Url != null)
+                    .Select(x => new ImageCacherObject(x, type))
+                    .ToArray();
+            }
+            return (await LoadXmlAsync(website, type)).ToArray();
         }

         private async Task<ImageCacherObject[]> LoadXmlAsync(string website, DapiSearchType type)
         {
             var list = new List<ImageCacherObject>();
-            using (var http = new HttpClient())
+            using (var reader = XmlReader.Create(await _http.GetStreamAsync(website), new XmlReaderSettings()
             {
-                using (var reader = XmlReader.Create(await http.GetStreamAsync(website), new XmlReaderSettings()
-                {
-                    Async = true,
-                }))
-                {
-                    while (await reader.ReadAsync())
-                    {
-                        if (reader.NodeType == XmlNodeType.Element &&
-                            reader.Name == "post")
-                        {
-                            list.Add(new ImageCacherObject(new DapiImageObject()
-                            {
-                                File_Url = reader["file_url"],
-                                Tags = reader["tags"],
-                                Rating = reader["rating"] ?? "e"
-                            }, type));
-                        }
-                    }
-                }
+                Async = true,
+            }))
+            {
+                while (await reader.ReadAsync())
+                {
+                    if (reader.NodeType == XmlNodeType.Element &&
+                        reader.Name == "post")
+                    {
+                        list.Add(new ImageCacherObject(new DapiImageObject()
+                        {
+                            File_Url = reader["file_url"],
+                            Tags = reader["tags"],
+                            Rating = reader["rating"] ?? "e"
+                        }, type));
+                    }
+                }
             }
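In SearchImageCacher, besides reusing a single _http client with fake headers, GetImage now accepts an optional blacklistedTags set and discards any downloaded image whose tags intersect it. A standalone sketch of that filtering step, with a simplified image type standing in for ImageCacherObject:

using System.Collections.Generic;
using System.Linq;

// Simplified stand-in for ImageCacherObject: only the tag list matters here.
public class CachedImage
{
    public string[] Tags { get; set; }
}

public static class TagFilter
{
    // Keeps only images containing none of the blacklisted tags, mirroring
    // the Where(x => x.Tags.All(t => !blacklistedTags.Contains(t))) clause
    // added in the hunk above.
    public static CachedImage[] RemoveBlacklisted(
        IEnumerable<CachedImage> images,
        HashSet<string> blacklistedTags)
    {
        blacklistedTags = blacklistedTags ?? new HashSet<string>();
        return images
            .Where(x => x.Tags.All(t => !blacklistedTags.Contains(t)))
            .ToArray();
    }
}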

View File

@@ -14,11 +14,14 @@ using NadekoBot.Services.Database.Models;
 using System.Linq;
 using Microsoft.EntityFrameworkCore;
 using NadekoBot.Modules.NSFW.Exceptions;
+using System.Net.Http;

 namespace NadekoBot.Modules.Searches.Services
 {
     public class SearchesService : INService
     {
+        public HttpClient Http { get; }
         private readonly DiscordSocketClient _client;
         private readonly IGoogleApiService _google;
         private readonly DbService _db;
@@ -41,6 +44,8 @@ namespace NadekoBot.Modules.Searches.Services
         public SearchesService(DiscordSocketClient client, IGoogleApiService google, DbService db, IEnumerable<GuildConfig> gcs)
         {
+            Http = new HttpClient();
+            Http.AddFakeHeaders();
             _client = client;
             _google = google;
             _db = db;
@@ -128,14 +133,26 @@ namespace NadekoBot.Modules.Searches.Services
         public Task<ImageCacherObject> DapiSearch(string tag, DapiSearchType type, ulong? guild, bool isExplicit = false)
         {
-            if (guild.HasValue && GetBlacklistedTags(guild.Value)
-                .Any(x => tag.ToLowerInvariant().Contains(x)))
+            if (guild.HasValue)
             {
-                throw new TagBlacklistedException();
+                var blacklistedTags = GetBlacklistedTags(guild.Value);
+                if (blacklistedTags
+                    .Any(x => tag.ToLowerInvariant().Contains(x)))
+                {
+                    throw new TagBlacklistedException();
+                }
+
+                var cacher = _imageCacher.GetOrAdd(guild.Value, (key) => new SearchImageCacher());
+                return cacher.GetImage(tag, isExplicit, type, blacklistedTags);
             }
-            var cacher = _imageCacher.GetOrAdd(guild ?? 0, (key) => new SearchImageCacher());
-            return cacher.GetImage(tag, isExplicit, type);
+            else
+            {
+                var cacher = _imageCacher.GetOrAdd(guild ?? 0, (key) => new SearchImageCacher());
+                return cacher.GetImage(tag, isExplicit, type);
+            }
         }

         public HashSet<string> GetBlacklistedTags(ulong guildId)
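DapiSearch now branches on whether the search comes from a guild: guild searches check the guild's blacklisted tags up front, fetch a per-guild SearchImageCacher from the ConcurrentDictionary via GetOrAdd, and pass the blacklist down to GetImage; non-guild searches fall back to the cacher stored under key 0 without a blacklist. A rough sketch of that routing, using placeholder types where the real ones are not shown in the diff:

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;

// Illustrative model of the routing logic; Cacher and the blacklist lookup
// are stand-ins for SearchImageCacher and GetBlacklistedTags.
public class ExampleDapiRouter
{
    public sealed class Cacher { /* per-guild image cache stand-in */ }

    private readonly ConcurrentDictionary<ulong, Cacher> _imageCacher
        = new ConcurrentDictionary<ulong, Cacher>();

    private readonly Dictionary<ulong, HashSet<string>> _blacklists
        = new Dictionary<ulong, HashSet<string>>();

    public Cacher Route(string tag, ulong? guild)
    {
        if (guild.HasValue)
        {
            var blacklisted = _blacklists.TryGetValue(guild.Value, out var set)
                ? set
                : new HashSet<string>();

            // Reject the search outright if the query contains a blacklisted
            // tag, mirroring the TagBlacklistedException path in the diff.
            if (blacklisted.Any(x => tag.ToLowerInvariant().Contains(x)))
                throw new InvalidOperationException("Tag is blacklisted.");

            return _imageCacher.GetOrAdd(guild.Value, _ => new Cacher());
        }

        // Non-guild (e.g. DM) searches share the cacher stored under key 0.
        return _imageCacher.GetOrAdd(0, _ => new Cacher());
    }
}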