// danbooru-client/src/libdanbooru/danbooruservice.cpp
//
// Commit f8b59e78b5 — Luca Beltrame, 2015-02-05 23:47:29 +01:00
//   Clean up DanbooruService
//   - Remove multi_get for now (broken in kio_http)
//   - Add a default constructor (needed for QML)
//   - Explicitly delete posts

/*
* DanbooruService: asynchronous client for Danbooru-style imageboard APIs.
* Copyright 2013 Luca Beltrame <lbeltrame@kde.org>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License or (at your option) version 3 or any later version
* accepted by the membership of KDE e.V. (or its successor approved
* by the membership of KDE e.V.), which shall act as a proxy
* defined in Section 14 of version 3 of the license.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
// STL
#include <algorithm>
// Qt
#include <QDebug>
#include <qjson/parser.h>
#include <QDebug>
// KDE
#include <KIO/Job>
#include <KIO/Scheduler>
#include <KImageCache>
#include <KIO/MultiGetJob>
// Own
#include "danbooruservice.h"
#include "danboorupost.h"
#include "danboorupool.h"
#include "danboorutag.h"
#include "utils.h"
namespace Danbooru
{
using KIO::StoredTransferJob;
using KIO::MultiGetJob;
// Relative API endpoints, combined with the board's base URL by requestUrl().
// Tag listings and pool details use XML endpoints: as noted below in
// getTagList()/getPool(), many Danbooru implementations return unusable or
// wrongly-ordered JSON for those queries.
const QString DanbooruService::POST_URL = "post/index.json" ;
const QString DanbooruService::TAG_URL = "tag/index.xml";
const QString DanbooruService::POOL_URL = "pool/index.json";
const QString DanbooruService::ARTIST_URL = "artist/index.json";
const QString DanbooruService::POOL_DATA_URL = "pool/show.xml";
const QString DanbooruService::RELATED_TAG_URL = "tag/related.json";
/**
 * Default constructor, required so the type can be instantiated from QML.
 *
 * Leaves the service unconfigured: no board URL, no credentials, and no
 * image cache (callers must tolerate a null m_cache).
 */
DanbooruService::DanbooruService(QObject *parent):
    QObject(parent),
    m_url(QUrl()),
    m_username(QString()),
    m_password(QString()),
    m_maxRating(DanbooruPost::Safe),
    m_postsToFetch(0),
    m_cache(nullptr)   // nullptr instead of 0 for the pointer member
{
}
/**
 * Construct a service bound to a specific board.
 *
 * @param boardUrl  Base URL of the Danbooru board.
 *                  NOTE(review): taken by non-const reference although it is
 *                  only copied here — consider const QUrl & (needs a matching
 *                  header change).
 * @param username  Account name used to authenticate API requests.
 * @param password  Password (or API key) paired with @p username.
 * @param cache     Shared thumbnail cache; may be null, in which case
 *                  thumbnails are re-downloaded every time.
 * @param parent    Standard QObject parent.
 */
DanbooruService::DanbooruService(QUrl &boardUrl, QString username,
QString password, KImageCache *cache,
QObject *parent):
QObject(parent),
m_url(boardUrl),
m_username(username),
m_password(password),
m_maxRating(Danbooru::DanbooruPost::Safe),
m_postsToFetch(0),
m_cache(cache)
{
}
/**
 * Destructor. The service owns the DanbooruPost objects it stores in
 * m_posts, so they are freed explicitly before the container is emptied.
 */
DanbooruService::~DanbooruService()
{
    qDeleteAll(m_posts.begin(), m_posts.end());
    m_posts.clear();
}
/**
 * Request one page of posts matching the given tags.
 *
 * Results are delivered asynchronously through processPostList(), which
 * emits postDownloaded() per post and postDownloadFinished() at the end.
 *
 * @param page  1-based page index passed straight to the API.
 * @param tags  Tag filters appended to the query.
 * @param limit Requested page size; clamped to the API maximum of 100.
 */
void DanbooruService::getPostList(int page, QStringList tags, int limit)
{
    // We can't fetch more than 100 items, API limitation
    limit = std::min(limit, 100);

    QMap<QString, QString> parameters;
    parameters.insert("limit", QString::number(limit));
    parameters.insert("page", QString::number(page));

    QUrl danbooruUrl = requestUrl(m_url, POST_URL, m_username,
                                  m_password, parameters, tags);

    KIO::StoredTransferJob *job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                  KIO::HideProgressInfo);

    // This job can use JSON data, so no XML fallback is required.
    job->setProperty("needsXML", false);

    connect(job, &KIO::StoredTransferJob::result, this,
            &DanbooruService::processPostList);
}
/**
 * Fetch up to @p limit tags, optionally restricted to @p name, ordered by
 * date. Each parsed tag is emitted via tagDownloaded(); decode or transfer
 * failures are reported through downloadError().
 */
void DanbooruService::getTagList(int limit, QString name)
{
    QMap<QString, QString> parameters;
    parameters.insert("limit", QString::number(limit));

    if (!name.isEmpty()) {
        parameters.insert("name", name);
    }

    parameters.insert("order", "date");

    QUrl danbooruUrl = requestUrl(m_url, TAG_URL, m_username, m_password,
                                  parameters);

    KIO::StoredTransferJob *job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                  KIO::HideProgressInfo);

    connect(job, &KIO::StoredTransferJob::result, [this](KJob * job) {
        if (job->error()) {
            Q_EMIT(downloadError(job->errorString()));
            return;
        }

        StoredTransferJob *jobResult = qobject_cast<StoredTransferJob *>(job);
        QByteArray data = jobResult->data();
        bool ok;

        // Most Danbooru implementations return tags in wrong order when
        // using JSON, so we have to fall back to XML
        QList<QVariant> tagList = parseDanbooruResult(data, "tag", &ok);

        if (!ok) {
            Q_EMIT(downloadError(QString("Unable to decode data")));
            return;
        }

        // Iterate by const reference: the original copied a QVariant per
        // element for no benefit.
        for (const auto &element : tagList) {
            QVariantMap map = element.toMap();
            DanbooruTag *tag = new DanbooruTag(map);
            Q_EMIT(tagDownloaded(tag));
        }
    });
}
void DanbooruService::getPool(int poolId, int page)
{
QMap<QString, QString> parameters;
parameters.insert("id", QString::number(poolId));
if (page > 1) {
parameters.insert("page", QString::number(page));
}
QUrl danbooruUrl = requestUrl(m_url, POOL_DATA_URL, m_username,
m_password, parameters);
//qDebug() << "Final constructed pool URL" << danbooruUrl.url();
KIO::StoredTransferJob *job = KIO::storedGet(danbooruUrl, KIO::NoReload,
KIO::HideProgressInfo);
//HACK: Most Danbooru implementations don't provide valid data on
// pools via JSON, hence we set XML and parse the XML data.
job->setProperty("needsXML", true);
connect(job, &KIO::StoredTransferJob::result, this, &DanbooruService::processPostList);
}
void DanbooruService::getPoolList(int page)
{
QUrl danbooruUrl;
if (page == 0) {
danbooruUrl = requestUrl(m_url, POOL_URL, m_username, m_password);
} else {
QMap<QString, QString> map;
map.insert("page", QString::number(page));
danbooruUrl = requestUrl(m_url, POOL_URL, m_username,
m_password, map);
}
//qDebug() << "Final constructed pool list URL" << danbooruUrl.url();
KIO::StoredTransferJob *job = KIO::storedGet(danbooruUrl, KIO::NoReload,
KIO::HideProgressInfo);
// This job can use JSON data
job->setProperty("needsXML", false);
// connect(job, &KIO::StoredTransferJob::result, this, &DanbooruService::processPoolList);
connect(job, &KIO::StoredTransferJob::result, [this](KJob * job) {
if (job->error()) {
Q_EMIT(downloadError(job->errorString()));
return;
}
StoredTransferJob *jobResult = qobject_cast<StoredTransferJob *>(job);
QByteArray data = jobResult->data();
bool ok;
QList<QVariant> poolList = parseDanbooruResult(data, &ok).toList();
if (!ok) {
Q_EMIT(downloadError(QString("Unable to decode data")));
return;
}
for (auto element : poolList) {
QVariantMap map = element.toMap();
DanbooruPool *pool = new DanbooruPool(map);
Q_EMIT(poolDownloaded(pool));
}
//qDebug() << "Pool download finished!";
Q_EMIT(poolDownloadFinished());
}
);
}
/**
 * Fetch tags related to @p tags, restricted to @p tagType.
 *
 * The API returns a map of tag -> [[name, id], ...]; empty entries are
 * skipped and each related tag name is resolved to a full DanbooruTag via
 * getTagList() (Danbooru has no batch tag-fetch API), so results arrive
 * through the tagDownloaded() signal.
 */
void DanbooruService::getRelatedTags(const QStringList &tags,
                                     DanbooruTag::TagType tagType)
{
    QString type;

    switch (tagType) {
    case DanbooruTag::General:
        type = "general";
        break;
    case DanbooruTag::Artist:
        type = "artist";
        break;
    case DanbooruTag::Copyright:
        type = "copyright";
        break;
    case DanbooruTag::Character:
        type = "character";
        break;
    case DanbooruTag::Unknown:
        type = "unknown";
        break;
    }

    QMap<QString, QString> parameters;
    parameters.insert("type", type);

    QUrl danbooruUrl = requestUrl(m_url, RELATED_TAG_URL, m_username,
                                  m_password, parameters, tags);

    StoredTransferJob *job = KIO::storedGet(
        danbooruUrl, KIO::NoReload,
        KIO::HideProgressInfo
    );

    connect(job, &StoredTransferJob::result, [this](KJob * job) {
        if (job->error()) {
            Q_EMIT(downloadError(job->errorString()));
            return;
        }

        StoredTransferJob *jobResult = qobject_cast<StoredTransferJob *>(job);
        QByteArray data = jobResult->data();
        bool ok;

        QVariantMap tagList = parseDanbooruResult(data, &ok).toMap();

        if (!ok) {
            Q_EMIT(downloadError(QString("Unable to decode data")));
            return;
        }

        QVariantMap::const_iterator iter;

        // The service returns a list of key-related tag list pair,
        // we iterate through them and remove the empty (not found) ones, then
        // we call getTagList. Unfortunately Danbooru doesn't have a method to
        // fetch all tags in batch, so this is done one by one.
        for (iter = tagList.constBegin(); iter != tagList.constEnd(); ++iter) {
            QList<QVariant> tags = iter.value().toList();

            if (tags.isEmpty()) {
                continue;
            }

            for (const auto &tag : tags) {
                // Each entry is [name, id]; the ID is useless (no API
                // methods in Danbooru take it). Guard against malformed
                // (empty) entries before indexing — the original used
                // toList()[0] unconditionally.
                const QList<QVariant> tagData = tag.toList();

                if (tagData.isEmpty()) {
                    continue;
                }

                getTagList(1, tagData.at(0).toString());
            }
        }
    });
}
// Getters / setters
/**
 * Replace the tag blacklist used to filter incoming posts.
 *
 * An empty set is treated as "no change" rather than clearing the current
 * blacklist.
 */
void DanbooruService::setBlacklist(const QSet< QString > &blacklist)
{
    if (blacklist.isEmpty()) {
        return;
    }

    m_blacklist = blacklist;
}
/**
 * @return the set of tag names currently used to filter out posts.
 */
const QSet< QString > DanbooruService::blacklist() const
{
return m_blacklist;
}
/**
 * @return the human-readable names of the ratings currently allowed,
 *         ordered from least to most explicit.
 */
const QStringList DanbooruService::allowedRatings() const
{
    // Table-driven translation of the enabled rating flags into labels.
    struct Entry {
        DanbooruPost::Rating flag;
        const char *label;
    };

    const Entry table[] = {
        { DanbooruPost::Safe,         "Safe" },
        { DanbooruPost::Questionable, "Questionable" },
        { DanbooruPost::Explicit,     "Explicit" },
    };

    QStringList ratings;

    for (const Entry &entry : table) {
        if (m_maxRating.testFlag(entry.flag)) {
            ratings.append(entry.label);
        }
    }

    return ratings;
}
/**
 * Set the most explicit rating the user accepts. Every rating implicitly
 * allows the milder ones, so the stored flag set is built cumulatively:
 * Safe -> {Safe}, Questionable -> {Safe, Questionable},
 * Explicit -> {Safe, Questionable, Explicit}.
 */
void DanbooruService::setMaximumAllowedRating(DanbooruPost::Rating rating)
{
    DanbooruPost::Ratings flags = DanbooruPost::Safe;

    if (rating != DanbooruPost::Safe) {
        flags |= DanbooruPost::Questionable;
    }

    if (rating == DanbooruPost::Explicit) {
        flags |= DanbooruPost::Explicit;
    }

    m_maxRating = flags;
}
/**
 * @return the full set of rating flags currently allowed (see
 *         setMaximumAllowedRating() for how it is built).
 */
const DanbooruPost::Ratings DanbooruService::maximumAllowedRating() const
{
return m_maxRating;
}
// Slots
/**
 * Slot invoked when a post (or pool-content) listing job finishes.
 *
 * Parses the payload (XML when the job carries the "needsXML" property,
 * JSON otherwise), discards blacklisted/over-rated posts, stores the
 * surviving posts in m_posts (owned by the service, freed in the
 * destructor), and fetches each post's thumbnail — from the cache when
 * possible, via a KIO job otherwise. Emits postDownloaded() per post and
 * postDownloadFinished() once every thumbnail is accounted for; errors go
 * through downloadError().
 *
 * Fixes over the previous revision:
 *  - posts from the previous listing were cleared without being deleted
 *    (leak, given the qDeleteAll in the destructor);
 *  - missing return after a job error;
 *  - the post list was iterated twice, allocating every DanbooruPost twice
 *    and decrementing m_postsToFetch twice per blacklisted post;
 *  - the scheduler priority was set on the finished listing job instead of
 *    the new thumbnail job;
 *  - m_cache was dereferenced without a null check (the default
 *    constructor leaves it null);
 *  - postDownloadFinished() was never emitted when all posts were
 *    filtered out.
 */
void DanbooruService::processPostList(KJob *job)
{
    // Free the previous page's posts before dropping the pointers.
    qDeleteAll(m_posts);
    m_posts.clear();

    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob *jobResult = qobject_cast<StoredTransferJob *>(job);

    if (jobResult == nullptr) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();
    bool ok;
    bool needsXML = job->property("needsXML").toBool();

    QList<QVariant> postList;

    if (needsXML) {
        // Special cases for pools: only the XML endpoint returns valid data.
        postList = parseDanbooruResult(data, QString("post"), &ok);
    } else {
        postList = parseDanbooruResult(data, &ok).toList();
    }

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    // How many thumbnails are still outstanding; completion is signalled
    // when this reaches zero.
    m_postsToFetch = postList.length();

    int postId = 0;

    // First pass: build the filtered, service-owned post collection.
    for (const auto &element : postList) {
        QVariantMap map = element.toMap();
        DanbooruPost *post = new DanbooruPost(map);

        // Remove unwanted posts
        if (isPostBlacklisted(post, m_blacklist, m_maxRating)) {
            m_postsToFetch--;
            delete post;
            continue;
        }

        m_posts.insert(postId, post);
        postId++;
    }

    if (m_postsToFetch == 0) {
        // Everything was filtered out (or the listing was empty): no
        // thumbnail job will ever fire, so report completion here.
        Q_EMIT(postDownloadFinished());
        return;
    }

    // Second pass: resolve each post's thumbnail.
    for (auto post : m_posts) {
        QPixmap pix;

        if (m_cache && m_cache->findPixmap(post->thumbnailUrl().url(), &pix)) {
            post->setPixmap(pix);
            Q_EMIT(postDownloaded(post));
            m_postsToFetch--;

            if (m_postsToFetch == 0) {
                qDebug() << "Post download finished";
                Q_EMIT(postDownloadFinished());
            }
        } else {
            StoredTransferJob *pixmapJob = KIO::storedGet(post->thumbnailUrl(),
                    KIO::NoReload, KIO::HideProgressInfo);

            // We don't want to overload the servers, so set some rational
            // priority — on the new thumbnail job, not the finished
            // listing job.
            KIO::Scheduler::setJobPriority(
                static_cast<KIO::SimpleJob *>(pixmapJob), 1);

            QVariant variant;
            variant.setValue(post);
            pixmapJob->setProperty("danbooruPost", variant);

            connect(pixmapJob, &StoredTransferJob::result,
                    [post, this, pix](KJob * job) mutable {
                if (job->error()) {
                    Q_EMIT(downloadError(job->errorString()));
                    return;
                }

                StoredTransferJob *jobResult =
                    qobject_cast<StoredTransferJob *>(job);

                if (!pix.loadFromData(jobResult->data())) {
                    Q_EMIT(downloadError(
                        QString("Pixmap data could not be loaded")));
                    return;
                }

                post->setPixmap(pix);

                if (m_cache) {
                    m_cache->insertPixmap(post->thumbnailUrl().url(), pix);
                }

                m_postsToFetch--; // One less post to do

                Q_EMIT(postDownloaded(post));

                if (m_postsToFetch == 0) {
                    qDebug() << "Post download finished";
                    Q_EMIT(postDownloadFinished());
                }
            });
        }
    }
}
// Placeholder slot: batch tag downloading is currently unimplemented (the
// multi_get-based implementation was removed because multi_get is broken in
// kio_http), so the finished job is deliberately ignored.
void DanbooruService::downloadAllTags(KJob *job)
{
Q_UNUSED(job)
}
} // namespace Danbooru