/*
 * <one line to give the library's name and an idea of what it does.>
 * Copyright 2013 Luca Beltrame <lbeltrame@kde.org>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) version 3 or any later version
 * accepted by the membership of KDE e.V. (or its successor approved
 * by the membership of KDE e.V.), which shall act as a proxy
 * defined in Section 14 of version 3 of the license.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */

// Qt
#include <QtCore/QDebug>

// QJson
#include <qjson/parser.h>

// KDE
#include <KIO/Job>
#include <KIO/Scheduler>

// Own
#include "danbooruservice.h"
#include "danboorupost.h"
#include "danboorupool.h"
#include "danboorutag.h"
#include "utils.h"

namespace Danbooru {

using KIO::StoredTransferJob;

const QString DanbooruService::POST_URL = "post/index.json";
const QString DanbooruService::TAG_URL = "tag/index.xml";
const QString DanbooruService::POOL_URL = "pool/index.json";
const QString DanbooruService::ARTIST_URL = "artist/index.json";
const QString DanbooruService::POOL_DATA_URL = "pool/show.xml";
const QString DanbooruService::RELATED_TAG_URL = "tag/related.json";

DanbooruService::DanbooruService(KUrl& boardUrl, QString username,
                                 QString password, QObject* parent):
    QObject(parent),
    m_url(boardUrl),
    m_username(username),
    m_password(password),
    m_maxRating(Safe),
    m_currentPosts(0)
{
}

DanbooruService::~DanbooruService()
{
}

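/* Fetch a page of posts matching the given tags, up to the given limit
 * (capped at 100 posts per request by the Danbooru API). Results are
 * delivered asynchronously through processPostList(). */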
void DanbooruService::getPostList(int page, QStringList tags, int limit)
{
    // We can't fetch more than 100 items, API limitation
    limit = limit > 100 ? 100 : limit;

    QMap<QString, QString> parameters;
    parameters.insert("limit", QString::number(limit));
    parameters.insert("page", QString::number(page));

    KUrl danbooruUrl = requestUrl(m_url, POST_URL, m_username,
                                  m_password, parameters, tags);

    qDebug() << "Final constructed URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    // This job can use JSON data
    job->setProperty("needsXML", false);

    connect(job, SIGNAL(result(KJob*)), this,
            SLOT(processPostList(KJob*)));
}

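/* Fetch up to 'limit' tags, optionally filtered by name, ordered by date.
 * Results are delivered asynchronously through processTagList(). */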
void DanbooruService::getTagList(int limit, QString name)
{
    QMap<QString, QString> parameters;
    parameters.insert("limit", QString::number(limit));

    if (!name.isEmpty()) {
        parameters.insert("name", name);
    }
    parameters.insert("order", "date");

    KUrl danbooruUrl = requestUrl(m_url, TAG_URL, m_username, m_password,
                                  parameters);
    qDebug() << "Final constructed URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    connect(job, SIGNAL(result(KJob*)), this, SLOT(processTagList(KJob*)));
}

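/* Fetch the posts contained in the pool with the given ID, one page at a
 * time. The posts are delivered through processPostList(), like a regular
 * post listing. */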
void DanbooruService::getPool(int poolId, int page)
{
    QMap<QString, QString> parameters;
    parameters.insert("id", QString::number(poolId));

    if (page > 1) {
        parameters.insert("page", QString::number(page));
    }

    KUrl danbooruUrl = requestUrl(m_url, POOL_DATA_URL, m_username,
                                  m_password, parameters);

    qDebug() << "Final constructed URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    // HACK: Most Danbooru implementations don't provide valid data on
    // pools via JSON, hence we set XML and parse the XML data.
    job->setProperty("needsXML", true);

    connect(job, SIGNAL(result(KJob*)), this,
            SLOT(processPostList(KJob*)));
}

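/* Fetch the list of pools, optionally starting from a specific page.
 * Results are delivered asynchronously through processPoolList(). */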
void DanbooruService::getPoolList(int page)
{
    KUrl danbooruUrl;

    if (page == 0) {
        danbooruUrl = requestUrl(m_url, POOL_URL, m_username, m_password);
    } else {
        QMap<QString, QString> map;
        map.insert("page", QString::number(page));

        danbooruUrl = requestUrl(m_url, POOL_URL, m_username,
                                 m_password, map);
    }

    qDebug() << "Final constructed URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);
    // This job can use JSON data
    job->setProperty("needsXML", false);

    connect(job, SIGNAL(result(KJob*)), this,
            SLOT(processPoolList(KJob*)));
}

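/* Fetch the tags related to the given ones, restricted to a specific tag
 * type. Results are delivered asynchronously through
 * processRelatedTagList(). */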
void DanbooruService::getRelatedTags(const QStringList& tags,
                                     Danbooru::TagType tagType)
{
    qDebug() << "We start with these" << tags;

    QString type;
    switch (tagType) {
    case Danbooru::General:
        type = "general";
        break;
    case Danbooru::Artist:
        type = "artist";
        break;
    case Danbooru::Copyright:
        type = "copyright";
        break;
    case Danbooru::Character:
        type = "character";
        break;
    case Danbooru::Unknown:
        type = "unknown";
        break;
    }

    QMap<QString, QString> parameters;
    parameters.insert("type", type);

    KUrl danbooruUrl = requestUrl(m_url, RELATED_TAG_URL, m_username,
                                  m_password, parameters, tags);

    qDebug() << "Final constructed URL" << danbooruUrl.url();

    StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                            KIO::HideProgressInfo);

    connect(job, SIGNAL(result(KJob*)), this,
            SLOT(processRelatedTagList(KJob*)));
}

// Getters / setters

void DanbooruService::setBlacklist(const QSet<QString>& blacklist)
{
    if (!blacklist.isEmpty()) {
        m_blacklist = blacklist;
    }
}

const QSet<QString> DanbooruService::blacklist() const
{
    return m_blacklist;
}

const QStringList DanbooruService::allowedRatings() const
{
    QStringList ratings;

    if (m_maxRating.testFlag(Safe)) {
        ratings.append("Safe");
    }

    if (m_maxRating.testFlag(Questionable)) {
        ratings.append("Questionable");
    }

    if (m_maxRating.testFlag(Explicit)) {
        ratings.append("Explicit");
    }

    return ratings;
}

void DanbooruService::setMaximumAllowedRating(const Danbooru::Ratings& rating)
{
    Ratings flags;

    switch (rating) {
    case Safe:
        flags = Safe;
        break;
    case Questionable:
        flags = Safe | Questionable;
        break;
    case Explicit:
        flags = Safe | Questionable | Explicit;
        break;
    }

    m_maxRating = flags;
}

const Ratings DanbooruService::maximumAllowedRating() const
{
    return m_maxRating;
}

// Slots

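/* Handle the result of a post (or pool content) download: parse the data
 * (JSON, or XML if the job carries the "needsXML" property), drop posts
 * that exceed the maximum allowed rating or contain blacklisted tags, and
 * schedule a low-priority thumbnail download for each surviving post. */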
void DanbooruService::processPostList(KJob* job)
{
    qDebug() << "Got post data OK";

    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();

    qDebug() << jobResult->mimetype();

    bool ok;

    bool needsXML = job->property("needsXML").toBool();

    QList<QVariant> postList;

    if (needsXML) {
        // Special case for pools, which provide valid data only as XML
        postList = parseDanbooruResult(data, QString("post"), &ok);
    } else {
        postList = parseDanbooruResult(data, &ok).toList();
    }

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    // How many posts do we have to fetch?
    m_currentPosts = postList.length();

    Q_FOREACH(const QVariant& element, postList) {

        QVariantMap map = element.toMap();
        DanbooruPost* post = new DanbooruPost(map);

        // qDebug() << "Got rating" << post->rating();

        // First check: rating
        if (post->rating() > m_maxRating) {
            // qDebug() << "Skipping " << post->fileUrl();
            // qDebug() << "Rating was " << post->rating();
            m_currentPosts--;
            delete post;
            continue;
        }

        // Second check: blacklist. We make a copy because intersect()
        // would otherwise change the set in place.
        QSet<QString> temp = m_blacklist;
        temp = temp.intersect(post->tags());

        if (!temp.isEmpty()) {
            // Blacklisted tags are present, do not use this post
            m_currentPosts--;
            delete post;
            continue;
        }

        StoredTransferJob* pixmapJob = KIO::storedGet(post->thumbnailUrl(),
                KIO::NoReload, KIO::HideProgressInfo);

        // We don't want to overload the servers, so give the thumbnail
        // job a reasonably low priority
        KIO::Scheduler::setJobPriority(
            static_cast<KIO::SimpleJob*>(pixmapJob), 1);

        // Attach the post to the job so downloadThumbnail() can get it back
        QVariant variant;
        variant.setValue(post);

        pixmapJob->setProperty("danbooruPost", variant);
        connect(pixmapJob, SIGNAL(result(KJob*)), this,
                SLOT(downloadThumbnail(KJob*)));
    }
}

void DanbooruService::processTagList(KJob* job)
{
    qDebug() << "Got tag data OK";

    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();

    bool ok;

    // Most Danbooru implementations return tags in the wrong order when
    // using JSON, so we have to fall back to XML
    QList<QVariant> tagList = parseDanbooruResult(data, "tag", &ok);

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    Q_FOREACH(const QVariant& element, tagList) {
        QVariantMap map = element.toMap();
        DanbooruTag* tag = new DanbooruTag(map);
        Q_EMIT(tagDownloaded(tag));
    }
}

void DanbooruService::processRelatedTagList(KJob* job)
{
    qDebug() << "Got related tag data OK";

    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();
    bool ok;

    QVariantMap tagList = parseDanbooruResult(data, &ok).toMap();

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    QVariantMap::const_iterator iter;

    // The service returns a map of tag -> related tag list pairs. We
    // iterate through them, skip the empty (not found) ones, and call
    // getTagList() for each related tag. Unfortunately Danbooru has no
    // method to fetch full tag records in batch, so this is done one by
    // one.
    for (iter = tagList.constBegin(); iter != tagList.constEnd(); ++iter) {

        QList<QVariant> tags = iter.value().toList();

        if (tags.isEmpty()) {
            continue;
        }

        Q_FOREACH(const QVariant& tag, tags) {
            // Each entry is a [name, id] pair; we take only the name,
            // since the ID has no corresponding API methods in Danbooru
            QString tagName = tag.toList()[0].toString();
            getTagList(1, tagName);
        }
    }
}

void DanbooruService::processPoolList(KJob* job)
{
    qDebug() << "Got pool data OK";

    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();

    bool ok;

    QList<QVariant> poolList = parseDanbooruResult(data, &ok).toList();

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    Q_FOREACH(const QVariant& element, poolList) {
        QVariantMap map = element.toMap();

        DanbooruPool* pool = new DanbooruPool(map);
        Q_EMIT(poolDownloaded(pool));
    }

    qDebug() << "Pool download finished!";
    Q_EMIT(poolDownloadFinished());
}

void DanbooruService::downloadAllTags(KJob* job)
{
    Q_UNUSED(job)
}

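/* Receive the thumbnail data for a post, load it into a pixmap, and emit
 * postDownloaded(). Once the count of pending posts reaches zero, emit
 * postDownloadFinished() as well. */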
void DanbooruService::downloadThumbnail(KJob* job)
{
    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    QVariant postData = job->property("danbooruPost");

    DanbooruPost* post = postData.value<DanbooruPost*>();
    QPixmap* pix = new QPixmap();

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        delete pix;
        return;
    }

    bool ok = pix->loadFromData(jobResult->data());

    if (!ok) {
        Q_EMIT(downloadError(QString("Pixmap data could not be loaded")));
        delete pix;
        return;
    }

    post->setPixmap(pix);

    m_currentPosts--; // One less post to do

    qDebug() << "Current posts remaining" << m_currentPosts;
    Q_EMIT(postDownloaded(post));

    if (m_currentPosts == 0) {
        qDebug() << "Post download finished";
        Q_EMIT(postDownloadFinished());
    }
}

} // namespace Danbooru