/*
 * Copyright 2013 Luca Beltrame
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) version 3, or any later version
 * accepted by the membership of KDE e.V. (or its successor approved
 * by the membership of KDE e.V.), which shall act as a proxy
 * defined in Section 14 of version 3 of the license.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 */

// Qt
#include <QDebug>
#include <QMap>
#include <QPixmap>
#include <QUrl>

// KDE
#include <KIO/Scheduler>
#include <KIO/StoredTransferJob>
#include <KImageCache>

// Own
#include "danbooruservice.h"
#include "danboorupost.h"
#include "danboorupool.h"
#include "danboorutag.h"
#include "utils.h"

namespace Danbooru {

using KIO::StoredTransferJob;

const QString DanbooruService::POST_URL        = "post/index.json";
const QString DanbooruService::TAG_URL         = "tag/index.xml";
const QString DanbooruService::POOL_URL        = "pool/index.json";
const QString DanbooruService::ARTIST_URL      = "artist/index.json";
const QString DanbooruService::POOL_DATA_URL   = "pool/show.xml";
const QString DanbooruService::RELATED_TAG_URL = "tag/related.json";

DanbooruService::DanbooruService(QUrl& boardUrl, QString username,
                                 QString password, KImageCache* cache,
                                 QObject* parent):
    QObject(parent),
    m_url(boardUrl),
    m_username(username),
    m_password(password),
    m_maxRating(Danbooru::DanbooruPost::Safe),
    m_currentPosts(0),
    m_cache(cache)
{
}

DanbooruService::~DanbooruService()
{
}
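// NOTE: the request URLs used below are assembled by requestUrl() (utils.h)
// from the board URL, one of the API paths above and the query parameters.
// For a typical Danbooru 1.x board a post request is therefore expected to
// look roughly like (hypothetical board name):
//
//     http://example-board.org/post/index.json?limit=10&page=1&tags=landscape
//
// with the stored credentials appended in the same way when they are set.
// The exact layout depends on the requestUrl() implementation.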
void DanbooruService::getPostList(int page, QStringList tags, int limit)
{
    // We can't fetch more than 100 items, API limitation
    limit = limit > 100 ? 100 : limit;

    QMap<QString, QString> parameters;
    parameters.insert("limit", QString::number(limit));
    parameters.insert("page", QString::number(page));

    QUrl danbooruUrl = requestUrl(m_url, POST_URL, m_username, m_password,
                                  parameters, tags);

    // qDebug() << "Final constructed post URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    // This job can use JSON data
    job->setProperty("needsXML", false);

    connect(job, &KIO::StoredTransferJob::result,
            this, &DanbooruService::processPostList);
}

void DanbooruService::getTagList(int limit, QString name)
{
    QMap<QString, QString> parameters;
    parameters.insert("limit", QString::number(limit));

    if (!name.isEmpty()) {
        parameters.insert("name", name);
    }

    parameters.insert("order", "date");

    QUrl danbooruUrl = requestUrl(m_url, TAG_URL, m_username, m_password,
                                  parameters);

    // qDebug() << "Final constructed tag URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    connect(job, &KIO::StoredTransferJob::result,
            this, &DanbooruService::processTagList);
}

void DanbooruService::getPool(int poolId, int page)
{
    QMap<QString, QString> parameters;
    parameters.insert("id", QString::number(poolId));

    if (page > 1) {
        parameters.insert("page", QString::number(page));
    }

    QUrl danbooruUrl = requestUrl(m_url, POOL_DATA_URL, m_username,
                                  m_password, parameters);

    // qDebug() << "Final constructed pool URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    // HACK: Most Danbooru implementations don't provide valid data on
    // pools via JSON, hence we request XML and parse the XML data.
    job->setProperty("needsXML", true);

    connect(job, &KIO::StoredTransferJob::result,
            this, &DanbooruService::processPostList);
}

void DanbooruService::getPoolList(int page)
{
    QUrl danbooruUrl;

    if (page == 0) {
        danbooruUrl = requestUrl(m_url, POOL_URL, m_username, m_password);
    } else {
        QMap<QString, QString> map;
        map.insert("page", QString::number(page));
        danbooruUrl = requestUrl(m_url, POOL_URL, m_username, m_password, map);
    }

    // qDebug() << "Final constructed pool list URL" << danbooruUrl.url();

    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    // This job can use JSON data
    job->setProperty("needsXML", false);

    connect(job, &KIO::StoredTransferJob::result,
            this, &DanbooruService::processPoolList);
}

void DanbooruService::getRelatedTags(const QStringList& tags,
                                     DanbooruTag::TagType tagType)
{
    QString type;

    switch (tagType) {
    case DanbooruTag::General:
        type = "general";
        break;
    case DanbooruTag::Artist:
        type = "artist";
        break;
    case DanbooruTag::Copyright:
        type = "copyright";
        break;
    case DanbooruTag::Character:
        type = "character";
        break;
    case DanbooruTag::Unknown:
        type = "unknown";
        break;
    }

    QMap<QString, QString> parameters;
    parameters.insert("type", type);

    QUrl danbooruUrl = requestUrl(m_url, RELATED_TAG_URL, m_username,
                                  m_password, parameters, tags);

    // qDebug() << "Final constructed related tag URL" << danbooruUrl.url();

    StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                            KIO::HideProgressInfo);

    connect(job, &StoredTransferJob::result,
            this, &DanbooruService::processRelatedTagList);
}

// Getters / setters

void DanbooruService::setBlacklist(const QSet<QString>& blacklist)
{
    if (!blacklist.isEmpty()) {
        m_blacklist = blacklist;
    }
}

const QSet<QString> DanbooruService::blacklist() const
{
    return m_blacklist;
}

const QStringList DanbooruService::allowedRatings() const
{
    QStringList ratings;

    if (m_maxRating.testFlag(DanbooruPost::Safe)) {
        ratings.append("Safe");
    }

    if (m_maxRating.testFlag(DanbooruPost::Questionable)) {
        ratings.append("Questionable");
    }

    if (m_maxRating.testFlag(DanbooruPost::Explicit)) {
        ratings.append("Explicit");
    }

    return ratings;
}

void DanbooruService::setMaximumAllowedRating(const DanbooruPost::Rating& rating)
{
    // Ratings are cumulative: allowing Explicit also allows Questionable
    // and Safe, and allowing Questionable also allows Safe.
    Ratings flags;

    switch (rating) {
    case DanbooruPost::Safe:
        flags = DanbooruPost::Safe;
        break;
    case DanbooruPost::Questionable:
        flags = DanbooruPost::Safe | DanbooruPost::Questionable;
        break;
    case DanbooruPost::Explicit:
        flags = DanbooruPost::Safe | DanbooruPost::Questionable
                | DanbooruPost::Explicit;
        break;
    }

    m_maxRating = flags;
}

const Ratings DanbooruService::maximumAllowedRating() const
{
    return m_maxRating;
}
// Slots

void DanbooruService::processPostList(KJob* job)
{
    // qDebug() << "Got post data OK";

    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();

    bool ok;
    bool needsXML = job->property("needsXML").toBool();

    QList<QVariant> postList;

    if (needsXML) {
        // Special case for pools, which only return valid data as XML
        postList = parseDanbooruResult(data, QString("post"), &ok);
    } else {
        postList = parseDanbooruResult(data, &ok).toList();
    }

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    // How many posts do we have to fetch?
    m_currentPosts = postList.length();

    for (auto element : postList) {
        QVariantMap map = element.toMap();
        DanbooruPost* post = new DanbooruPost(map);

        // First check: the maximum allowed rating
        if (post->rating() > m_maxRating) {
            m_currentPosts--;
            delete post;
            continue;
        }

        // Second check: the blacklist. We make a copy because intersect()
        // would otherwise change the set in place.
        QSet<QString> temp = m_blacklist;
        temp = temp.intersect(post->tags());

        if (!temp.isEmpty()) {
            // Blacklisted tags are present, do not use this post
            m_currentPosts--;
            delete post;
            continue;
        }

        QPixmap* pix = new QPixmap();
        bool result;

        if (m_cache) {
            result = m_cache->findPixmap(post->thumbnailUrl().url(), pix);
        } else {
            result = false;
        }

        if (result) {
            post->setPixmap(pix);
            Q_EMIT(postDownloaded(post));
            m_currentPosts--;

            // Shortcut in case all posts (or the last post) were found
            // in the cache
            if (m_currentPosts == 0) {
                Q_EMIT(postDownloadFinished());
                return;
            }
        } else {
            delete pix;
            StoredTransferJob* pixmapJob = KIO::storedGet(
                post->thumbnailUrl(), KIO::NoReload, KIO::HideProgressInfo);

            // We don't want to overload the servers, so give the thumbnail
            // job a reasonable priority
            KIO::Scheduler::setJobPriority(
                static_cast<KIO::SimpleJob*>(pixmapJob), 1);

            QVariant variant;
            variant.setValue(post);
            pixmapJob->setProperty("danbooruPost", variant);

            connect(pixmapJob, &StoredTransferJob::result,
                    this, &DanbooruService::downloadThumbnail);
        }
    }
}
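// For reference, a single post entry handled above is expected to be a map
// of the fields exposed by the Danbooru 1.x API, roughly:
//
//     {
//         "id": 12345,
//         "tags": "landscape sky",
//         "rating": "s",
//         "preview_url": "http://example-board.org/data/preview/example.jpg",
//         "file_url": "http://example-board.org/data/example.jpg",
//         ...
//     }
//
// Field names can vary between board implementations; DanbooruPost is
// responsible for interpreting the map.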
void DanbooruService::processTagList(KJob* job)
{
    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();

    bool ok;

    // Most Danbooru implementations return tags in the wrong order when
    // using JSON, so we have to fall back to XML
    QList<QVariant> tagList = parseDanbooruResult(data, "tag", &ok);

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    for (auto element : tagList) {
        QVariantMap map = element.toMap();
        DanbooruTag* tag = new DanbooruTag(map);
        Q_EMIT(tagDownloaded(tag));
    }
}

void DanbooruService::processRelatedTagList(KJob* job)
{
    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();
    bool ok;

    QVariantMap tagList = parseDanbooruResult(data, &ok).toMap();

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    // The service returns a map of tag name / related tag list pairs.
    // We iterate through them, skip the empty (not found) ones, and call
    // getTagList for each related tag. Unfortunately Danbooru has no
    // method to fetch full tag data in batch, so this is done one by one.
    QVariantMap::const_iterator iter;

    for (iter = tagList.constBegin(); iter != tagList.constEnd(); ++iter) {
        QList<QVariant> tags = iter.value().toList();

        if (tags.isEmpty()) {
            continue;
        }

        for (auto tag : tags) {
            // Each entry is a (name, ID) pair; we only need the first
            // element, since Danbooru has no API methods using the ID
            QString tagName = tag.toList()[0].toString();
            getTagList(1, tagName);
        }
    }
}

void DanbooruService::processPoolList(KJob* job)
{
    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();

    bool ok;

    QList<QVariant> poolList = parseDanbooruResult(data, &ok).toList();

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    for (auto element : poolList) {
        QVariantMap map = element.toMap();
        DanbooruPool* pool = new DanbooruPool(map);
        Q_EMIT(poolDownloaded(pool));
    }

    // qDebug() << "Pool download finished!";
    Q_EMIT(poolDownloadFinished());
}

void DanbooruService::downloadAllTags(KJob* job)
{
    Q_UNUSED(job)
}

void DanbooruService::downloadThumbnail(KJob* job)
{
    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    QVariant postData = job->property("danbooruPost");
    DanbooruPost* post = postData.value<DanbooruPost*>();

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QPixmap* pix = new QPixmap();
    bool ok = pix->loadFromData(jobResult->data());

    if (!ok) {
        delete pix;
        Q_EMIT(downloadError(QString("Pixmap data could not be loaded")));
        return;
    }

    post->setPixmap(pix);

    if (m_cache) {
        // qDebug() << "Inserting item in cache";
        m_cache->insertPixmap(post->thumbnailUrl().url(), *pix);
    }

    m_currentPosts--; // One less post to do

    // qDebug() << "Current posts remaining" << m_currentPosts;

    Q_EMIT(postDownloaded(post));

    if (m_currentPosts == 0) {
        // qDebug() << "Post download finished";
        Q_EMIT(postDownloadFinished());
    }
}

} // namespace Danbooru
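/*
 * Usage sketch (assuming the signals emitted above are declared accordingly
 * in danbooruservice.h, and that a null KImageCache is allowed, as the null
 * checks above suggest; the board URL is hypothetical):
 *
 *     QUrl board("http://example-board.org");
 *     Danbooru::DanbooruService service(board, QString(), QString(),
 *                                       nullptr, nullptr);
 *
 *     QObject::connect(&service, &Danbooru::DanbooruService::postDownloaded,
 *                      [](Danbooru::DanbooruPost* post) {
 *                          qDebug() << post->thumbnailUrl();
 *                      });
 *     QObject::connect(&service, &Danbooru::DanbooruService::downloadError,
 *                      [](const QString& error) {
 *                          qWarning() << error;
 *                      });
 *
 *     // First page of posts tagged "landscape", at most 10 items
 *     service.getPostList(1, QStringList() << "landscape", 10);
 */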