diff --git a/src/libdanbooru/danboorupost.h b/src/libdanbooru/danboorupost.h index ba750ab..921ca0f 100644 --- a/src/libdanbooru/danboorupost.h +++ b/src/libdanbooru/danboorupost.h @@ -78,8 +78,6 @@ namespace Danbooru { Q_PROPERTY(KUrl thumbnailUrl READ thumbnailUrl) public: - - /** * @brief Ratings for a Danbooru item diff --git a/src/libdanbooru/danbooruservice.cpp b/src/libdanbooru/danbooruservice.cpp index c3cafcc..18e9dd3 100644 --- a/src/libdanbooru/danbooruservice.cpp +++ b/src/libdanbooru/danbooruservice.cpp @@ -40,550 +40,551 @@ #include "danboorutag.h" #include "utils.h" -namespace Danbooru { +namespace Danbooru +{ - using KIO::StoredTransferJob; +using KIO::StoredTransferJob; - const QString DanbooruService::POST_URL ="post/index.json" ; - const QString DanbooruService::TAG_URL = "tag/index.xml"; - const QString DanbooruService::POOL_URL = "pool/index.json"; - const QString DanbooruService::ARTIST_URL = "artist/index.json"; - const QString DanbooruService::POOL_DATA_URL = "pool/show.xml"; - const QString DanbooruService::RELATED_TAG_URL = "tag/related.json"; +const QString DanbooruService::POST_URL = "post/index.json" ; +const QString DanbooruService::TAG_URL = "tag/index.xml"; +const QString DanbooruService::POOL_URL = "pool/index.json"; +const QString DanbooruService::ARTIST_URL = "artist/index.json"; +const QString DanbooruService::POOL_DATA_URL = "pool/show.xml"; +const QString DanbooruService::RELATED_TAG_URL = "tag/related.json"; - DanbooruService::DanbooruService(KUrl& boardUrl, QString username, - QString password, KImageCache* cache, - QObject* parent): - QObject(parent), - m_url(boardUrl), - m_username(username), - m_password(password), - m_maxRating(Safe), - m_currentPosts(0), - m_cache(cache) - { +DanbooruService::DanbooruService(KUrl& boardUrl, QString username, + QString password, KImageCache* cache, + QObject* parent): + QObject(parent), + m_url(boardUrl), + m_username(username), + m_password(password), + m_maxRating(Safe), + m_currentPosts(0), + m_cache(cache) +{ +} + +DanbooruService::~DanbooruService() +{ + +} + + +void DanbooruService::getPostList(int page, QStringList tags, int limit) +{ + + // We can't fetch more than 100 items, API limitation + + limit = limit > 100 ? 
100 : limit; + + QMap parameters; + + parameters.insert("limit", QString::number(limit)); + parameters.insert("page", QString::number(page)); + + KUrl danbooruUrl = requestUrl(m_url, POST_URL, m_username, + m_password, parameters, tags); + + kDebug() << "Final constructed post URL" << danbooruUrl.url(); + + KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, + KIO::HideProgressInfo); + + // This job can use JSON data + job->setProperty("needsXML", false); + + connect(job, SIGNAL(result(KJob*)), this, + SLOT(processPostList(KJob*))); + +} + +void DanbooruService::getTagList(int limit, QString name) +{ + QMap parameters; + parameters.insert("limit", QString::number(limit)); + + if (!name.isEmpty()) { + parameters.insert("name", name); } + parameters.insert("order", "date"); - DanbooruService::~DanbooruService() - { + KUrl danbooruUrl = requestUrl(m_url, TAG_URL, m_username, m_password, + parameters); + kDebug() << "Final constructed tag URL" << danbooruUrl.url(); - } + KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, + KIO::HideProgressInfo); + connect(job, SIGNAL(result(KJob*)), this, SLOT(processTagList(KJob*))); +} - void DanbooruService::getPostList(int page, QStringList tags, int limit) - { +void DanbooruService::getPool(int poolId, int page) +{ - // We can't fetch more than 100 items, API limitation + QMap parameters; - limit = limit > 100 ? 100: limit; + parameters.insert("id", QString::number(poolId)); - QMap parameters; - - parameters.insert("limit", QString::number(limit)); + if (page > 1) { parameters.insert("page", QString::number(page)); + } - KUrl danbooruUrl = requestUrl(m_url, POST_URL, m_username, - m_password, parameters, tags); + KUrl danbooruUrl = requestUrl(m_url, POOL_DATA_URL, m_username, + m_password, parameters); - kDebug() << "Final constructed post URL" << danbooruUrl.url(); + kDebug() << "Final constructed pool URL" << danbooruUrl.url(); - KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, - KIO::HideProgressInfo); - // This job can use JSON data - job->setProperty("needsXML", false); + KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, + KIO::HideProgressInfo); - connect(job, SIGNAL(result(KJob*)), this, - SLOT(processPostList(KJob*))); + //HACK: Most Danbooru implementations don't provide valid data on + // pools via JSON, hence we set XML and parse the XML data. 
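+    // The "needsXML" flag set just below is read back in processPostList(),
+    // which parses the reply with parseDanbooruResult(data, QString("post"), &ok)
+    // when it is set, and with the plain JSON variant otherwise.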
+ + job->setProperty("needsXML", true); + + connect(job, SIGNAL(result(KJob*)), this, + SLOT(processPostList(KJob*))); + +} + +void DanbooruService::getPoolList(int page) +{ + + KUrl danbooruUrl; + + if (page == 0) { + danbooruUrl = requestUrl(m_url, POOL_URL, m_username, m_password); + } else { + QMap map; + map.insert("page", QString::number(page)); + + danbooruUrl = requestUrl(m_url, POOL_URL, m_username, + m_password, map); + } + + kDebug() << "Final constructed pool list URL" << danbooruUrl.url(); + + KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, + KIO::HideProgressInfo); + // This job can use JSON data + job->setProperty("needsXML", false); + + connect(job, SIGNAL(result(KJob*)), this, + SLOT(processPoolList(KJob*))); + +} + +void DanbooruService::getRelatedTags(const QStringList& tags, + Danbooru::TagType tagType) +{ + + QString type; + switch (tagType) { + case Danbooru::General: + type = "general"; + break; + case Danbooru::Artist: + type = "artist"; + break; + case Danbooru::Copyright: + type = "copyright"; + break; + case Danbooru::Character: + type = "character"; + break; + case Danbooru::Unknown: + type = "unknown"; + break; + } + + QMap parameters; + parameters.insert("type", type); + + KUrl danbooruUrl = requestUrl(m_url, RELATED_TAG_URL, m_username, + m_password, parameters, tags); + + kDebug() << "Final constructed related tag URL" << danbooruUrl.url(); + + StoredTransferJob* job = KIO::storedGet( + danbooruUrl, KIO::NoReload, + KIO::HideProgressInfo + ); + + connect(job, SIGNAL(result(KJob*)), this, + SLOT(processRelatedTagList(KJob*))); + +} + +// Getters / setters + +void DanbooruService::setBlacklist(const QSet< QString >& blacklist) +{ + + if (!blacklist.isEmpty()) { + m_blacklist = blacklist; + } + +} + + +const QSet< QString > DanbooruService::blacklist() const +{ + return m_blacklist; +} + +const QStringList DanbooruService::allowedRatings() const +{ + QStringList ratings; + + if (m_maxRating.testFlag(Safe)) { + ratings.append("Safe"); + } + + if (m_maxRating.testFlag(Questionable)) { + ratings.append("Questionable"); + } + + if (m_maxRating.testFlag(Explicit)) { + ratings.append("Explicit"); + } + + return ratings; + +} + +void DanbooruService::setMaximumAllowedRating(const Danbooru::Ratings& rating) +{ + Ratings flags; + + switch (rating) { + case Safe: + flags = Safe; + break; + case Questionable: + flags = Safe | Questionable; + break; + case Explicit: + flags = Safe | Questionable | Explicit; + break; + } + + m_maxRating = flags; + +} + +const Ratings DanbooruService::maximumAllowedRating() const +{ + + return m_maxRating; + +} + +// Slots + +void DanbooruService::processPostList(KJob* job) +{ + + //kDebug() << "Got post data OK"; + + if (job->error()) { + Q_EMIT(downloadError(job->errorString())); + } + + StoredTransferJob* jobResult = qobject_cast(job); + + if (jobResult == 0) { + Q_EMIT(downloadError(QString("Internal error"))); + return; } - void DanbooruService::getTagList(int limit, QString name) - { - QMap parameters; - parameters.insert("limit", QString::number(limit)); + QByteArray data = jobResult->data(); - if (!name.isEmpty()) { - parameters.insert("name", name); - } - parameters.insert("order", "date"); + kDebug() << jobResult->mimetype(); - KUrl danbooruUrl = requestUrl(m_url, TAG_URL, m_username, m_password, - parameters); - kDebug() << "Final constructed tag URL" << danbooruUrl.url(); + bool ok; - KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, - KIO::HideProgressInfo); + bool needsXML = 
job->property("needsXML").toBool(); - connect(job, SIGNAL(result(KJob*)), this, SLOT(processTagList(KJob*))); + QList postList; + + if (needsXML) { + // Special cases for pools + postList = parseDanbooruResult(data, QString("post"), &ok); + } else { + postList = parseDanbooruResult(data, &ok).toList(); } - void DanbooruService::getPool(int poolId, int page) - { - - QMap parameters; - - parameters.insert("id", QString::number(poolId)); - - if (page > 1) { - parameters.insert("page", QString::number(page)); - } - - KUrl danbooruUrl = requestUrl(m_url, POOL_DATA_URL, m_username, - m_password, parameters); - - kDebug() << "Final constructed pool URL" << danbooruUrl.url(); - - - KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, - KIO::HideProgressInfo); - - //HACK: Most Danbooru implementations don't provide valid data on - // pools via JSON, hence we set XML and parse the XML data. - - job->setProperty("needsXML", true); - - connect(job, SIGNAL(result(KJob*)), this, - SLOT(processPostList(KJob*))); - + if (!ok) { + Q_EMIT(downloadError(QString("Unable to decode data"))); + return; } - void DanbooruService::getPoolList(int page) - { + // How many posts do we have to fetch? - KUrl danbooruUrl; + m_currentPosts = postList.length(); - if (page == 0) { - danbooruUrl = requestUrl(m_url, POOL_URL, m_username, m_password); - } else { - QMap map; - map.insert("page", QString::number(page)); + for (auto element : postList) { - danbooruUrl = requestUrl(m_url, POOL_URL, m_username, - m_password, map); + QVariantMap map = element.toMap(); + + DanbooruPost* post = new DanbooruPost(map); + + // First check, for rating + + if (post->rating() > m_maxRating) { + m_currentPosts--; + delete post; + continue; } - kDebug() << "Final constructed pool list URL" << danbooruUrl.url(); + // second check, blacklist + // We make a copy due to the fact that otherwise intersect() + // will change the set in place - KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload, - KIO::HideProgressInfo); - // This job can use JSON data - job->setProperty("needsXML", false); + QSet temp = m_blacklist; - connect(job, SIGNAL(result(KJob*)), this, - SLOT(processPoolList(KJob*))); + temp = temp.intersect(post->tags()); - } - - void DanbooruService::getRelatedTags(const QStringList& tags, - Danbooru::TagType tagType) - { - - QString type; - switch(tagType) { - case Danbooru::General: - type = "general"; - break; - case Danbooru::Artist: - type = "artist"; - break; - case Danbooru::Copyright: - type = "copyright"; - break; - case Danbooru::Character: - type = "character"; - break; - case Danbooru::Unknown: - type = "unknown"; - break; + if (!temp.isEmpty()) { + // Blacklisted tags are present, do not use this post + m_currentPosts--; + delete post; + continue; } - QMap parameters; - parameters.insert("type", type); - - KUrl danbooruUrl = requestUrl(m_url, RELATED_TAG_URL, m_username, - m_password, parameters, tags ); - - kDebug() << "Final constructed related tag URL" << danbooruUrl.url(); - - StoredTransferJob* job = KIO::storedGet( - danbooruUrl, KIO::NoReload, - KIO::HideProgressInfo - ); - - connect(job, SIGNAL(result(KJob*)), this, - SLOT(processRelatedTagList(KJob*))); - - } - - // Getters / setters - - void DanbooruService::setBlacklist(const QSet< QString >& blacklist) - { - - if (!blacklist.isEmpty()) { - m_blacklist = blacklist; - } - - } - - - const QSet< QString > DanbooruService::blacklist() const - { - return m_blacklist; - } - - const QStringList DanbooruService::allowedRatings() const 
- { - QStringList ratings; - - if (m_maxRating.testFlag(Safe)) { - ratings.append("Safe"); - } - - if (m_maxRating.testFlag(Questionable)) { - ratings.append("Questionable"); - } - - if (m_maxRating.testFlag(Explicit)) { - ratings.append("Explicit"); - } - - return ratings; - - } - - void DanbooruService::setMaximumAllowedRating(const Danbooru::Ratings& rating) - { - Ratings flags; - - switch (rating) { - case Safe: - flags = Safe; - break; - case Questionable: - flags = Safe | Questionable; - break; - case Explicit: - flags = Safe | Questionable | Explicit; - break; - } - - m_maxRating = flags; - - } - - const Ratings DanbooruService::maximumAllowedRating() const - { - - return m_maxRating; - - } - - // Slots - - void DanbooruService::processPostList(KJob* job) - { - - //kDebug() << "Got post data OK"; - - if (job->error()) { - Q_EMIT(downloadError(job->errorString())); - } - - StoredTransferJob* jobResult = qobject_cast(job); - - if (jobResult == 0) { - Q_EMIT(downloadError(QString("Internal error"))); - return; - - } - - QByteArray data = jobResult->data(); - - kDebug() << jobResult->mimetype(); - - bool ok; - - bool needsXML = job->property("needsXML").toBool(); - - QList postList; - - if (needsXML) { - // Special cases for pools - postList = parseDanbooruResult(data, QString("post"), &ok); - } else { - postList = parseDanbooruResult(data, &ok).toList(); - } - - if (!ok) { - Q_EMIT(downloadError(QString("Unable to decode data"))); - return; - } - - // How many posts do we have to fetch? - - m_currentPosts = postList.length(); - - for (auto element: postList) { - - QVariantMap map = element.toMap(); - - DanbooruPost* post = new DanbooruPost(map); - - // First check, for rating - - if (post->rating() > m_maxRating) { - m_currentPosts--; - delete post; - continue; - } - - // second check, blacklist - // We make a copy due to the fact that otherwise intersect() - // will change the set in place - - QSet temp = m_blacklist; - - temp = temp.intersect(post->tags()); - - if (!temp.isEmpty()) { - // Blacklisted tags are present, do not use this post - m_currentPosts--; - delete post; - continue; - } - - QPixmap* pix = new QPixmap(); - bool result; - - if (m_cache) { - result = m_cache->findPixmap(post->thumbnailUrl().url(), - pix); - } else { - result = false; - } - - - if (result) { - post->setPixmap(pix); - Q_EMIT(postDownloaded(post)); - m_currentPosts--; - - // Shortcut in case we have all posts in the cache or the - // last post is in the cache - - if (m_currentPosts == 0) { - Q_EMIT(postDownloadFinished()); - return; - } - - } else { - - delete pix; - - StoredTransferJob* pixmapJob = KIO::storedGet( - post->thumbnailUrl(), - KIO::NoReload, KIO::HideProgressInfo - ); - - KIO::Scheduler::setJobPriority( - static_cast(job), - 1 - ); - - QVariant variant; - - variant.setValue(post); - - // We don't want to overload the servers, so set some rational - // priority - - pixmapJob->setProperty("danbooruPost", variant); - - connect(pixmapJob, SIGNAL(result(KJob*)), this, - SLOT(downloadThumbnail(KJob*))); - - } - - } - - } - - void DanbooruService::processTagList(KJob* job) - { - - if (job->error()) { - Q_EMIT(downloadError(job->errorString())); - return; - } - - StoredTransferJob* jobResult = qobject_cast(job); - - if (jobResult == 0) { - Q_EMIT(downloadError(QString("Internal error"))); - return; - } - - QByteArray data = jobResult->data(); - - bool ok; - - // Most Danbooru implementations return tags in wrong order when - // using JSON, so we have to fall back to XML - QList tagList = 
parseDanbooruResult(data, "tag", &ok); - - if (!ok) { - Q_EMIT(downloadError(QString("Unable to decode data"))); - return; - } - - for (auto element: tagList) { - QVariantMap map = element.toMap(); - DanbooruTag* tag = new DanbooruTag(map); - Q_EMIT(tagDownloaded(tag)); - } - } - - - void DanbooruService::processRelatedTagList(KJob* job) - { - if (job->error()) { - Q_EMIT(downloadError(job->errorString())); - return; - } - - StoredTransferJob* jobResult = qobject_cast(job); - - if (jobResult == 0) { - Q_EMIT(downloadError(QString("Internal error"))); - return; - - } - - QByteArray data = jobResult->data(); - bool ok; - - QVariantMap tagList = parseDanbooruResult(data, &ok).toMap(); - - if (!ok) { - Q_EMIT(downloadError(QString("Unable to decode data"))); - return; - } - - QVariantMap::const_iterator iter; - - // The service returns a list of key-related tag list pair, - // we iterate through them and remove the empty (not found) ones, then - // we call getTagList. Unfortunately Danbooru doesn't have a method to - // fetch all tags in batch, so this is done one by one. - - for (iter=tagList.constBegin(); iter!=tagList.constEnd(); ++iter) { - - QList tags = iter.value().toList(); - - if (tags.isEmpty()) { - continue; - } - - for (auto tag: tags) { - // We get the first element in the list, the second is - // the ID which is useless (no API methods in Danbooru) - QString tagName = tag.toList()[0].toString(); - getTagList(1, tagName); - } - - } - } - - void DanbooruService::processPoolList(KJob* job) - { - - - if (job->error()) { - Q_EMIT(downloadError(job->errorString())); - } - - StoredTransferJob* jobResult = qobject_cast(job); - - if (jobResult == 0) { - Q_EMIT(downloadError(QString("Internal error"))); - return; - - } - - QByteArray data = jobResult->data(); - - bool ok; - - QList poolList = parseDanbooruResult(data, &ok).toList(); - - if (!ok) { - Q_EMIT(downloadError(QString("Unable to decode data"))); - return; - } - - for (auto element: poolList) { - QVariantMap map = element.toMap(); - - DanbooruPool* pool = new DanbooruPool(map); - Q_EMIT(poolDownloaded(pool)); - } - - kDebug() << "Pool download finished!"; - Q_EMIT(poolDownloadFinished()); - - } - - void DanbooruService::downloadAllTags(KJob* job) - { - Q_UNUSED(job) - } - - void DanbooruService::downloadThumbnail(KJob* job) - { - - if (job->error()) { - Q_EMIT(downloadError(job->errorString())); - } - - QVariant postData = job->property("danbooruPost"); - - DanbooruPost* post = postData.value(); QPixmap* pix = new QPixmap(); - - StoredTransferJob* jobResult = qobject_cast(job); - - if (jobResult == 0) { - Q_EMIT(downloadError(QString("Internal error"))); - return; - - } - - bool ok = pix->loadFromData(jobResult->data()); - - if (!ok) { - Q_EMIT(downloadError(QString("Pixmap data could not be loaded"))); - return; - } - - post->setPixmap(pix); + bool result; if (m_cache) { - kDebug() << "Inserting item in cache"; - m_cache->insertPixmap(post->thumbnailUrl().url(), *pix); + result = m_cache->findPixmap(post->thumbnailUrl().url(), + pix); + } else { + result = false; } - m_currentPosts--; // One less post to do - kDebug() << "Current posts remaining" << m_currentPosts; - Q_EMIT(postDownloaded(post)); + if (result) { + post->setPixmap(pix); + Q_EMIT(postDownloaded(post)); + m_currentPosts--; + + // Shortcut in case we have all posts in the cache or the + // last post is in the cache + + if (m_currentPosts == 0) { + Q_EMIT(postDownloadFinished()); + return; + } + + } else { + + delete pix; + + StoredTransferJob* pixmapJob = 
KIO::storedGet(
+                post->thumbnailUrl(),
+                KIO::NoReload, KIO::HideProgressInfo
+            );
+
+            KIO::Scheduler::setJobPriority(
+                static_cast<KIO::SimpleJob*>(job),
+                1
+            );
+
+            QVariant variant;
+
+            variant.setValue(post);
+
+            // We don't want to overload the servers, so set some rational
+            // priority
+
+            pixmapJob->setProperty("danbooruPost", variant);
+
+            connect(pixmapJob, SIGNAL(result(KJob*)), this,
+                    SLOT(downloadThumbnail(KJob*)));
 
-        if (m_currentPosts == 0) {
-            kDebug() << "Post download finished";
-            Q_EMIT(postDownloadFinished());
+        }
 
     }
 
+}
+
+void DanbooruService::processTagList(KJob* job)
+{
+
+    if (job->error()) {
+        Q_EMIT(downloadError(job->errorString()));
+        return;
+    }
+
+    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);
+
+    if (jobResult == 0) {
+        Q_EMIT(downloadError(QString("Internal error")));
+        return;
+    }
+
+    QByteArray data = jobResult->data();
+
+    bool ok;
+
+    // Most Danbooru implementations return tags in the wrong order when
+    // using JSON, so we have to fall back to XML
+    QList<QVariant> tagList = parseDanbooruResult(data, "tag", &ok);
+
+    if (!ok) {
+        Q_EMIT(downloadError(QString("Unable to decode data")));
+        return;
+    }
+
+    for (auto element : tagList) {
+        QVariantMap map = element.toMap();
+        DanbooruTag* tag = new DanbooruTag(map);
+        Q_EMIT(tagDownloaded(tag));
+    }
+}
+
+
+void DanbooruService::processRelatedTagList(KJob* job)
+{
+    if (job->error()) {
+        Q_EMIT(downloadError(job->errorString()));
+        return;
+    }
+
+    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);
+
+    if (jobResult == 0) {
+        Q_EMIT(downloadError(QString("Internal error")));
+        return;
+
+    }
+
+    QByteArray data = jobResult->data();
+    bool ok;
+
+    QVariantMap tagList = parseDanbooruResult(data, &ok).toMap();
+
+    if (!ok) {
+        Q_EMIT(downloadError(QString("Unable to decode data")));
+        return;
+    }
+
+    QVariantMap::const_iterator iter;
+
+    // The service returns a map of key / related-tag-list pairs. We
+    // iterate through them, skip the empty (not found) ones, and call
+    // getTagList() for each related tag. Unfortunately Danbooru has no
+    // method to fetch all tags in one batch, so this is done one by one.
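+    // For illustration only (assumed payload shape): each value in tagList
+    // is itself a list of [name, id] pairs, roughly
+    //     "search_term": [["matching_tag", 1234], ["other_tag", 5678]]
+    // Only the name (element 0) is used below; the numeric ID has no
+    // corresponding API method here.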
+ + for (iter = tagList.constBegin(); iter != tagList.constEnd(); ++iter) { + + QList tags = iter.value().toList(); + + if (tags.isEmpty()) { + continue; + } + + for (auto tag : tags) { + // We get the first element in the list, the second is + // the ID which is useless (no API methods in Danbooru) + QString tagName = tag.toList()[0].toString(); + getTagList(1, tagName); + } + + } +} + +void DanbooruService::processPoolList(KJob* job) +{ + + + if (job->error()) { + Q_EMIT(downloadError(job->errorString())); + } + + StoredTransferJob* jobResult = qobject_cast(job); + + if (jobResult == 0) { + Q_EMIT(downloadError(QString("Internal error"))); + return; + + } + + QByteArray data = jobResult->data(); + + bool ok; + + QList poolList = parseDanbooruResult(data, &ok).toList(); + + if (!ok) { + Q_EMIT(downloadError(QString("Unable to decode data"))); + return; + } + + for (auto element : poolList) { + QVariantMap map = element.toMap(); + + DanbooruPool* pool = new DanbooruPool(map); + Q_EMIT(poolDownloaded(pool)); + } + + kDebug() << "Pool download finished!"; + Q_EMIT(poolDownloadFinished()); + +} + +void DanbooruService::downloadAllTags(KJob* job) +{ + Q_UNUSED(job) +} + +void DanbooruService::downloadThumbnail(KJob* job) +{ + + if (job->error()) { + Q_EMIT(downloadError(job->errorString())); + } + + QVariant postData = job->property("danbooruPost"); + + DanbooruPost* post = postData.value(); + QPixmap* pix = new QPixmap(); + + StoredTransferJob* jobResult = qobject_cast(job); + + if (jobResult == 0) { + Q_EMIT(downloadError(QString("Internal error"))); + return; + + } + + bool ok = pix->loadFromData(jobResult->data()); + + if (!ok) { + Q_EMIT(downloadError(QString("Pixmap data could not be loaded"))); + return; + } + + post->setPixmap(pix); + + if (m_cache) { + kDebug() << "Inserting item in cache"; + m_cache->insertPixmap(post->thumbnailUrl().url(), *pix); + } + + m_currentPosts--; // One less post to do + + kDebug() << "Current posts remaining" << m_currentPosts; + Q_EMIT(postDownloaded(post)); + + if (m_currentPosts == 0) { + kDebug() << "Post download finished"; + Q_EMIT(postDownloadFinished()); + } + +} + } // namespace Danbooru diff --git a/src/libdanbooru/danboorutag.cpp b/src/libdanbooru/danboorutag.cpp index b4c2f39..66633ca 100644 --- a/src/libdanbooru/danboorutag.cpp +++ b/src/libdanbooru/danboorutag.cpp @@ -22,64 +22,65 @@ #include "danboorutag.h" -namespace Danbooru { +namespace Danbooru +{ - DanbooruTag::DanbooruTag(const QVariantMap& postData, QObject* parent): - QObject(parent) - { - m_id = postData.value("id").toInt(); - m_name = postData.value("name").toString(); - m_count = postData.value("count").toInt(); - m_ambiguous = postData.value("ambiguous").toBool(); +DanbooruTag::DanbooruTag(const QVariantMap& postData, QObject* parent): + QObject(parent) +{ + m_id = postData.value("id").toInt(); + m_name = postData.value("name").toString(); + m_count = postData.value("count").toInt(); + m_ambiguous = postData.value("ambiguous").toBool(); - int type = postData.value("type").toInt(); + int type = postData.value("type").toInt(); - switch (type) { - case 0: - m_tagType = General; - break; - case 1: - m_tagType = Artist; - break; - case 2: - m_tagType = Copyright; - break; - case 3: - m_tagType = Character; - break; - case 4: - m_tagType = Unknown; - break; - default: - m_tagType = Unknown; - break; - } + switch (type) { + case 0: + m_tagType = General; + break; + case 1: + m_tagType = Artist; + break; + case 2: + m_tagType = Copyright; + break; + case 3: + m_tagType = Character; + 
break; + case 4: + m_tagType = Unknown; + break; + default: + m_tagType = Unknown; + break; } +} - int DanbooruTag::id() const - { - return m_id; - } +int DanbooruTag::id() const +{ + return m_id; +} - int DanbooruTag::count() const - { - return m_count; - } +int DanbooruTag::count() const +{ + return m_count; +} - const QString DanbooruTag::name() const - { - return m_name; - } +const QString DanbooruTag::name() const +{ + return m_name; +} - bool DanbooruTag::ambiguous() const - { - return m_ambiguous; - } +bool DanbooruTag::ambiguous() const +{ + return m_ambiguous; +} - TagType DanbooruTag::type() const - { - return m_tagType; - } +TagType DanbooruTag::type() const +{ + return m_tagType; +} }; // namespace Danbooru
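For review purposes, here is a minimal, hypothetical sketch of how a client could drive the reworked service. It is not part of the patch: the Client class, the board URL, the header name "danbooruservice.h" and the exact signature strings passed to connect() are assumptions, while the constructor, getPostList() and the postDownloaded()/postDownloadFinished()/downloadError() signals are taken from the code above.

#include <QObject>
#include <QStringList>
#include <KUrl>

#include "danbooruservice.h"   // assumed header name for DanbooruService
#include "danboorupost.h"

// Hypothetical client: fetches one page of posts and reacts to the
// signals emitted by DanbooruService as implemented in this patch.
class Client : public QObject
{
    Q_OBJECT

public:
    explicit Client(QObject* parent = 0) : QObject(parent)
    {
        // The constructor takes a non-const KUrl&, so the URL needs a name
        KUrl board("http://example.booru.org");    // assumed board URL

        // No username/password and no thumbnail cache (m_cache may be null)
        m_service = new Danbooru::DanbooruService(board, QString(),
                                                  QString(), 0, this);

        // Signature strings are assumptions based on the Q_EMIT calls above
        connect(m_service, SIGNAL(postDownloaded(Danbooru::DanbooruPost*)),
                this, SLOT(showPost(Danbooru::DanbooruPost*)));
        connect(m_service, SIGNAL(postDownloadFinished()),
                this, SLOT(pageFinished()));
        connect(m_service, SIGNAL(downloadError(QString)),
                this, SLOT(reportError(QString)));

        // First page, no tag filter, 20 posts (the service caps this at 100)
        m_service->getPostList(1, QStringList(), 20);
    }

public Q_SLOTS:
    void showPost(Danbooru::DanbooruPost* post)
    {
        // The service has already attached the thumbnail via setPixmap()
    }

    void pageFinished()
    {
        // All rating-allowed, non-blacklisted posts of this page arrived
    }

    void reportError(const QString& message)
    {
        // Surface the error message to the user
    }

private:
    Danbooru::DanbooruService* m_service;
};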