Use lambdas for most of the jobs
commit 4b85d63d68 (parent 3f760a7690)
1 changed file with 110 additions and 81 deletions
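The change follows a single pattern: instead of routing KIO::StoredTransferJob::result to a dedicated processXxx() slot, each call site now handles the result in a lambda that captures this. Below is a minimal sketch of that shape; it only reuses names visible in the diff (KIO::storedGet, downloadError, parseDanbooruResult), while fetchExample() and its url parameter are placeholders rather than part of the actual change.

// Sketch of the connect-to-lambda pattern used throughout this commit.
// fetchExample() and 'url' are hypothetical; the KIO calls, the error check
// and the qobject_cast follow the hunks below.
#include <KIO/StoredTransferJob>
#include <KJob>
#include <QUrl>

void DanbooruService::fetchExample(const QUrl& url)
{
    KIO::StoredTransferJob* job = KIO::storedGet(url, KIO::NoReload,
                                                 KIO::HideProgressInfo);

    connect(job, &KIO::StoredTransferJob::result, [this](KJob* job) {

        if (job->error()) {
            // Same error path as the removed processXxx() slots
            Q_EMIT(downloadError(job->errorString()));
            return;
        }

        KIO::StoredTransferJob* jobResult =
            qobject_cast<KIO::StoredTransferJob*>(job);
        QByteArray data = jobResult->data();

        // From here the data is parsed with parseDanbooruResult() and the
        // matching *Downloaded() signal is emitted, as in each hunk below.
        Q_UNUSED(data);
    });
}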
				
			
@@ -97,6 +97,7 @@ void DanbooruService::getPostList(int page, QStringList tags, int limit)
    connect(job, &KIO::StoredTransferJob::result, this, &DanbooruService::processPostList);
}

void DanbooruService::getTagList(int limit, QString name)
@@ -116,7 +117,35 @@ void DanbooruService::getTagList(int limit, QString name)
    KIO::StoredTransferJob* job = KIO::storedGet(danbooruUrl, KIO::NoReload,
                                  KIO::HideProgressInfo);

    connect(job, &KIO::StoredTransferJob::result, this, &DanbooruService::processTagList);
    connect(job, &KIO::StoredTransferJob::result, [this](KJob* job) {

            if (job->error()) {
                Q_EMIT(downloadError(job->errorString()));
                return;
            }

            StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);
            QByteArray data = jobResult->data();

            bool ok;

            // Most Danbooru implementations return tags in wrong order when
            // using JSON, so we have to fall back to XML
            QList<QVariant> tagList = parseDanbooruResult(data, "tag", &ok);

            if (!ok) {
                Q_EMIT(downloadError(QString("Unable to decode data")));
                return;
            }

            for (auto element : tagList) {
                QVariantMap map = element.toMap();
                DanbooruTag* tag = new DanbooruTag(map);
                Q_EMIT(tagDownloaded(tag));
            }

        }
    );
}

void DanbooruService::getPool(int poolId, int page)
@@ -170,7 +199,38 @@ void DanbooruService::getPoolList(int page)
    // This job can use JSON data
    job->setProperty("needsXML", false);

    connect(job, &KIO::StoredTransferJob::result, this, &DanbooruService::processPoolList);
//     connect(job, &KIO::StoredTransferJob::result, this, &DanbooruService::processPoolList);

    connect(job, &KIO::StoredTransferJob::result, [this] (KJob* job) {

        if (job->error()) {
            Q_EMIT(downloadError(job->errorString()));
            return;
        }

        StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);
        QByteArray data = jobResult->data();

        bool ok;
        QList<QVariant> poolList = parseDanbooruResult(data,  &ok).toList();

        if (!ok) {
            Q_EMIT(downloadError(QString("Unable to decode data")));
            return;
        }

        for (auto element : poolList) {
            QVariantMap map = element.toMap();

            DanbooruPool* pool = new DanbooruPool(map);
            Q_EMIT(poolDownloaded(pool));
        }

        //qDebug() << "Pool download finished!";
        Q_EMIT(poolDownloadFinished());
    }

    );

}
@@ -210,7 +270,51 @@ void DanbooruService::getRelatedTags(const QStringList& tags,
                                 KIO::HideProgressInfo
                             );

    connect(job, &StoredTransferJob::result, this, &DanbooruService::processRelatedTagList);
    connect(job, &StoredTransferJob::result, [this](KJob* job) {

        if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
        }

        StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);
        QByteArray data = jobResult->data();
        bool ok;

        QVariantMap tagList = parseDanbooruResult(data, &ok).toMap();

        if (!ok) {
            Q_EMIT(downloadError(QString("Unable to decode data")));
            return;
        }

        QVariantMap::const_iterator iter;

        // The service returns a list of key-related tag list pair,
        // we iterate through them and remove the empty (not found) ones, then
        // we call getTagList. Unfortunately Danbooru doesn't have a method to
        // fetch all tags in batch, so this is done one by one.

        for (iter = tagList.constBegin(); iter != tagList.constEnd(); ++iter) {

            QList<QVariant> tags = iter.value().toList();

            if (tags.isEmpty()) {
                continue;
            }

            for (auto tag : tags) {
                // We get the first element in the list, the second is
                // the ID which is useless (no API methods in Danbooru)
                QString tagName = tag.toList()[0].toString();
                getTagList(1, tagName);
            }

        }

    }

    );

}
@@ -325,7 +429,7 @@ void DanbooruService::processPostList(KJob* job)
        QVariantMap map = element.toMap();
        DanbooruPost* post = new DanbooruPost(map);

        // First check, for rating
        // Remove unwanted posts

        if (isPostBlacklisted(post, m_blacklist, m_maxRating)) {
            m_postsToFetch--;
@@ -337,6 +441,7 @@ void DanbooruService::processPostList(KJob* job)

        if (m_cache->findPixmap(post->thumbnailUrl().url(), &pix)) {

            qDebug() << "in cache";
            post->setPixmap(pix);
            Q_EMIT(postDownloaded(post));
            m_postsToFetch--;
@@ -362,14 +467,13 @@ void DanbooruService::processPostList(KJob* job)

            pixmapJob->setProperty("danbooruPost", variant);

            connect(pixmapJob, &StoredTransferJob::result, [post, this] (KJob* job) {
            connect(pixmapJob, &StoredTransferJob::result, [post, this, pix] (KJob* job) mutable {

                if (job->error()) {
                    Q_EMIT(downloadError(job->errorString()));
                    return;
                }

                QPixmap pix;
                StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

                if (!pix.loadFromData(jobResult->data())) {
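One detail in the hunk above: the thumbnail lambda now captures pix by value and is declared mutable, which is why the local QPixmap pix; inside the lambda goes away. The mutable keyword is needed because QPixmap::loadFromData() is a non-const member and by-value captures are const by default. A standalone illustration of that language rule (not project code; example() and its argument are made up):

#include <QByteArray>
#include <QPixmap>

bool example(const QByteArray& bytes)
{
    QPixmap pix;

    // Without 'mutable' this would not compile: loadFromData() modifies
    // the lambda's own by-value copy of pix.
    auto load = [pix](const QByteArray& data) mutable {
        return pix.loadFromData(data);
    };

    return load(bytes);
}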
@@ -407,87 +511,12 @@ void DanbooruService::processPostList(KJob* job)
void DanbooruService::processTagList(KJob* job)
{

    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;
    }

    QByteArray data = jobResult->data();

    bool ok;

    // Most Danbooru implementations return tags in wrong order when
    // using JSON, so we have to fall back to XML
    QList<QVariant> tagList = parseDanbooruResult(data, "tag", &ok);

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    for (auto element : tagList) {
        QVariantMap map = element.toMap();
        DanbooruTag* tag = new DanbooruTag(map);
        Q_EMIT(tagDownloaded(tag));
    }
}


void DanbooruService::processRelatedTagList(KJob* job)
{
    if (job->error()) {
        Q_EMIT(downloadError(job->errorString()));
        return;
    }

    StoredTransferJob* jobResult = qobject_cast<StoredTransferJob*>(job);

    if (jobResult == 0) {
        Q_EMIT(downloadError(QString("Internal error")));
        return;

    }

    QByteArray data = jobResult->data();
    bool ok;

    QVariantMap tagList = parseDanbooruResult(data, &ok).toMap();

    if (!ok) {
        Q_EMIT(downloadError(QString("Unable to decode data")));
        return;
    }

    QVariantMap::const_iterator iter;

    // The service returns a list of key-related tag list pair,
    // we iterate through them and remove the empty (not found) ones, then
    // we call getTagList. Unfortunately Danbooru doesn't have a method to
    // fetch all tags in batch, so this is done one by one.

    for (iter = tagList.constBegin(); iter != tagList.constEnd(); ++iter) {

        QList<QVariant> tags = iter.value().toList();

        if (tags.isEmpty()) {
            continue;
        }

        for (auto tag : tags) {
            // We get the first element in the list, the second is
            // the ID which is useless (no API methods in Danbooru)
            QString tagName = tag.toList()[0].toString();
            getTagList(1, tagName);
        }

    }
}

void DanbooruService::processPoolList(KJob* job)