/*
SPDX-FileCopyrightText: 2007-2009 David Nolden <david.nolden.kdevelop@art-master.de>
SPDX-FileCopyrightText: 2016 Milian Wolff <mail@milianw.de>
SPDX-License-Identifier: LGPL-2.0-or-later
*/
#include "urlparselock.h"
#include <QHash>
#include <QMutexLocker>
#include <QRecursiveMutex>
using namespace KDevelop;
namespace {
// Bookkeeping kept for a single URL while at least one UrlParseLock
// instance refers to it.
struct PerUrlData
{
    // TODO: make this non-recursive
    QRecursiveMutex mutex;
    // Number of UrlParseLock instances currently (trying to) parse this url;
    // the entry is discarded as soon as this drops back to zero.
    uint ref = 0;
};

// Guards every access to the parsingUrls() hash below.
// NOTE: QBasicMutex is safe to initialize statically
QBasicMutex parsingUrlsMutex;

// Maps each url currently being parsed to its protection data.
using ParsingUrls = QHash<IndexedString, PerUrlData*>;

ParsingUrls& parsingUrls()
{
    // Construct the hash lazily, on first use.
    static ParsingUrls urls;
    return urls;
}
}
UrlParseLock::UrlParseLock(const IndexedString& url)
    : m_url(url)
{
    QRecursiveMutex* urlMutex = nullptr;
    {
        QMutexLocker locker(&parsingUrlsMutex);
        // NOTE: operator[] default-initializes the pointer to nullptr when
        // the url is not yet in the hash.
        auto& slot = parsingUrls()[url];
        if (!slot) {
            // We are the first to parse this url — create its entry.
            slot = new PerUrlData;
        }
        // Register ourselves before the global mutex is released, so the
        // destructor of a concurrent lock cannot delete the entry under us.
        ++slot->ref;
        urlMutex = &slot->mutex;
    }
    // Acquire the per-url mutex only after dropping the global one, so that
    // other urls can still be registered while we block here.
    urlMutex->lock();
}
UrlParseLock::~UrlParseLock()
{
    QMutexLocker locker(&parsingUrlsMutex);
    auto& urls = parsingUrls();
    const auto it = urls.find(m_url);
    Q_ASSERT(it != urls.end()); // the constructor must have registered this url
    auto* entry = it.value();
    // Release the per-url mutex we have held since construction.
    entry->mutex.unlock();
    // Deregister ourselves; the last one out tears the entry down.
    if (--entry->ref == 0) {
        delete entry;
        urls.erase(it);
    }
}