/*
    SPDX-FileCopyrightText: 2007-2009 David Nolden <david.nolden.kdevelop@art-master.de>
    SPDX-FileCopyrightText: 2016 Milian Wolff <mail@milianw.de>

    SPDX-License-Identifier: LGPL-2.0-or-later
*/

#include "urlparselock.h"

#include <QHash>
#include <QMutexLocker>
#include <QRecursiveMutex>

using namespace KDevelop;

namespace {
struct PerUrlData
{
    // TODO: make this non-recursive
    QRecursiveMutex mutex;
    // how many threads are (trying to) parse this url;
    // we use this to delete the entry once no one needs it anymore
    uint ref = 0;
};

// this mutex protects the parsingUrls hash below
// NOTE: QBasicMutex is safe to initialize statically
QBasicMutex parsingUrlsMutex;

// Hash of urls that are currently being parsed and their protection data
using ParsingUrls = QHash<IndexedString, PerUrlData*>;
ParsingUrls& parsingUrls()
{
    // delay initialization of the hash until it's needed
    static ParsingUrls parsingUrls;
    return parsingUrls;
}
}

UrlParseLock::UrlParseLock(const IndexedString& url)
    : m_url(url)
{
    QMutexLocker lock(&parsingUrlsMutex);

    // NOTE: operator[] default-constructs the pointer to nullptr for us when the url is not in the hash yet
    auto& perUrlData = parsingUrls()[url];
    if (!perUrlData) {
        // in that case we are the first to parse this url, so create an entry
        perUrlData = new PerUrlData;
    }

    // always increment the refcount
    ++perUrlData->ref;

    // now lock the url, but don't do so while holding the global mutex
    auto& mutex = perUrlData->mutex;
    lock.unlock();

    mutex.lock();
}

UrlParseLock::~UrlParseLock()
{
    QMutexLocker lock(&parsingUrlsMutex);

    // find the entry for this url
    auto& urls = parsingUrls();
    auto it = urls.find(m_url);
    Q_ASSERT(it != urls.end()); // it must exist
    auto& perUrlData = it.value();

    // unlock the per-url mutex
    perUrlData->mutex.unlock();

    // decrement the refcount
    --perUrlData->ref;
    if (perUrlData->ref == 0) {
        // and clean up the entry, now that no one uses it anymore
        delete perUrlData;
        urls.erase(it);
    }
}
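
// A minimal usage sketch: UrlParseLock is an RAII guard, so callers construct it
// on the stack for the url they are about to parse. parseDocument() below is a
// hypothetical caller, not actual KDevelop API:
//
//     void parseDocument(const IndexedString& url)
//     {
//         UrlParseLock lock(url); // blocks while another thread parses this url
//         // ... parse the document behind url; unrelated urls stay parallel
//     } // destructor unlocks and frees the bookkeeping entry once unused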