/*
    SPDX-FileCopyrightText: 2005 Joris Guisson <joris.guisson@gmail.com>

    SPDX-License-Identifier: GPL-2.0-or-later
*/
#include "httptracker.h"
#include <config-ktorrent.h>

#include <QHostAddress>
#include <QUrlQuery>

#include <KIO/StoredTransferJob>
#include <KLocalizedString>

#include "kioannouncejob.h"
#include "version.h"
#include <bcodec/bdecoder.h>
#include <bcodec/bnode.h>
#include <interfaces/exitoperation.h>
#include <interfaces/torrentinterface.h>
#include <peer/peermanager.h>
#include <torrent/globals.h>
#include <torrent/server.h>
#include <util/error.h>
#include <util/functions.h>
#include <util/log.h>
#include <util/waitjob.h>

namespace bt
{
bool HTTPTracker::proxy_on = false;
QString HTTPTracker::proxy = QString();
Uint16 HTTPTracker::proxy_port = 8080;

HTTPTracker::HTTPTracker(const QUrl &url, TrackerDataSource *tds, const PeerID &id, int tier)
    : Tracker(url, tds, id, tier)
    , active_job(nullptr)
    , failures(0)
    , supports_partial_seed_extension(false)
{
    interval = 5 * 60; // default interval 5 minutes
    connect(&timer, &QTimer::timeout, this, &HTTPTracker::onTimeout);
}

HTTPTracker::~HTTPTracker()
{
}

void HTTPTracker::start()
{
    event = QStringLiteral("started");
    resetTrackerStats();
    doRequest();
}

void HTTPTracker::stop(WaitJob *wjob)
{
    if (!started) {
        announce_queue.clear();
        reannounce_timer.stop();
        if (active_job) {
            active_job->kill();
            active_job = nullptr;
            status = TRACKER_IDLE;
            requestOK();
        }
    } else {
        reannounce_timer.stop();
        event = QStringLiteral("stopped");
        doRequest(wjob);
        started = false;
    }
}

void HTTPTracker::completed()
{
    event = QStringLiteral("completed");
    doRequest();
    event = QString();
}

void HTTPTracker::manualUpdate()
{
    if (!started)
        start();
    else
        doRequest();
}

void HTTPTracker::scrape()
{
    if (!url.isValid()) {
        Out(SYS_TRK | LOG_NOTICE) << "Invalid tracker url, canceling scrape" << endl;
        return;
    }

    if (!url.fileName().startsWith(QLatin1String("announce"))) {
        Out(SYS_TRK | LOG_NOTICE) << "Tracker " << url << " does not support scraping" << endl;
        return;
    }

    QUrl scrape_url = url;
    scrape_url.setPath(url.path(QUrl::FullyEncoded).replace(QStringLiteral("announce"), QStringLiteral("scrape")), QUrl::StrictMode);

    QString epq = scrape_url.query(QUrl::FullyEncoded);
    const SHA1Hash &info_hash = tds->infoHash();
    if (epq.length())
        epq += '&';
    epq += QLatin1String("info_hash=") + info_hash.toURLString();
    scrape_url.setQuery(epq, QUrl::StrictMode);

    Out(SYS_TRK | LOG_NOTICE) << "Doing scrape request to url : " << scrape_url << endl;
    KIO::MetaData md;
    setupMetaData(md);

    KIO::StoredTransferJob *j = KIO::storedGet(scrape_url, KIO::NoReload, KIO::HideProgressInfo);
    // set the meta data
    j->setMetaData(md);

    connect(j, &KIO::StoredTransferJob::result, this, &HTTPTracker::onScrapeResult);
}

void HTTPTracker::onScrapeResult(KJob *j)
{
    if (j->error()) {
        Out(SYS_TRK | LOG_IMPORTANT) << "Scrape failed : " << j->errorString() << endl;
        return;
    }

    KIO::StoredTransferJob *st = (KIO::StoredTransferJob *)j;
    BDecoder dec(st->data(), false, 0);
    QScopedPointer<BNode> n;

    try {
        n.reset(dec.decode());
    } catch (bt::Error &err) {
        Out(SYS_TRK | LOG_IMPORTANT) << "Invalid scrape data " << err.toString() << endl;
        return;
    }

    if (n && n->getType() == BNode::DICT) {
        // the scrape stats are nested in a dictionary under files -> <info hash>
        BDictNode *d = (BDictNode *)n.data();
        d = d->getDict(QByteArrayLiteral("files"));
        if (d) {
            d = d->getDict(tds->infoHash().toByteArray());
            if (d) {
                try {
                    seeders = d->getInt(QByteArrayLiteral("complete"));
                    leechers = d->getInt(QByteArrayLiteral("incomplete"));
                    total_downloaded = d->getInt(QByteArrayLiteral("downloaded"));
                    supports_partial_seed_extension = d->getValue(QByteArrayLiteral("downloaders")) != nullptr;
                    Out(SYS_TRK | LOG_DEBUG) << "Scrape : leechers = " << leechers << ", seeders = " << seeders << ", downloaded = " << total_downloaded
                                             << endl;
                } catch (...) {
                }
                scrapeDone();
                if (status == bt::TRACKER_ERROR) {
                    status = bt::TRACKER_OK;
                    failures = 0;
                }
            }
        }
    }
}

void HTTPTracker::doRequest(WaitJob *wjob)
{
    if (!url.isValid()) {
        requestPending();
        QTimer::singleShot(500, this, &HTTPTracker::emitInvalidURLFailure);
        return;
    }

    Uint16 port = ServerInterface::getPort();

    QUrlQuery query(url);
    query.addQueryItem(QStringLiteral("peer_id"), peer_id.toString());
    query.addQueryItem(QStringLiteral("port"), QString::number(port));
    query.addQueryItem(QStringLiteral("uploaded"), QString::number(bytesUploaded()));
    query.addQueryItem(QStringLiteral("downloaded"), QString::number(bytesDownloaded()));

    if (event == QLatin1String("completed"))
        query.addQueryItem(QStringLiteral("left"), QStringLiteral("0")); // need to send 0 when we are completed
    else
        query.addQueryItem(QStringLiteral("left"), QString::number(tds->bytesLeft()));

    query.addQueryItem(QStringLiteral("compact"), QStringLiteral("1"));
    if (event != QLatin1String("stopped"))
        query.addQueryItem(QStringLiteral("numwant"), QStringLiteral("200"));
    else
        query.addQueryItem(QStringLiteral("numwant"), QStringLiteral("0"));

    query.addQueryItem(QStringLiteral("key"), QString::number(key));
    QString cip = Tracker::getCustomIP();
    if (cip.isNull())
        cip = CurrentIPv6Address();

    if (!cip.isEmpty())
        query.addQueryItem(QStringLiteral("ip"), cip);

    if (event.isEmpty() && supports_partial_seed_extension && tds->isPartialSeed())
        event = QStringLiteral("paused");

    if (!event.isEmpty())
        query.addQueryItem(QStringLiteral("event"), event);

    const SHA1Hash &info_hash = tds->infoHash();
    QString epq = query.toString(QUrl::FullyEncoded) + QLatin1String("&info_hash=") + info_hash.toURLString();

    QUrl u = url;
    u.setQuery(epq, QUrl::StrictMode);

    if (active_job) {
        announce_queue.append(u);
        Out(SYS_TRK | LOG_NOTICE) << "Announce ongoing, queueing announce" << endl;
    } else {
        doAnnounce(u);
        // if there is a wait job, add this job to the waitjob
        if (wjob)
            wjob->addExitOperation(new ExitJobOperation(active_job));
    }
}

bool HTTPTracker::updateData(const QByteArray &data)
{
// #define DEBUG_PRINT_RESPONSE
#ifdef DEBUG_PRINT_RESPONSE
    Out(SYS_TRK | LOG_DEBUG) << "Data : " << endl;
    Out(SYS_TRK | LOG_DEBUG) << QString(data) << endl;
#endif
    // search for the dictionary, there might be random garbage in front of the data
    int i = 0;
    while (i < data.size()) {
        if (data[i] == 'd')
            break;
        i++;
    }

    if (i == data.size()) {
        failures++;
        failed(i18n("Invalid response from tracker"));
        return false;
    }

    BDecoder dec(data, false, i);
    BNode *n = nullptr;
    try {
        n = dec.decode();
    } catch (...) {
        failures++;
        failed(i18n("Invalid data from tracker"));
        return false;
    }

    if (!n || n->getType() != BNode::DICT) {
        failures++;
        failed(i18n("Invalid response from tracker"));
        delete n;
        return false;
    }

    BDictNode *dict = (BDictNode *)n;
    if (dict->getData(QByteArrayLiteral("failure reason"))) {
        BValueNode *vn = dict->getValue(QByteArrayLiteral("failure reason"));
        error = vn->data().toString();
        failures++;
        failed(error);
        delete n;
        return false;
    }

    if (dict->getData(QByteArrayLiteral("warning message"))) {
        BValueNode *vn = dict->getValue(QByteArrayLiteral("warning message"));
        warning = vn->data().toString();
    } else
        warning.clear();

    BValueNode *vn = dict->getValue(QByteArrayLiteral("interval"));

    // if no interval is specified, use 5 minutes
    if (vn)
        interval = vn->data().toInt();
    else
        interval = 5 * 60;

    vn = dict->getValue(QByteArrayLiteral("incomplete"));
    if (vn)
        leechers = vn->data().toInt();

    vn = dict->getValue(QByteArrayLiteral("complete"));
    if (vn)
        seeders = vn->data().toInt();

    BListNode *ln = dict->getList(QByteArrayLiteral("peers"));
    if (!ln) {
        // no list, it might however be a compact response (6 bytes per peer: 4 byte IPv4 address + 2 byte port)
        vn = dict->getValue(QByteArrayLiteral("peers"));
        if (vn && vn->data().getType() == Value::STRING) {
            QByteArray arr = vn->data().toByteArray();
            for (int i = 0; i < arr.size(); i += 6) {
                Uint8 buf[6];
                for (int j = 0; j < 6; j++)
                    buf[j] = arr[i + j];

                Uint32 ip = ReadUint32(buf, 0);
                addPeer(net::Address(ip, ReadUint16(buf, 4)), false);
            }
        }
    } else {
        for (Uint32 i = 0; i < ln->getNumChildren(); i++) {
            BDictNode *dict = dynamic_cast<BDictNode *>(ln->getChild(i));

            if (!dict)
                continue;

            BValueNode *ip_node = dict->getValue(QByteArrayLiteral("ip"));
            BValueNode *port_node = dict->getValue(QByteArrayLiteral("port"));

            if (!ip_node || !port_node)
                continue;

            net::Address addr(ip_node->data().toString(), port_node->data().toInt());
            addPeer(addr, false);
        }
    }

    // Check for IPv6 compact peers (18 bytes per peer: 16 byte IPv6 address + 2 byte port)
    vn = dict->getValue(QByteArrayLiteral("peers6"));
    if (vn && vn->data().getType() == Value::STRING) {
        QByteArray arr = vn->data().toByteArray();
        for (int i = 0; i < arr.size(); i += 18) {
            Q_IPV6ADDR ip;
            memcpy(ip.c, arr.data() + i, 16);
            quint16 port = ReadUint16((const Uint8 *)arr.data() + i, 16);

            addPeer(net::Address(ip, port), false);
        }
    }

    delete n;
    return true;
}

void HTTPTracker::onKIOAnnounceResult(KJob *j)
{
    KIOAnnounceJob *st = (KIOAnnounceJob *)j;
    onAnnounceResult(st->announceUrl(), st->replyData(), j);
}

void HTTPTracker::onAnnounceResult(const QUrl &url, const QByteArray &data, KJob *j)
{
    timer.stop();
    active_job = nullptr;
    KIOAnnounceJob *st = (KIOAnnounceJob *)j;
    if (st->IsErrorPage() || (j->error() && data.size() == 0)) {
        QString err = error;
        error.clear();
        if (err.isEmpty())
            err = j->errorString();

        Out(SYS_TRK | LOG_IMPORTANT) << "Error : " << err << endl;

        if (st->IsErrorPage()) {
            Out(SYS_TRK | LOG_IMPORTANT) << "HTTP Error page : " << QString::fromStdString(st->replyData().toStdString()) << endl;
        }

        if (QUrlQuery(url).queryItemValue(QStringLiteral("event")) != QLatin1String("stopped")) {
            failures++;
            failed(err);
        } else {
            status = TRACKER_IDLE;
            stopDone();
        }
    } else {
        if (QUrlQuery(url).queryItemValue(QStringLiteral("event")) != QLatin1String("stopped")) {
            try {
                if (updateData(data)) {
                    failures = 0;
                    peersReady(this);
                    request_time = QDateTime::currentDateTime();
                    status = TRACKER_OK;
                    if (QUrlQuery(url).queryItemValue(QStringLiteral("event")) == QLatin1String("started"))
                        started = true;
                    if (started)
                        reannounce_timer.start(interval * 1000);
                    requestOK();
                }
            } catch (bt::Error &err) {
                failures++;
                failed(i18n("Invalid response from tracker"));
            }
            event = QString();
        } else {
            status = TRACKER_IDLE;
            failures = 0;
            stopDone();
        }
    }
    doAnnounceQueue();
}

void HTTPTracker::emitInvalidURLFailure()
{
    failures++;
    failed(i18n("Invalid tracker URL"));
}

void HTTPTracker::setupMetaData(KIO::MetaData &md)
{
    md["UserAgent"] = bt::GetVersionString();
    md["SendLanguageSettings"] = "false";
    md["cookies"] = "none";
    // md["accept"] = "text/plain";
    md["accept"] = "text/html, image/gif, image/jpeg, *; q=.2, */*; q=.2";
    if (proxy_on) {
        QString p = QString("%1:%2").arg(proxy).arg(proxy_port);
        if (!p.startsWith(QLatin1String("http://")))
            p = "http://" + p;
        // set the proxy if doNotUseKDEProxy is enabled (the URL must be valid too)
        QUrl url(p);
        if (url.isValid() && proxy.trimmed().length() > 0) {
            md["UseProxy"] = p;
            md["ProxyUrls"] = p;
        } else {
            md["UseProxy"] = QString();
            md["ProxyUrls"] = QString();
        }

        Out(SYS_TRK | LOG_DEBUG) << "Using proxy : " << md["UseProxy"] << endl;
    }
}

void HTTPTracker::doAnnounceQueue()
{
    if (announce_queue.empty())
        return;

    QUrl u = announce_queue.front();
    announce_queue.pop_front();
    doAnnounce(u);
}

void HTTPTracker::doAnnounce(const QUrl &u)
{
    Out(SYS_TRK | LOG_NOTICE) << "Doing tracker request to url (via KIO): " << u.toString() << endl;

    KIO::MetaData md;
    setupMetaData(md);
    KIOAnnounceJob *j = new KIOAnnounceJob(u, md);
    connect(j, &KIOAnnounceJob::result, this, &HTTPTracker::onKIOAnnounceResult);
    active_job = j;

    time_out = false;
    timer.start(60 * 1000);
    status = TRACKER_ANNOUNCING;
    requestPending();
}

void HTTPTracker::onTimeout()
{
    if (active_job) {
        time_out = true;
        error = i18n("Timeout contacting tracker %1", url.toString());
        active_job->kill(KJob::EmitResult);
    }
}

void HTTPTracker::setProxy(const QString &p, const bt::Uint16 port)
{
    proxy = p;
    proxy_port = port;
}

void HTTPTracker::setProxyEnabled(bool on)
{
    proxy_on = on;
}

void HTTPTracker::setUseQHttp(bool on)
{
    Q_UNUSED(on)
}

}

#include "moc_httptracker.cpp"