track.py

# The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.

# Written by Bram Cohen and John Hoffman

import sys
import os
import signal
import re
import cPickle
import logging
import datetime
from urlparse import urlparse
from traceback import print_exc
from time import time, gmtime, strftime, localtime
from random import shuffle
from types import StringType, IntType, LongType, ListType, DictType
from binascii import b2a_hex
from cStringIO import StringIO

from BTL.translation import _
from BTL.obsoletepythonsupport import *
from BitTorrent import platform
from BTL import BTFailure
from BTL.platform import decode_from_filesystem, efs2
from BTL.defer import DeferredEvent, ThreadedDeferred
from BTL.yielddefer import wrap_task
from BitTorrent.configfile import parse_configuration_and_args
#from BitTorrent.parseargs import parseargs, printHelp
from BitTorrent.RawServer_twisted import RawServer
from BitTorrent.HTTPHandler import HTTPHandler
from BTL.parsedir import parsedir
from BitTorrent.NatCheck import NatCheck
from BTL.bencode import bencode, bdecode, Bencached
from urllib import quote, unquote
from BTL.exceptions import str_exc
from BitTorrent import version
from BitTorrent.prefs import Preferences
from BitTorrent.defaultargs import get_defaults
from BitTorrent.UI import Size

import socket
import threading
import traceback

NOISY = False

# code duplication because ow.
MAX_INCOMPLETE = 100
if os.name == 'nt':
    from BitTorrent.platform import win_version_num
    # starting in XP SP2 the incomplete outgoing connection limit was set to 10
    if win_version_num >= (2, 5, 1, 2, 0):
        MAX_INCOMPLETE = 10

def statefiletemplate(x):
    if type(x) != DictType:
        raise ValueError
    for cname, cinfo in x.iteritems():
        if cname == 'peers':
            for y in cinfo.itervalues():     # The 'peers' key is a dictionary of SHA hashes (torrent ids)
                if type(y) != DictType:      # ... for the active torrents, and each is a dictionary
                    raise ValueError
                for peerid, info in y.iteritems():   # ... of client ids interested in that torrent
                    if (len(peerid) != 20):
                        raise ValueError
                    if type(info) != DictType:   # ... each of which is also a dictionary
                        raise ValueError         # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent
                    if type(info.get('ip', '')) != StringType:
                        raise ValueError
                    port = info.get('port')
                    if type(port) not in (IntType, LongType) or port < 0:
                        raise ValueError
                    left = info.get('left')
                    if type(left) not in (IntType, LongType) or left < 0:
                        raise ValueError
        elif cname == 'completed':
            if (type(cinfo) != DictType):    # The 'completed' key is a dictionary of SHA hashes (torrent ids)
                raise ValueError             # ... for keeping track of the total completions per torrent
            for y in cinfo.itervalues():     # ... each torrent has an integer value
                if type(y) not in (IntType, LongType):
                    raise ValueError         # ... for the number of reported completions for that torrent
        elif cname == 'allowed':
            if (type(cinfo) != DictType):    # a list of info_hashes and included data
                raise ValueError
            if x.has_key('allowed_dir_files'):
                adlist = [z[1] for z in x['allowed_dir_files'].itervalues()]
                for y in cinfo.iterkeys():   # and each should have a corresponding key here
                    if not y in adlist:
                        raise ValueError
        elif cname == 'allowed_dir_files':
            if (type(cinfo) != DictType):    # a list of files, their attributes and info hashes
                raise ValueError
            dirkeys = {}
            for y in cinfo.itervalues():     # each entry should have a corresponding info_hash
                if not y[1]:
                    continue
                if not x['allowed'].has_key(y[1]):
                    raise ValueError
                if dirkeys.has_key(y[1]):    # and each should have a unique info_hash
                    raise ValueError
                dirkeys[y[1]] = 1
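
# Illustrative only (not from the original source): a minimal populated state
# dict that statefiletemplate() accepts looks like
#   {'peers':     {infohash: {peerid: {'ip': '1.2.3.4', 'port': 6881,
#                                      'left': 0}}},
#    'completed': {infohash: 3}}
# where infohash is a 20-byte SHA-1 digest and peerid is a 20-byte client id.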

alas = _("your file may exist elsewhere in the universe\nbut alas, not here\n")

def isotime():
    #return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))
    return datetime.datetime.utcnow().isoformat()

http_via_filter = re.compile(' for ([0-9.]+)\Z')

def _get_forwarded_ip(headers):
    if headers.has_key('x_forwarded_for'):
        header = headers['x_forwarded_for']
        try:
            x, y = header.split(',')
        except:
            return header
        if not is_local_ip(x):
            return x
        return y
    if headers.has_key('client_ip'):
        return headers['client_ip']
    if headers.has_key('via'):
        x = http_via_filter.search(headers['via'])
        try:
            return x.group(1)
        except:
            pass
    if headers.has_key('from'):
        return headers['from']
    return None

def get_forwarded_ip(headers):
    x = _get_forwarded_ip(headers)
    if x is None or not is_valid_ipv4(x) or is_local_ip(x):
        return None
    return x
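
# Header precedence above: X-Forwarded-For, then Client-IP, then Via, then
# From. get_forwarded_ip() only trusts the result if it parses as a public
# IPv4 address; anything local or malformed falls back to the socket's own IP.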

def compact_peer_info(ip, port):
    try:
        s = ( ''.join([chr(int(i)) for i in ip.split('.')])
              + chr((port & 0xFF00) >> 8) + chr(port & 0xFF) )
        if len(s) != 6:
            s = ''
    except:
        s = ''  # not a valid IP, must be a domain name
    return s
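
# compact_peer_info() renders the 6-byte "compact" peer entry: four raw
# address bytes followed by the port in network (big-endian) byte order.
# Illustrative example (not from the original source):
#   compact_peer_info('10.0.0.1', 6881) == '\n\x00\x00\x01\x1a\xe1'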

def is_valid_ipv4(ip):
    a = ip.split('.')
    if len(a) != 4:
        return False
    try:
        for x in a:
            chr(int(x))
        return True
    except:
        return False

def is_local_ip(ip):
    try:
        v = [int(x) for x in ip.split('.')]
        if v[0] == 10 or v[0] == 127 or v[:2] in ([192, 168], [169, 254]):
            return 1
        if v[0] == 172 and v[1] >= 16 and v[1] <= 31:
            return 1
    except ValueError:
        return 0
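
# The ranges treated as local above are 10/8, 172.16/12, and 192.168/16
# (RFC 1918 private space), 127/8 (loopback), and 169.254/16 (link-local).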

default_headers = {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}

class Tracker(object):

    def __init__(self, config, rawserver):
        self.config = config
        self.response_size = config['response_size']
        self.max_give = config['max_give']
        self.dfile = efs2(config['dfile'])
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon, 'r')
                self.favicon = h.read()
                h.close()
            except:
                errorfunc(logging.WARNING,
                          _("specified favicon file -- %s -- does not exist.") %
                          favicon)
        self.rawserver = rawserver
        self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], [time3, l3, s3]]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}
        self.state = {}
        self.seedcount = {}

        self.save_pending = False
        self.parse_pending = False

        self.only_local_override_ip = config['only_local_override_ip']
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['nat_check']

        if os.path.exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                try:
                    tempstate = cPickle.loads(ds)
                except:
                    tempstate = bdecode(ds)  # backwards-compatibility.
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                errorfunc(logging.WARNING,
                          _("statefile %s corrupt; resetting") % self.dfile)
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})

        self.becache = {}   # format: infohash: [[l1, s1], [l2, s2], [l3, s3]]
        for infohash, ds in self.downloads.iteritems():
            self.seedcount[infohash] = 0
            for x, y in ds.iteritems():
                if not y.get('nat', -1):
                    ip = y.get('given_ip')
                    if not (ip and self.allow_local_override(y['ip'], ip)):
                        ip = y['ip']
                    self.natcheckOK(infohash, x, ip, y['port'], y['left'])
                if not y['left']:
                    self.seedcount[infohash] += 1

        for infohash in self.downloads:
            self.times[infohash] = {}
            for peerid in self.downloads[infohash]:
                self.times[infohash][peerid] = 0

        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_dfile_interval, self.save_dfile)
        self.prevtime = time()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.timeout_downloaders_interval,
                           self.expire_downloaders)
        self.logfile = None
        self.log = None
        if (config['logfile'] != '') and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print _("# Log Started: "), isotime()
            except:
                print _("**warning** could not redirect stdout to log file: "), sys.exc_info()[0]

        if config['hupmonitor']:
            def huphandler(signum, frame, self = self):
                try:
                    self.log.close()
                    self.log = open(self.logfile, 'a')
                    sys.stdout = self.log
                    print _("# Log reopened: "), isotime()
                except:
                    print _("***warning*** could not reopen logfile")
            signal.signal(signal.SIGHUP, huphandler)

        self.allow_get = config['allow_get']

        if config['allowed_dir'] != '':
            self.allowed_dir = config['allowed_dir']
            self.parse_dir_interval = config['parse_dir_interval']
            self.allowed = self.state.setdefault('allowed', {})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files', {})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
        else:
            try:
                del self.state['allowed']
            except:
                pass
            try:
                del self.state['allowed_dir_files']
            except:
                pass
            self.allowed = None

        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']

    def allow_local_override(self, ip, given_ip):
        return is_valid_ipv4(given_ip) and (
            not self.only_local_override_ip or is_local_ip(ip) )

    def get_infopage(self):
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', default_headers, alas)
            red = self.config['infopage_redirect']
            if red != '':
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n'
                    '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n'
                    '<h3>BitTorrent download info</h3>\n'
                    '<ul>\n'
                    '<li><strong>tracker version:</strong> %s</li>\n'
                    '<li><strong>server time:</strong> %s</li>\n'
                    '</ul>\n' % (version, isotime()))

            if self.allowed is not None:
                if self.show_names:
                    names = [ (value[1].name, infohash)
                              for infohash, value in self.allowed.iteritems()]
                else:
                    names = [(None, infohash) for infohash in self.allowed]
            else:
                names = [ (None, infohash) for infohash in self.downloads]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0   # Total transferred
                ts = 0   # Total size
                nf = 0   # Number of files displayed
                if self.allowed is not None and self.show_names:
                    s.write('<table summary="files" border="1">\n'
                            '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n'
                            '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name, infohash in names:
                    l = self.downloads[infohash]
                    n = self.completed.get(infohash, 0)
                    tn = tn + n
                    c = self.seedcount[infohash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    nf = nf + 1
                    if self.allowed is not None and self.show_names:
                        if self.allowed.has_key(infohash):
                            sz = self.allowed[infohash][1].total_bytes   # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(infohash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n'
                                    % (b2a_hex(infohash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n'
                                % (b2a_hex(infohash), c, d, n))
                ttn = 0
                for i in self.completed.itervalues():
                    ttn = ttn + i
                if self.allowed is not None and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n'
                            % (nf, tc, td, tn, ttn))
                s.write('</table>\n'
                        '<ul>\n'
                        '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n'
                        '<li><em>complete:</em> number of connected clients with the complete file</li>\n'
                        '<li><em>downloading:</em> number of connected clients still downloading</li>\n'
                        '<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n'
                        '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n'
                        '</ul>\n')

            s.write('</body>\n'
                    '</html>\n')
            return (200, 'OK',
                    {'Content-Type': 'text/html; charset=iso-8859-1'},
                    s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error',
                    {'Content-Type': 'text/html; charset=iso-8859-1'},
                    'Server Error')

    def scrapedata(self, infohash, return_name = True):
        l = self.downloads[infohash]
        n = self.completed.get(infohash, 0)
        c = self.seedcount[infohash]
        d = len(l) - c
        f = {'complete': c, 'incomplete': d, 'downloaded': n}
        if return_name and self.show_names and self.allowed is not None:
            f['name'] = self.allowed[infohash]['name']
        return (f)

    def get_scrape(self, paramslist):
        fs = {}
        if paramslist.has_key('info_hash'):
            if self.config['scrape_allowed'] not in ['specific', 'full']:
                return (400, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                                 "specific scrape function is not available with this tracker."}))
            for infohash in paramslist['info_hash']:
                if self.allowed is not None and infohash not in self.allowed:
                    continue
                if infohash in self.downloads:
                    fs[infohash] = self.scrapedata(infohash)
        else:
            if self.config['scrape_allowed'] != 'full':
                return (400, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                                 "full scrape function is not available with this tracker."}))
                #bencode({'failure reason':
                #_("full scrape function is not available with this tracker.")}))
            if self.allowed is not None:
                hashes = self.allowed
            else:
                hashes = self.downloads
            for infohash in hashes:
                fs[infohash] = self.scrapedata(infohash)
        return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
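
    # A successful scrape bencodes one entry per torrent, e.g. (illustrative):
    #   {'files': {infohash: {'complete': 5, 'incomplete': 2, 'downloaded': 40}}}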

    def get_file(self, infohash):
        if not self.allow_get:
            return (400, 'Not Authorized',
                    default_headers,
                    _("get function is not available with this tracker."))
        if not self.allowed.has_key(infohash):
            return (404, 'Not Found', default_headers, alas)
        fname = self.allowed[infohash]['file']
        fpath = self.allowed[infohash]['path']
        return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
                            'Content-Disposition': 'attachment; filename=' + fname},
                open(fpath, 'rb').read())

    def check_allowed(self, infohash, paramslist):
        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return (200, 'Not Authorized', default_headers,
                        bencode({'failure reason':
                                 "Requested download is not authorized for use with this tracker."}))
                        #_("Requested download is not authorized for use with this tracker.")}))
            if self.config['allowed_controls']:
                if self.allowed[infohash].has_key('failure reason'):
                    return (200, 'Not Authorized', default_headers,
                            bencode({'failure reason': self.allowed[infohash]['failure reason']}))
        return None

    def add_data(self, infohash, event, ip, paramslist):
        peers = self.downloads.setdefault(infohash, {})
        ts = self.times.setdefault(infohash, {})
        self.completed.setdefault(infohash, 0)
        self.seedcount.setdefault(infohash, 0)

        def params(key, default = None, l = paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        myid = params('peer_id', '')
        if len(myid) != 20:
            raise ValueError, 'id not of length 20'
        if event not in ['started', 'completed', 'stopped', 'snooped', None]:
            raise ValueError, 'invalid event'
        port = int(params('port', ''))
        if port < 0 or port > 65535:
            raise ValueError, 'invalid port'
        left = int(params('left', ''))
        if left < 0:
            raise ValueError, 'invalid amount left'

        peer = peers.get(myid)
        mykey = params('key')
        auth = not peer or peer.get('key', -1) == mykey or peer.get('ip') == ip

        gip = params('ip')
        local_override = gip and self.allow_local_override(ip, gip)
        if local_override:
            ip1 = gip
        else:
            ip1 = ip
        if not auth and local_override and self.only_local_override_ip:
            auth = True

        if params('numwant') is not None:
            rsize = min(int(params('numwant')), self.max_give)
        else:
            rsize = self.response_size

        if event == 'stopped':
            if peer and auth:
                self.delete_peer(infohash, myid)

        elif not peer:
            ts[myid] = time()
            peer = {'ip': ip, 'port': port, 'left': left}
            if mykey:
                peer['key'] = mykey
            if gip:
                peer['given ip'] = gip
            if port:
                if not self.natcheck or (local_override and self.only_local_override_ip):
                    peer['nat'] = 0
                    self.natcheckOK(infohash, myid, ip1, port, left)
                else:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port,
                             self.rawserver)
            else:
                peer['nat'] = 2**30
            if event == 'completed':
                self.completed[infohash] += 1
            if not left:
                self.seedcount[infohash] += 1

            peers[myid] = peer

        else:
            if not auth:
                return rsize    # return w/o changing stats

            ts[myid] = time()
            if not left and peer['left']:
                self.completed[infohash] += 1
                self.seedcount[infohash] += 1
                if not peer.get('nat', -1):
                    for bc in self.becache[infohash]:
                        bc[1][myid] = bc[0][myid]
                        del bc[0][myid]
            if peer['left']:
                peer['left'] = left

            recheck = False
            if ip != peer['ip']:
                peer['ip'] = ip
                recheck = True
            if gip != peer.get('given ip'):
                if gip:
                    peer['given ip'] = gip
                elif peer.has_key('given ip'):
                    del peer['given ip']
                if local_override:
                    if self.only_local_override_ip:
                        self.natcheckOK(infohash, myid, ip1, port, left)
                    else:
                        recheck = True

            if port and self.natcheck:
                if recheck:
                    if peer.has_key('nat'):
                        if not peer['nat']:
                            l = self.becache[infohash]
                            y = not peer['left']
                            for x in l:
                                del x[y][myid]
                        del peer['nat']   # restart NAT testing
                else:
                    natted = peer.get('nat', -1)
                    if natted and natted < self.natcheck:
                        recheck = True

                if recheck:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port,
                             self.rawserver)

        return rsize
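
    # add_data() handles the three announce cases: 'stopped' deletes the peer,
    # an unknown peer_id creates a record (and starts a NatCheck connect-back
    # unless NAT checking is off or a local override applies), and a returning
    # peer updates its stats. A returning peer is only trusted ("auth") if its
    # 'key' matches or it announces from the same IP as before.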

    def peerlist(self, infohash, stopped, is_seed, return_type, rsize):
        data = {}   # return data
        seeds = self.seedcount[infohash]
        data['complete'] = seeds
        data['incomplete'] = len(self.downloads[infohash]) - seeds

        if ( self.allowed is not None and self.config['allowed_controls'] and
             self.allowed[infohash].has_key('warning message') ):
            data['warning message'] = self.allowed[infohash]['warning message']

        data['interval'] = self.reannounce_interval
        if stopped or not rsize:   # save some bandwidth
            data['peers'] = []
            return data

        bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        len_l = len(bc[0][0])
        len_s = len(bc[0][1])
        if not (len_l+len_s):   # caches are empty!
            data['peers'] = []
            return data
        l_get_size = int(float(rsize)*(len_l)/(len_l+len_s))
        cache = self.cached.setdefault(infohash, [None, None, None])[return_type]
        if cache:
            if cache[0] + self.config['min_time_between_cache_refreshes'] < time():
                cache = None
            else:
                if ( (is_seed and len(cache[1]) < rsize)
                     or len(cache[1]) < l_get_size or not cache[1] ):
                    cache = None
        if not cache:
            vv = [[], [], []]
            cache = [ time(),
                      bc[return_type][0].values()+vv[return_type],
                      bc[return_type][1].values() ]
            shuffle(cache[1])
            shuffle(cache[2])
            self.cached[infohash][return_type] = cache
            for rr in xrange(len(self.cached[infohash])):
                if rr != return_type:
                    try:
                        self.cached[infohash][rr][1].extend(vv[rr])
                    except:
                        pass
        if len(cache[1]) < l_get_size:
            peerdata = cache[1]
            if not is_seed:
                peerdata.extend(cache[2])
            cache[1] = []
            cache[2] = []
        else:
            if not is_seed:
                peerdata = cache[2][l_get_size-rsize:]
                del cache[2][l_get_size-rsize:]
                rsize -= len(peerdata)
            else:
                peerdata = []
            if rsize:
                peerdata.extend(cache[1][-rsize:])
                del cache[1][-rsize:]
        if return_type == 2:
            peerdata = ''.join(peerdata)
        data['peers'] = peerdata
        return data
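
    # peerlist() hands out peers in the format selected in get():
    #   return_type 0 -> pre-bencoded {'ip', 'port', 'peer id'} dicts
    #   return_type 1 -> pre-bencoded {'ip', 'port'} dicts   (no_peer_id=1)
    #   return_type 2 -> one 6-byte compact entry per peer, concatenated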

    def get(self, connection, path, headers):
        ip = connection.get_ip()

        nip = get_forwarded_ip(headers)
        if nip and not self.only_local_override_ip:
            ip = nip

        paramslist = {}
        def params(key, default = None, l = paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        try:
            (scheme, netloc, path, pars, query, fragment) = urlparse(path)
            if self.uq_broken == 1:
                path = path.replace('+', ' ')
                query = query.replace('+', ' ')
            path = unquote(path)[1:]
            for s in query.split('&'):
                if s != '':
                    i = s.index('=')
                    kw = unquote(s[:i])
                    paramslist.setdefault(kw, [])
                    paramslist[kw] += [unquote(s[i+1:])]

            if path == '' or path == 'index.html':
                return self.get_infopage()
            if path == 'scrape':
                return self.get_scrape(paramslist)
            if (path == 'file'):
                return self.get_file(params('info_hash'))
            if path == 'favicon.ico' and self.favicon is not None:
                return (200, 'OK', {'Content-Type': 'image/x-icon'}, self.favicon)
            if path != 'announce':
                return (404, 'Not Found', default_headers, alas)

            # main tracker function
            infohash = params('info_hash')
            if not infohash:
                raise ValueError, 'no info hash'
            notallowed = self.check_allowed(infohash, paramslist)
            if notallowed:
                if NOISY:
                    self._print_event("get: NOT ALLOWED: info_hash=%s, %s" %
                                      (infohash.encode('hex'), str(notallowed)))
                return notallowed
            event = params('event')

            rsize = self.add_data(infohash, event, ip, paramslist)

        except ValueError, e:
            print e
            if NOISY:
                self._print_exc("get: ", e)
            return (400, 'Bad Request',
                    {'Content-Type': 'text/plain'},
                    'you sent me garbage - ' + str_exc(e))

        if params('compact'):
            return_type = 2
        elif params('no_peer_id'):
            return_type = 1
        else:
            return_type = 0

        data = self.peerlist(infohash, event == 'stopped', not params('left'),
                             return_type, rsize)

        if paramslist.has_key('scrape'):
            data['scrape'] = self.scrapedata(infohash, False)

        return (200, 'OK', default_headers, bencode(data))
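
    # Illustrative announce request this handler serves (the parameter names
    # are the standard tracker protocol ones read by params() above):
    #   GET /announce?info_hash=...&peer_id=...&port=6881&left=0&compact=1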

    def natcheckOK(self, infohash, peerid, ip, port, not_seed):
        bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        bc[0][not not_seed][peerid] = Bencached(bencode({'ip': ip, 'port': port,
                                                         'peer id': peerid}))
        bc[1][not not_seed][peerid] = Bencached(bencode({'ip': ip, 'port': port}))
        bc[2][not not_seed][peerid] = compact_peer_info(ip, port)
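
    # becache layout: becache[infohash][return_type][is_seed] maps
    # peerid -> pre-rendered peer entry, where return_type 0/1/2 matches the
    # full, no_peer_id, and compact formats handed out by peerlist(), and
    # index 0 holds downloaders while index 1 holds seeds.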

    def natchecklog(self, peerid, ip, port, result):
        print isotime(), '"!natcheck-%s:%i" %s %i 0 - -' % (
            ip, port, quote(peerid), result)

    def connectback_result(self, result, downloadid, peerid, ip, port):
        record = self.downloads.get(downloadid, {}).get(peerid)
        if ( record is None
             or (record['ip'] != ip and record.get('given ip') != ip)
             or record['port'] != port ):
            if self.config['log_nat_checks']:
                self.natchecklog(peerid, ip, port, 404)
            return
        if self.config['log_nat_checks']:
            if result:
                x = 200
            else:
                x = 503
            self.natchecklog(peerid, ip, port, x)
        if not record.has_key('nat'):
            record['nat'] = int(not result)
            if result:
                self.natcheckOK(downloadid, peerid, ip, port, record['left'])
        elif result and record['nat']:
            record['nat'] = 0
            self.natcheckOK(downloadid, peerid, ip, port, record['left'])
        elif not result:
            record['nat'] += 1
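
    # record['nat'] counts consecutive failed connect-backs; add_data()
    # schedules another NatCheck while the count is below the configured
    # nat_check limit, and a count of 0 marks the peer as connectable.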

    def save_dfile(self):
        if self.save_pending:
            return
        self.save_pending = True
        # if this is taking all the time, threading it won't help anyway because
        # of the GIL
        #state = bencode(self.state)
        state = cPickle.dumps(self.state)   # pickle handles Unicode.
        df = ThreadedDeferred(wrap_task(self.rawserver.external_add_task),
                              self._save_dfile, state)
        def cb(r):
            self.save_pending = False
            if NOISY:
                self._print_event("save_dfile: Completed")
        def eb(etup):
            self.save_pending = False
            self._print_exc("save_dfile: ", etup)
        df.addCallbacks(cb, eb)

    def _save_dfile(self, state):
        exc_info = None
        try:
            h = open(self.dfile, 'wb')
            h.write(state)
            h.close()
        except:
            exc_info = sys.exc_info()
        self.rawserver.external_add_task(self.save_dfile_interval, self.save_dfile)
        if exc_info:
            raise exc_info[0], exc_info[1], exc_info[2]

    def parse_allowed(self):
        if self.parse_pending:
            return
        self.parse_pending = True
        df = ThreadedDeferred(wrap_task(self.rawserver.external_add_task),
                              self._parse_allowed, daemon=True)
        def eb(etup):
            self.parse_pending = False
            self._print_exc("parse_dir: ", etup)
        df.addCallbacks(self._parse_allowed_finished, eb)

    def _parse_allowed(self):
        def errfunc(message, exc_info=None):
            # logging broken .torrent files would be useful but could confuse
            # programs parsing log files
            m = "parse_dir: %s" % message
            if exc_info:
                self._print_exc(m, exc_info)
            else:
                self._print_event(m)
        r = parsedir(self.allowed_dir, self.allowed, self.allowed_dir_files,
                     self.allowed_dir_blocked, errfunc, include_metainfo = False)
        # register the call to parse a dir.
        self.rawserver.external_add_task(self.parse_dir_interval,
                                         self.parse_allowed)
        return r

    def _parse_allowed_finished(self, r):
        self.parse_pending = False
        ( self.allowed, self.allowed_dir_files, self.allowed_dir_blocked,
          added, removed ) = r
        if NOISY:
            self._print_event("_parse_allowed_finished: removals: %s" %
                              str(removed))
        for infohash in added:
            self.downloads.setdefault(infohash, {})
            self.completed.setdefault(infohash, 0)
            self.seedcount.setdefault(infohash, 0)

        self.state['allowed'] = self.allowed
        self.state['allowed_dir_files'] = self.allowed_dir_files

    def delete_peer(self, infohash, peerid):
        dls = self.downloads[infohash]
        peer = dls[peerid]
        if not peer['left']:
            self.seedcount[infohash] -= 1
        if not peer.get('nat', -1):
            l = self.becache[infohash]
            y = not peer['left']
            for x in l:
                del x[y][peerid]
        del self.times[infohash][peerid]
        del dls[peerid]

    def expire_downloaders(self):
        for infohash, peertimes in self.times.iteritems():
            items = peertimes.items()
            for myid, t in items:
                if t < self.prevtime:
                    self.delete_peer(infohash, myid)
        self.prevtime = time()
        if self.keep_dead != 1:
            items = self.downloads.items()
            for key, peers in items:
                if len(peers) == 0 and (self.allowed is None or
                                        key not in self.allowed):
                    del self.times[key]
                    del self.downloads[key]
                    del self.seedcount[key]
        self.rawserver.add_task(self.timeout_downloaders_interval,
                                self.expire_downloaders)

    def _print_event(self, message):
        print datetime.datetime.utcnow().isoformat(), message

    def _print_exc(self, note, etup):
        print datetime.datetime.utcnow().isoformat(), note, ':'
        traceback.print_exception(*etup)

def track(args):
    assert type(args) == list and \
           len([x for x in args if type(x)==str])==len(args)

    config = {}
    defaults = get_defaults('bittorrent-tracker')   # hard-coded defaults.
    try:
        config, files = parse_configuration_and_args(defaults,
                                                     'bittorrent-tracker', args, 0, 0)
    except ValueError, e:
        print _("error: ") + str_exc(e)
        print _("run with -? for parameter explanations")
        return
    except BTFailure, e:
        print _("error: ") + str_exc(e)
        print _("run with -? for parameter explanations")
        return

    if config['dfile'] == "":
        config['dfile'] = decode_from_filesystem(
            os.path.join(platform.get_temp_dir(), efs2(u"dfile") +
                         str(os.getpid())))

    config = Preferences().initWithDict(config)
    ef = lambda e: errorfunc(logging.WARNING, e)
    platform.write_pid_file(config['pid'], ef)

    t = None
    try:
        r = RawServer(config)
        t = Tracker(config, r)
        try:
            #DEBUG
            print "track: create_serversocket, port=", config['port']
            #END
            s = r.create_serversocket(config['port'], config['bind'])
            handler = HTTPHandler(t.get, config['min_time_between_log_flushes'])
            r.start_listening(s, handler)
        except socket.error, e:
            print ("Unable to open port %d. Use a different port?" %
                   config['port'])
            return
        r.listen_forever()
    finally:
        if t: t.save_dfile()
        print _("# Shutting down: ") + isotime()

def size_format(s):
    return str(Size(s))

def errorfunc(level, text):
    print "%s: %s" % (logging.getLevelName(level), text)