track.py 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137
  1. # Written by Bram Cohen
  2. # see LICENSE.txt for license information
  3. from BitTornado.parseargs import parseargs, formatDefinitions
  4. from BitTornado.RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
  5. from BitTornado.HTTPHandler import HTTPHandler, months, weekdays
  6. from BitTornado.parsedir import parsedir
  7. from NatCheck import NatCheck, CHECK_PEER_ID_ENCRYPTED
  8. from BitTornado.BTcrypto import CRYPTO_OK
  9. from T2T import T2TList
  10. from BitTornado.subnetparse import IP_List, ipv6_to_ipv4, to_ipv4, is_valid_ip, is_ipv4
  11. from BitTornado.iprangeparse import IP_List as IP_Range_List
  12. from BitTornado.torrentlistparse import parsetorrentlist
  13. from threading import Event, Thread
  14. from BitTornado.bencode import bencode, bdecode, Bencached
  15. from BitTornado.zurllib import urlopen, quote, unquote
  16. from Filter import Filter
  17. from urlparse import urlparse
  18. from os import rename, getpid
  19. from os.path import exists, isfile
  20. from cStringIO import StringIO
  21. from traceback import print_exc
  22. from time import time, gmtime, strftime, localtime
  23. from BitTornado.clock import clock
  24. from random import shuffle, seed, randrange
  25. from sha import sha
  26. from types import StringType, IntType, LongType, ListType, DictType
  27. from binascii import b2a_hex, a2b_hex, a2b_base64
  28. from string import lower
  29. import sys, os
  30. import signal
  31. import re
  32. import BitTornado.__init__
  33. from BitTornado.__init__ import version, createPeerID
# Compatibility shim for very old Pythons (pre-2.2.1) that lack the
# True/False/bool builtins; on modern interpreters the try body succeeds
# and the except branch never runs.
try:
    True
except:
    True = 1
    False = 0
    bool = lambda x: not not x
# Option table consumed by BitTornado.parseargs: (name, default, help text).
# Every entry is reachable at runtime as config[name].
defaults = [
    ('port', 80, "Port to listen on."),
    ('dfile', None, 'file to store recent downloader info in'),
    ('bind', '', 'comma-separated list of ips/hostnames to bind to locally'),
#    ('ipv6_enabled', autodetect_ipv6(),
    ('ipv6_enabled', 0,
         'allow the client to connect to peers via IPv6'),
    ('ipv6_binds_v4', autodetect_socket_style(),
        'set if an IPv6 server socket will also field IPv4 connections'),
    ('socket_timeout', 15, 'timeout for closing connections'),
    ('save_dfile_interval', 5 * 60, 'seconds between saving dfile'),
    ('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'),
    ('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'),
    ('response_size', 50, 'number of peers to send in an info message'),
    ('timeout_check_interval', 5,
        'time to wait between checking if any connections have timed out'),
    ('nat_check', 3,
        "how many times to check if a downloader is behind a NAT (0 = don't check)"),
    ('log_nat_checks', 0,
        "whether to add entries to the log for nat-check results"),
    ('min_time_between_log_flushes', 3.0,
        'minimum time it must have been since the last flush to do another one'),
    ('min_time_between_cache_refreshes', 600.0,
        'minimum time in seconds before a cache is considered stale and is flushed'),
    ('allowed_dir', '', 'only allow downloads for .torrents in this dir'),
    ('allowed_list', '', 'only allow downloads for hashes in this list (hex format, one per line)'),
    ('allowed_controls', 0, 'allow special keys in torrents in the allowed_dir to affect tracker access'),
    ('multitracker_enabled', 0, 'whether to enable multitracker operation'),
    ('multitracker_allowed', 'autodetect', 'whether to allow incoming tracker announces (can be none, autodetect or all)'),
    ('multitracker_reannounce_interval', 2 * 60, 'seconds between outgoing tracker announces'),
    ('multitracker_maxpeers', 20, 'number of peers to get in a tracker announce'),
    ('aggregate_forward', '', 'format: <url>[,<password>] - if set, forwards all non-multitracker to this url with this optional password'),
    ('aggregator', '0', 'whether to act as a data aggregator rather than a tracker. If enabled, may be 1, or <password>; ' +
             'if password is set, then an incoming password is required for access'),
    ('hupmonitor', 0, 'whether to reopen the log file upon receipt of HUP signal'),
    ('http_timeout', 60,
        'number of seconds to wait before assuming that an http connection has timed out'),
    ('parse_dir_interval', 60, 'seconds between reloading of allowed_dir or allowed_file ' +
             'and allowed_ips and banned_ips lists'),
    ('show_infopage', 1, "whether to display an info page when the tracker's root dir is loaded"),
    ('infopage_redirect', '', 'a URL to redirect the info page to'),
    ('show_names', 1, 'whether to display names from allowed dir'),
    ('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'),
    ('allowed_ips', '', 'only allow connections from IPs specified in the given file; '+
        'file contains subnet data in the format: aa.bb.cc.dd/len'),
    ('banned_ips', '', "don't allow connections from IPs specified in the given file; "+
        'file contains IP range data in the format: xxx:xxx:ip1-ip2'),
    ('only_local_override_ip', 2, "ignore the ip GET parameter from machines which aren't on local network IPs " +
        "(0 = never, 1 = always, 2 = ignore if NAT checking is not enabled)"),
    ('logfile', '', 'file to write the tracker logs, use - for stdout (default)'),
    ('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'),
    ('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'),
    ('scrape_allowed', 'full', 'scrape access allowed (can be none, specific or full)'),
    ('dedicated_seed_id', '', 'allows tracker to monitor dedicated seed(s) and flag torrents as seeded'),
    ('compact_reqd', 1, "only allow peers that accept a compact response"),
    ]
def statefiletemplate(x):
    """Validate the shape of a decoded state file (dfile) dictionary.

    Raises ValueError on the first structural problem; returns None if the
    'peers', 'completed', 'allowed' and 'allowed_dir_files' sections (any of
    which may be absent) all have the layouts checked below.
    """
    if type(x) != DictType:
        raise ValueError
    for cname, cinfo in x.items():
        if cname == 'peers':
            for y in cinfo.values():       # The 'peers' key is a dictionary of SHA hashes (torrent ids)
                if type(y) != DictType:    # ... for the active torrents, and each is a dictionary
                    raise ValueError
                for id, info in y.items(): # ... of client ids interested in that torrent
                    if (len(id) != 20):
                        raise ValueError
                    if type(info) != DictType:  # ... each of which is also a dictionary
                        raise ValueError        # ... which has an IP, a Port, and a Bytes Left count for that client for that torrent
                    if type(info.get('ip', '')) != StringType:
                        raise ValueError
                    port = info.get('port')
                    if type(port) not in (IntType,LongType) or port < 0:
                        raise ValueError
                    left = info.get('left')
                    if type(left) not in (IntType,LongType) or left < 0:
                        raise ValueError
                    if type(info.get('supportcrypto')) not in (IntType,LongType):
                        raise ValueError
                    if type(info.get('requirecrypto')) not in (IntType,LongType):
                        raise ValueError
        elif cname == 'completed':
            if (type(cinfo) != DictType):  # The 'completed' key is a dictionary of SHA hashes (torrent ids)
                raise ValueError           # ... for keeping track of the total completions per torrent
            for y in cinfo.values():       # ... each torrent has an integer value
                if type(y) not in (IntType,LongType):
                    raise ValueError       # ... for the number of reported completions for that torrent
        elif cname == 'allowed':
            if (type(cinfo) != DictType):  # a list of info_hashes and included data
                raise ValueError
            if x.has_key('allowed_dir_files'):
                adlist = [z[1] for z in x['allowed_dir_files'].values()]
                for y in cinfo.keys():     # and each should have a corresponding key here
                    if not y in adlist:
                        raise ValueError
        elif cname == 'allowed_dir_files':
            if (type(cinfo) != DictType):  # a list of files, their attributes and info hashes
                raise ValueError
            dirkeys = {}
            for y in cinfo.values():       # each entry should have a corresponding info_hash
                if not y[1]:
                    continue
                if not x['allowed'].has_key(y[1]):
                    raise ValueError
                if dirkeys.has_key(y[1]):  # and each should have a unique info_hash
                    raise ValueError
                dirkeys[y[1]] = 1
# Plain-text body returned for requests about torrents this tracker
# does not know about.
alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'

# Subnet list of intranet/loopback addresses; used to decide whether a
# peer-supplied IP override can be trusted.
local_IPs = IP_List()
local_IPs.set_intranet_addresses()
  150. def isotime(secs = None):
  151. if secs == None:
  152. secs = time()
  153. return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))
# Extracts the trailing " for <ip>" clause that some proxies append to a
# Via: header.
http_via_filter = re.compile(' for ([0-9.]+)\Z')
  155. def _get_forwarded_ip(headers):
  156. header = headers.get('x-forwarded-for')
  157. if header:
  158. try:
  159. x,y = header.split(',')
  160. except:
  161. return header
  162. if is_valid_ip(x) and not local_IPs.includes(x):
  163. return x
  164. return y
  165. header = headers.get('client-ip')
  166. if header:
  167. return header
  168. header = headers.get('via')
  169. if header:
  170. x = http_via_filter.search(header)
  171. try:
  172. return x.group(1)
  173. except:
  174. pass
  175. header = headers.get('from')
  176. #if header:
  177. # return header
  178. #return None
  179. return header
  180. def get_forwarded_ip(headers):
  181. x = _get_forwarded_ip(headers)
  182. if not is_valid_ip(x) or local_IPs.includes(x):
  183. return None
  184. return x
  185. def compact_peer_info(ip, port):
  186. try:
  187. s = ( ''.join([chr(int(i)) for i in ip.split('.')])
  188. + chr((port & 0xFF00) >> 8) + chr(port & 0xFF) )
  189. if len(s) != 6:
  190. raise ValueError
  191. except:
  192. s = '' # not a valid IP, must be a domain name
  193. return s
  194. class Tracker:
    def __init__(self, config, rawserver):
        """Build a tracker around *rawserver* using the parsed *config* dict.

        Loads the persisted state file (dfile), primes the peer caches,
        configures logging/HUP handling, the multitracker (T2T) list and
        the allowed-torrent policy, then schedules the periodic state-save
        and downloader-expiry tasks on rawserver.
        """
        self.config = config
        self.response_size = config['response_size']
        self.dfile = config['dfile']
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.parse_dir_interval = config['parse_dir_interval']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon,'r')
                self.favicon = h.read()
                h.close()
            except:
                print "**warning** specified favicon file -- %s -- does not exist." % favicon
        self.rawserver = rawserver
        self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], ...]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}     # per-torrent, per-peer last-announce clocks
        self.state = {}     # decoded dfile contents (persisted across runs)
        self.seedcount = {} # infohash -> number of connected seeds

        # Optional IP allow/deny lists, reloaded from files on a timer.
        self.allowed_IPs = None
        self.banned_IPs = None
        if config['allowed_ips'] or config['banned_ips']:
            self.allowed_ip_mtime = 0
            self.banned_ip_mtime = 0
            self.read_ip_lists()

        # only_local_override_ip == 2 means "auto": trust the ip= GET
        # parameter only when NAT checking is disabled.
        self.only_local_override_ip = config['only_local_override_ip']
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['nat_check']

        if CHECK_PEER_ID_ENCRYPTED and not CRYPTO_OK:
            print ('**warning** crypto library not installed,' +
                   ' cannot completely verify encrypted peers')

        # Restore persisted state; any decode/validation error resets it.
        if exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                tempstate = bdecode(ds)
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}   # upgrade pre-'peers' layout
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                print '**warning** statefile '+self.dfile+' corrupt; resetting'
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})

        self.becache = {}
        ''' format: infohash: [[l0, s0], [l1, s1], ...]
                l0,s0 = compact, not requirecrypto=1
                l1,s1 = compact, only supportcrypto=1
                l2,s2 = [compact, crypto_flag], all peers
            if --compact_reqd 0:
                l3,s3 = [ip,port,id]
                l4,l4 = [ip,port] nopeerid
        '''
        # Three (leecher,seed) cache pairs normally; five when non-compact
        # responses are still permitted.
        if config['compact_reqd']:
            x = 3
        else:
            x = 5
        self.cache_default = [({},{}) for i in xrange(x)]
        # Re-derive seed counts and NAT caches from the restored peer table,
        # dropping peers now excluded by the IP allow/deny lists.
        for infohash, ds in self.downloads.items():
            self.seedcount[infohash] = 0
            for x,y in ds.items():
                ip = y['ip']
                if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
                     or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
                    del ds[x]
                    continue
                if not y['left']:
                    self.seedcount[infohash] += 1
                if y.get('nat',-1):
                    continue   # only nat==0 peers go into the becache
                gip = y.get('given_ip')
                if is_valid_ip(gip) and (
                    not self.only_local_override_ip or local_IPs.includes(ip) ):
                    ip = gip
                self.natcheckOK(infohash,x,ip,y['port'],y)

        # All restored peers start with an announce time of 0 (never).
        for x in self.downloads.keys():
            self.times[x] = {}
            for y in self.downloads[x].keys():
                self.times[x][y] = 0

        self.trackerid = createPeerID('-T-')
        seed(self.trackerid)

        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_state, self.save_dfile_interval)
        self.prevtime = clock()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
        self.logfile = None
        self.log = None
        # Redirect stdout into the log file unless logging to stdout ('-').
        if (config['logfile']) and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile,'a')
                sys.stdout = self.log
                print "# Log Started: ", isotime()
            except:
                print "**warning** could not redirect stdout to log file: ", sys.exc_info()[0]

        # SIGHUP reopens the log, allowing external log rotation.
        if config['hupmonitor']:
            def huphandler(signum, frame, self = self):
                try:
                    self.log.close ()
                    self.log = open(self.logfile,'a')
                    sys.stdout = self.log
                    print "# Log reopened: ", isotime()
                except:
                    print "**warning** could not reopen logfile"
            signal.signal(signal.SIGHUP, huphandler)

        self.allow_get = config['allow_get']

        self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
                               config['multitracker_reannounce_interval'],
                               config['multitracker_maxpeers'], config['http_timeout'],
                               self.rawserver)

        # allowed_list and allowed_dir are mutually exclusive torrent
        # whitelisting mechanisms; allowed_list wins if both are given.
        if config['allowed_list']:
            if config['allowed_dir']:
                print '**warning** allowed_dir and allowed_list options cannot be used together'
                print '**warning** disregarding allowed_dir'
                config['allowed_dir'] = ''
            self.allowed = self.state.setdefault('allowed_list',{})
            self.allowed_list_mtime = 0
            self.parse_allowed()
            self.remove_from_state('allowed','allowed_dir_files')
            if config['multitracker_allowed'] == 'autodetect':
                config['multitracker_allowed'] = 'none'
            config['allowed_controls'] = 0

        elif config['allowed_dir']:
            self.allowed = self.state.setdefault('allowed',{})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files',{})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
            self.remove_from_state('allowed_list')

        else:
            # No whitelist: track anything that announces.
            self.allowed = None
            self.remove_from_state('allowed','allowed_dir_files', 'allowed_list')
            if config['multitracker_allowed'] == 'autodetect':
                config['multitracker_allowed'] = 'none'
            config['allowed_controls'] = 0

        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']
        self.Filter = Filter(rawserver.add_task)

        # Aggregator mode: '0' = off, '1' = on with no password, anything
        # else = on with that value required as the access password.
        aggregator = config['aggregator']
        if aggregator == '0':
            self.is_aggregator = False
            self.aggregator_key = None
        else:
            self.is_aggregator = True
            if aggregator == '1':
                self.aggregator_key = None
            else:
                self.aggregator_key = aggregator
            self.natcheck = False   # aggregators never NAT-check

        send = config['aggregate_forward']
        if not send:
            self.aggregate_forward = None
        else:
            # format: <url>[,<password>]
            try:
                self.aggregate_forward, self.aggregate_password = send.split(',')
            except:
                self.aggregate_forward = send
                self.aggregate_password = None

        self.dedicated_seed_id = config['dedicated_seed_id']
        self.is_seeded = {}

        self.cachetime = 0
        self.cachetimeupdate()
  362. def cachetimeupdate(self):
  363. self.cachetime += 1 # raw clock, but more efficient for cache
  364. self.rawserver.add_task(self.cachetimeupdate,1)
  365. def aggregate_senddata(self, query):
  366. url = self.aggregate_forward+'?'+query
  367. if self.aggregate_password is not None:
  368. url += '&password='+self.aggregate_password
  369. rq = Thread(target = self._aggregate_senddata, args = [url])
  370. rq.setDaemon(False)
  371. rq.start()
  372. def _aggregate_senddata(self, url): # just send, don't attempt to error check,
  373. try: # discard any returned data
  374. h = urlopen(url)
  375. h.read()
  376. h.close()
  377. except:
  378. return
    def get_infopage(self):
        """Render the HTML status page served at the tracker's root URL.

        Returns an HTTPHandler-style (code, message, headers, body) tuple:
        404 when the page is disabled, 302 when a redirect is configured,
        otherwise 200 with a table of tracked torrents; any rendering
        error is caught and reported as a 500.
        """
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red:
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
                '<h3>BitTorrent download info</h3>\n'\
                '<ul>\n'
                '<li><strong>tracker version:</strong> %s</li>\n' \
                '<li><strong>server time:</strong> %s</li>\n' \
                '</ul>\n' % (version, isotime()))
            # Names come from the allowed dir when configured (optionally
            # hidden), else from the raw download table.
            if self.config['allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'],hash)
                              for hash in self.allowed.keys() ]
                else:
                    names = [ (None,hash)
                              for hash in self.allowed.keys() ]
            else:
                names = [ (None,hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0   # Total downloads completed
                tc = 0   # Total seeds
                td = 0   # Total leechers
                tt = 0   # Total transferred
                ts = 0   # Total size
                nf = 0   # Number of files displayed
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name,hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(hash), c, d, n))
                # Totals row.
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td></tr>\n'
                            % (nf, tc, td, tn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')
            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
  467. def scrapedata(self, hash, return_name = True):
  468. l = self.downloads[hash]
  469. n = self.completed.get(hash, 0)
  470. c = self.seedcount[hash]
  471. d = len(l) - c
  472. f = {'complete': c, 'incomplete': d, 'downloaded': n}
  473. if return_name and self.show_names and self.config['allowed_dir']:
  474. f['name'] = self.allowed[hash]['name']
  475. return (f)
  476. def get_scrape(self, paramslist):
  477. fs = {}
  478. if paramslist.has_key('info_hash'):
  479. if self.config['scrape_allowed'] not in ['specific', 'full']:
  480. return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
  481. bencode({'failure reason':
  482. 'specific scrape function is not available with this tracker.'}))
  483. for hash in paramslist['info_hash']:
  484. if self.allowed is not None:
  485. if self.allowed.has_key(hash):
  486. fs[hash] = self.scrapedata(hash)
  487. else:
  488. if self.downloads.has_key(hash):
  489. fs[hash] = self.scrapedata(hash)
  490. else:
  491. if self.config['scrape_allowed'] != 'full':
  492. return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
  493. bencode({'failure reason':
  494. 'full scrape function is not available with this tracker.'}))
  495. if self.allowed is not None:
  496. keys = self.allowed.keys()
  497. else:
  498. keys = self.downloads.keys()
  499. for hash in keys:
  500. fs[hash] = self.scrapedata(hash)
  501. return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
  502. def get_file(self, hash):
  503. if not self.allow_get:
  504. return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
  505. 'get function is not available with this tracker.')
  506. if not self.allowed.has_key(hash):
  507. return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
  508. fname = self.allowed[hash]['file']
  509. fpath = self.allowed[hash]['path']
  510. return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
  511. 'Content-Disposition': 'attachment; filename=' + fname},
  512. open(fpath, 'rb').read())
    def check_allowed(self, infohash, paramslist):
        """Apply access policy to an announce for *infohash*.

        Returns None when the request is allowed, otherwise a complete
        (code, message, headers, bencoded failure body) response tuple.
        Checks, in order: the aggregator password, the torrent whitelist,
        per-torrent 'failure reason' overrides (allowed_controls), and
        multitracker announce restrictions.
        """
        # Aggregator mode with a password: reject unless it was supplied.
        if ( self.aggregator_key is not None
             and not ( paramslist.has_key('password')
                       and paramslist['password'][0] == self.aggregator_key ) ):
            return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                        'Requested download is not authorized for use with this tracker.'}))

        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason':
                            'Requested download is not authorized for use with this tracker.'}))
            if self.config['allowed_controls']:
                # Torrent-embedded control key can disable its own tracking.
                if self.allowed[infohash].has_key('failure reason'):
                    return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                            bencode({'failure reason': self.allowed[infohash]['failure reason']}))

        # 'tracker' parameter marks a tracker-to-tracker announce.
        if paramslist.has_key('tracker'):
            if ( self.config['multitracker_allowed'] == 'none' or   # turned off
                 paramslist['peer_id'][0] == self.trackerid ):      # oops! contacted myself
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason': 'disallowed'}))
            if ( self.config['multitracker_allowed'] == 'autodetect'
                 and not self.allowed[infohash].has_key('announce-list') ):
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason':
                            'Requested download is not authorized for multitracker use.'}))

        return None
  540. def add_data(self, infohash, event, ip, paramslist):
  541. peers = self.downloads.setdefault(infohash, {})
  542. ts = self.times.setdefault(infohash, {})
  543. self.completed.setdefault(infohash, 0)
  544. self.seedcount.setdefault(infohash, 0)
  545. def params(key, default = None, l = paramslist):
  546. if l.has_key(key):
  547. return l[key][0]
  548. return default
  549. myid = params('peer_id','')
  550. if len(myid) != 20:
  551. raise ValueError, 'id not of length 20'
  552. if event not in ['started', 'completed', 'stopped', 'snooped', None]:
  553. raise ValueError, 'invalid event'
  554. port = params('cryptoport')
  555. if port is None:
  556. port = params('port','')
  557. port = long(port)
  558. if port < 0 or port > 65535:
  559. raise ValueError, 'invalid port'
  560. left = long(params('left',''))
  561. if left < 0:
  562. raise ValueError, 'invalid amount left'
  563. uploaded = long(params('uploaded',''))
  564. downloaded = long(params('downloaded',''))
  565. if params('supportcrypto'):
  566. supportcrypto = 1
  567. try:
  568. s = int(params['requirecrypto'])
  569. chr(s)
  570. except:
  571. s = 0
  572. requirecrypto = s
  573. else:
  574. supportcrypto = 0
  575. requirecrypto = 0
  576. peer = peers.get(myid)
  577. islocal = local_IPs.includes(ip)
  578. mykey = params('key')
  579. if peer:
  580. auth = peer.get('key',-1) == mykey or peer.get('ip') == ip
  581. gip = params('ip')
  582. if is_valid_ip(gip) and (islocal or not self.only_local_override_ip):
  583. ip1 = gip
  584. else:
  585. ip1 = ip
  586. if params('numwant') is not None:
  587. rsize = min(int(params('numwant')),self.response_size)
  588. else:
  589. rsize = self.response_size
  590. if event == 'stopped':
  591. if peer:
  592. if auth:
  593. self.delete_peer(infohash,myid)
  594. elif not peer:
  595. ts[myid] = clock()
  596. peer = { 'ip': ip, 'port': port, 'left': left,
  597. 'supportcrypto': supportcrypto,
  598. 'requirecrypto': requirecrypto }
  599. if mykey:
  600. peer['key'] = mykey
  601. if gip:
  602. peer['given ip'] = gip
  603. if port:
  604. if not self.natcheck or islocal:
  605. peer['nat'] = 0
  606. self.natcheckOK(infohash,myid,ip1,port,peer)
  607. else:
  608. NatCheck(self.connectback_result,infohash,myid,ip1,port,
  609. self.rawserver,encrypted=requirecrypto)
  610. else:
  611. peer['nat'] = 2**30
  612. if event == 'completed':
  613. self.completed[infohash] += 1
  614. if not left:
  615. self.seedcount[infohash] += 1
  616. peers[myid] = peer
  617. else:
  618. if not auth:
  619. return rsize # return w/o changing stats
  620. ts[myid] = clock()
  621. if not left and peer['left']:
  622. self.completed[infohash] += 1
  623. self.seedcount[infohash] += 1
  624. if not peer.get('nat', -1):
  625. for bc in self.becache[infohash]:
  626. bc[1][myid] = bc[0][myid]
  627. del bc[0][myid]
  628. elif left and not peer['left']:
  629. self.completed[infohash] -= 1
  630. self.seedcount[infohash] -= 1
  631. if not peer.get('nat', -1):
  632. for bc in self.becache[infohash]:
  633. bc[0][myid] = bc[1][myid]
  634. del bc[1][myid]
  635. peer['left'] = left
  636. if port:
  637. recheck = False
  638. if ip != peer['ip']:
  639. peer['ip'] = ip
  640. recheck = True
  641. if gip != peer.get('given ip'):
  642. if gip:
  643. peer['given ip'] = gip
  644. elif peer.has_key('given ip'):
  645. del peer['given ip']
  646. recheck = True
  647. natted = peer.get('nat', -1)
  648. if recheck:
  649. if natted == 0:
  650. l = self.becache[infohash]
  651. y = not peer['left']
  652. for x in l:
  653. del x[y][myid]
  654. if natted >= 0:
  655. del peer['nat'] # restart NAT testing
  656. if natted and natted < self.natcheck:
  657. recheck = True
  658. if recheck:
  659. if not self.natcheck or islocal:
  660. peer['nat'] = 0
  661. self.natcheckOK(infohash,myid,ip1,port,peer)
  662. else:
  663. NatCheck(self.connectback_result,infohash,myid,ip1,port,
  664. self.rawserver,encrypted=requirecrypto)
  665. return rsize
def peerlist(self, infohash, stopped, tracker, is_seed,
             return_type, rsize, supportcrypto):
    """Assemble the announce response for one torrent.

    Returns a dict with 'complete'/'incomplete' counts, an 'interval',
    and up to rsize peers drawn from the pre-built caches.
    return_type selects the wire format: 0 = compact, 1 = compact with
    crypto required, 2 = compact with per-peer crypto flags,
    3 = full peer dicts, 4 = peer dicts without peer ids.
    """
    data = {}    # return data
    seeds = self.seedcount[infohash]
    data['complete'] = seeds
    data['incomplete'] = len(self.downloads[infohash]) - seeds

    if ( self.config['allowed_controls']
             and self.allowed[infohash].has_key('warning message') ):
        data['warning message'] = self.allowed[infohash]['warning message']

    if tracker:
        # request came from another tracker (multitracker exchange),
        # served from the dedicated tracker-to-tracker cache
        data['interval'] = self.config['multitracker_reannounce_interval']
        if not rsize:
            return data
        cache = self.cached_t.setdefault(infohash, None)
        # rebuild when missing, too small to satisfy rsize, or stale
        if ( not cache or len(cache[1]) < rsize
             or cache[0] + self.config['min_time_between_cache_refreshes'] < clock() ):
            bc = self.becache.setdefault(infohash,self.cache_default)
            cache = [ clock(), bc[0][0].values() + bc[0][1].values() ]
            self.cached_t[infohash] = cache
            shuffle(cache[1])
        cache = cache[1]

        # hand out peers from the end of the shuffled list, consuming them
        data['peers'] = cache[-rsize:]
        del cache[-rsize:]
        return data

    data['interval'] = self.reannounce_interval
    if stopped or not rsize:     # save some bandwidth
        data['peers'] = []
        return data

    bc = self.becache.setdefault(infohash,self.cache_default)
    len_l = len(bc[2][0])   # cached leechers (bc[x][0] holds non-seeds)
    len_s = len(bc[2][1])   # cached seeds (bc[x][1] holds seeds)
    if not (len_l+len_s):   # caches are empty!
        data['peers'] = []
        return data
    # leechers' proportional share of the rsize peers to return
    l_get_size = int(float(rsize)*(len_l)/(len_l+len_s))
    cache = self.cached.setdefault(infohash,[None,None,None])[return_type]
    # invalidate the per-return-type cache if empty, too small, or stale
    if cache and ( not cache[1]
                   or (is_seed and len(cache[1]) < rsize)
                   or len(cache[1]) < l_get_size
                   or cache[0]+self.config['min_time_between_cache_refreshes'] < self.cachetime ):
        cache = None
    if not cache:
        peers = self.downloads[infohash]
        if self.config['compact_reqd']:
            vv = ([],[],[])
        else:
            vv = ([],[],[],[],[])
        # fold in peers learned via tracker-to-tracker exchange,
        # skipping ones we already know about directly
        for key, ip, port in self.t2tlist.harvest(infohash):   # empty if disabled
            if not peers.has_key(key):
                cp = compact_peer_info(ip, port)
                vv[0].append(cp)
                vv[2].append((cp,'\x00'))
                if not self.config['compact_reqd']:
                    vv[3].append({'ip': ip, 'port': port, 'peer id': key})
                    vv[4].append({'ip': ip, 'port': port})
        # cache layout: [timestamp, shuffled leecher list, shuffled seed list]
        cache = [ self.cachetime,
                  bc[return_type][0].values()+vv[return_type],
                  bc[return_type][1].values() ]
        shuffle(cache[1])
        shuffle(cache[2])
        self.cached[infohash][return_type] = cache
        # share the harvested t2t peers with the other return types' caches
        for rr in xrange(len(self.cached[infohash])):
            if rr != return_type:
                try:
                    self.cached[infohash][rr][1].extend(vv[rr])
                except:
                    pass

    if len(cache[1]) < l_get_size:
        # not enough leechers to fill their share: hand out all of them
        # (plus all seeds, unless the requester is itself a seed)
        peerdata = cache[1]
        if not is_seed:
            peerdata.extend(cache[2])
        cache[1] = []
        cache[2] = []
    else:
        if not is_seed:
            # take rsize-l_get_size seeds from the end of the seed cache
            peerdata = cache[2][l_get_size-rsize:]
            del cache[2][l_get_size-rsize:]
            rsize -= len(peerdata)
        else:
            peerdata = []   # seeds are not given other seeds
        if rsize:
            # fill the remainder with leechers
            peerdata.extend(cache[1][-rsize:])
            del cache[1][-rsize:]

    # encode according to the requested return type
    if return_type == 0:
        data['peers'] = ''.join(peerdata)
    elif return_type == 1:
        data['crypto_flags'] = "0x01"*len(peerdata)
        data['peers'] = ''.join(peerdata)
    elif return_type == 2:
        data['crypto_flags'] = ''.join([p[1] for p in peerdata])
        data['peers'] = ''.join([p[0] for p in peerdata])
    else:
        data['peers'] = peerdata
    return data
def get(self, connection, path, headers):
    """HTTP request dispatcher: serves the info page, torrent files,
    favicon, scrape and announce endpoints.

    Returns an (HTTP status, message, response-headers dict, body) tuple.
    """
    real_ip = connection.get_ip()
    ip = real_ip
    # normalize to IPv4 where possible; ipv4 records whether we succeeded
    if is_ipv4(ip):
        ipv4 = True
    else:
        try:
            ip = ipv6_to_ipv4(ip)
            ipv4 = True
        except ValueError:
            ipv4 = False

    # enforce the configured IP allow/ban lists
    if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
         or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
        return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
            bencode({'failure reason':
            'your IP is not allowed on this tracker'}))

    # honor forwarded-for headers unless configured to trust only locals
    nip = get_forwarded_ip(headers)
    if nip and not self.only_local_override_ip:
        ip = nip
        try:
            ip = to_ipv4(ip)
            ipv4 = True
        except ValueError:
            ipv4 = False

    paramslist = {}
    def params(key, default = None, l = paramslist):
        # first value of a query parameter, or default if absent
        if l.has_key(key):
            return l[key][0]
        return default

    try:
        (scheme, netloc, path, pars, query, fragment) = urlparse(path)
        if self.uq_broken == 1:
            # work around clients that encode spaces as '+'
            path = path.replace('+',' ')
            query = query.replace('+',' ')
        path = unquote(path)[1:]
        # parse the query string into paramslist (values are lists,
        # since a key may appear more than once)
        for s in query.split('&'):
            if s:
                i = s.index('=')
                kw = unquote(s[:i])
                paramslist.setdefault(kw, [])
                paramslist[kw] += [unquote(s[i+1:])]

        if path == '' or path == 'index.html':
            return self.get_infopage()
        if (path == 'file'):
            return self.get_file(params('info_hash'))
        if path == 'favicon.ico' and self.favicon is not None:
            return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon)

        # automated access from here on

        if path in ('scrape', 'scrape.php', 'tracker.php/scrape'):
            return self.get_scrape(paramslist)

        if not path in ('announce', 'announce.php', 'tracker.php/announce'):
            return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)

        # main tracker function

        # reject requests the configured request filter objects to
        filtered = self.Filter.check(real_ip, paramslist, headers)
        if filtered:
            return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason': filtered}))

        infohash = params('info_hash')
        if not infohash:
            raise ValueError, 'no info hash'

        notallowed = self.check_allowed(infohash, paramslist)
        if notallowed:
            return notallowed

        event = params('event')

        # record the announce in the peer tables; rsize is how many
        # peers the response may contain
        rsize = self.add_data(infohash, event, ip, paramslist)

    except ValueError, e:
        return (400, 'Bad Request', {'Content-Type': 'text/plain'},
            'you sent me garbage - ' + str(e))

    # forward this announce to an aggregating tracker if configured
    if self.aggregate_forward and not paramslist.has_key('tracker'):
        self.aggregate_senddata(query)

    if self.is_aggregator:      # don't return peer data here
        return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'response': 'OK'}))

    # pick the response encoding: 0-2 are compact variants, 3 is full
    # peer dicts, 4 is dicts without peer ids (see peerlist)
    if params('compact') and ipv4:
        if params('requirecrypto'):
            return_type = 1
        elif params('supportcrypto'):
            return_type = 2
        else:
            return_type = 0
    elif self.config['compact_reqd'] and ipv4:
        return (400, 'Bad Request', {'Content-Type': 'text/plain'},
            'your client is outdated, please upgrade')
    elif params('no_peer_id'):
        return_type = 4
    else:
        return_type = 3

    data = self.peerlist(infohash, event=='stopped',
                         params('tracker'), not params('left'),
                         return_type, rsize, params('supportcrypto'))

    if paramslist.has_key('scrape'):    # deprecated
        data['scrape'] = self.scrapedata(infohash, False)

    # dedicated-seed bookkeeping: mark the torrent seeded when the
    # dedicated seed announces completion, and report it on request
    if self.dedicated_seed_id:
        if params('seed_id') == self.dedicated_seed_id and params('left') == 0:
            self.is_seeded[infohash] = True
        if params('check_seeded') and self.is_seeded.get(infohash):
            data['seeded'] = 1

    return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))
  858. def natcheckOK(self, infohash, peerid, ip, port, peer):
  859. seed = not peer['left']
  860. bc = self.becache.setdefault(infohash,self.cache_default)
  861. cp = compact_peer_info(ip, port)
  862. reqc = peer['requirecrypto']
  863. bc[2][seed][peerid] = (cp,chr(reqc))
  864. if peer['supportcrypto']:
  865. bc[1][seed][peerid] = cp
  866. if not reqc:
  867. bc[0][seed][peerid] = cp
  868. if not self.config['compact_reqd']:
  869. bc[3][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port,
  870. 'peer id': peerid}))
  871. bc[4][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port}))
  872. def natchecklog(self, peerid, ip, port, result):
  873. year, month, day, hour, minute, second, a, b, c = localtime(time())
  874. print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % (
  875. ip, quote(peerid), day, months[month], year, hour, minute, second,
  876. ip, port, result)
def connectback_result(self, result, downloadid, peerid, ip, port):
    """NatCheck completion callback.

    result is true when the tracker managed to connect back to the
    peer at ip:port; failure counts accumulate in record['nat'].
    """
    record = self.downloads.get(downloadid,{}).get(peerid)
    # drop stale callbacks: the peer must still be registered for this
    # torrent with the same ip (or self-reported 'given ip') and port
    if ( record is None
             or (record['ip'] != ip and record.get('given ip') != ip)
         or record['port'] != port ):
        if self.config['log_nat_checks']:
            self.natchecklog(peerid, ip, port, 404)
        return
    if self.config['log_nat_checks']:
        if result:
            x = 200
        else:
            x = 503
        self.natchecklog(peerid, ip, port, x)
    if not record.has_key('nat'):
        # first result for this peer: nat = 0 on success, 1 on failure
        record['nat'] = int(not result)
        if result:
            self.natcheckOK(downloadid,peerid,ip,port,record)
    elif result and record['nat']:
        # previously unreachable peer is now reachable
        record['nat'] = 0
        self.natcheckOK(downloadid,peerid,ip,port,record)
    elif not result:
        # another failed attempt
        record['nat'] += 1
  900. def remove_from_state(self, *l):
  901. for s in l:
  902. try:
  903. del self.state[s]
  904. except:
  905. pass
  906. def save_state(self):
  907. self.rawserver.add_task(self.save_state, self.save_dfile_interval)
  908. h = open(self.dfile, 'wb')
  909. h.write(bencode(self.state))
  910. h.close()
def parse_allowed(self):
    """Refresh the set of allowed torrents from the allowed_dir (or
    allowed_list file) and register any newly added infohashes.

    Reschedules itself on the raw server every parse_dir_interval.
    """
    self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval)
    if self.config['allowed_dir']:
        # directory mode: rescan *.torrent files, reusing previous
        # results so unchanged files are not re-parsed
        r = parsedir( self.config['allowed_dir'], self.allowed,
                      self.allowed_dir_files, self.allowed_dir_blocked,
                      [".torrent"] )
        ( self.allowed, self.allowed_dir_files, self.allowed_dir_blocked,
            added, garbage2 ) = r
        self.state['allowed'] = self.allowed
        self.state['allowed_dir_files'] = self.allowed_dir_files
        self.t2tlist.parse(self.allowed)
    else:
        # flat-file mode: only re-read when the mtime has changed
        f = self.config['allowed_list']
        if self.allowed_list_mtime == os.path.getmtime(f):
            return
        try:
            r = parsetorrentlist(f, self.allowed)
            (self.allowed, added, garbage2) = r
            self.state['allowed_list'] = self.allowed
        except (IOError, OSError):
            print '**warning** unable to read allowed torrent list'
            return
        self.allowed_list_mtime = os.path.getmtime(f)
    # initialize tracking structures for torrents seen for the first time
    for infohash in added.keys():
        self.downloads.setdefault(infohash, {})
        self.completed.setdefault(infohash, 0)
        self.seedcount.setdefault(infohash, 0)
  938. def read_ip_lists(self):
  939. self.rawserver.add_task(self.read_ip_lists,self.parse_dir_interval)
  940. f = self.config['allowed_ips']
  941. if f and self.allowed_ip_mtime != os.path.getmtime(f):
  942. self.allowed_IPs = IP_List()
  943. try:
  944. self.allowed_IPs.read_fieldlist(f)
  945. self.allowed_ip_mtime = os.path.getmtime(f)
  946. except (IOError, OSError):
  947. print '**warning** unable to read allowed_IP list'
  948. f = self.config['banned_ips']
  949. if f and self.banned_ip_mtime != os.path.getmtime(f):
  950. self.banned_IPs = IP_Range_List()
  951. try:
  952. self.banned_IPs.read_rangelist(f)
  953. self.banned_ip_mtime = os.path.getmtime(f)
  954. except (IOError, OSError):
  955. print '**warning** unable to read banned_IP list'
  956. def delete_peer(self, infohash, peerid):
  957. dls = self.downloads[infohash]
  958. peer = dls[peerid]
  959. if not peer['left']:
  960. self.seedcount[infohash] -= 1
  961. if not peer.get('nat',-1):
  962. l = self.becache[infohash]
  963. y = not peer['left']
  964. for x in l:
  965. if x[y].has_key(peerid):
  966. del x[y][peerid]
  967. del self.times[infohash][peerid]
  968. del dls[peerid]
  969. def expire_downloaders(self):
  970. for x in self.times.keys():
  971. for myid, t in self.times[x].items():
  972. if t < self.prevtime:
  973. self.delete_peer(x,myid)
  974. self.prevtime = clock()
  975. if (self.keep_dead != 1):
  976. for key, value in self.downloads.items():
  977. if len(value) == 0 and (
  978. self.allowed is None or not self.allowed.has_key(key) ):
  979. del self.times[key]
  980. del self.downloads[key]
  981. del self.seedcount[key]
  982. self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
  983. def track(args):
  984. if len(args) == 0:
  985. print formatDefinitions(defaults, 80)
  986. return
  987. try:
  988. config, files = parseargs(args, defaults, 0, 0)
  989. except ValueError, e:
  990. print 'error: ' + str(e)
  991. print 'run with no arguments for parameter explanations'
  992. return
  993. r = RawServer(Event(), config['timeout_check_interval'],
  994. config['socket_timeout'], ipv6_enable = config['ipv6_enabled'])
  995. t = Tracker(config, r)
  996. r.bind(config['port'], config['bind'],
  997. reuse = True, ipv6_socket_style = config['ipv6_binds_v4'])
  998. r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
  999. t.save_state()
  1000. print '# Shutting down: ' + isotime()
  1001. def size_format(s):
  1002. if (s < 1024):
  1003. r = str(s) + 'B'
  1004. elif (s < 1048576):
  1005. r = str(int(s/1024)) + 'KiB'
  1006. elif (s < 1073741824L):
  1007. r = str(int(s/1048576)) + 'MiB'
  1008. elif (s < 1099511627776L):
  1009. r = str(int((s/1073741824.0)*100.0)/100.0) + 'GiB'
  1010. else:
  1011. r = str(int((s/1099511627776.0)*100.0)/100.0) + 'TiB'
  1012. return(r)