makemetafile.py

# Written by Bram Cohen
# multitracker extensions by John Hoffman
# see LICENSE.txt for license information

from os.path import getsize, split, join, abspath, isdir
from os import listdir
from sha import sha
from copy import copy
from string import strip
from BitTornado.bencode import bencode
from btformats import check_info
from threading import Event
from time import time
from traceback import print_exc

try:
    from sys import getfilesystemencoding
    ENCODING = getfilesystemencoding()
except ImportError:     # Python versions without getfilesystemencoding()
    from sys import getdefaultencoding
    ENCODING = getdefaultencoding()

defaults = [
    ('announce_list', '',
        'a list of announce URLs - explained below'),
    ('httpseeds', '',
        'a list of http seed URLs - explained below'),
    ('piece_size_pow2', 0,
        "which power of 2 to set the piece size to (0 = automatic)"),
    ('comment', '',
        "optional human-readable comment to put in .torrent"),
    ('filesystem_encoding', '',
        "optional specification for filesystem encoding " +
        "(set automatically in recent Python versions)"),
    ('target', '',
        "optional target file for the torrent")
    ]

default_piece_len_exp = 18

# directory entries never included in a torrent
ignore = ['core', 'CVS']
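
# A hypothetical sketch (not part of the original module): each entry in
# `defaults` is (name, default value, help text), so a params dict for the
# functions below can be built from it like this:
#
#     params = dict([(name, value) for name, value, doc in defaults])
#     params['piece_size_pow2'] = 19    # e.g. force 512K pieces
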
def print_announcelist_details():
    print ('    announce_list = optional list of redundant/backup tracker URLs, in the format:')
    print ('           url[,url...][|url[,url...]...]')
    print ('                 where URLs separated by commas are all tried first')
    print ('                 before the next group of URLs separated by the pipe is checked.')
    print ("                 If none is given, it is assumed you don't want one in the metafile.")
    print ('                 If announce_list is given, clients which support it')
    print ('                 will ignore the <announce> value.')
    print ('           Examples:')
    print ('                 http://tracker1.com|http://tracker2.com|http://tracker3.com')
    print ('                      (tries trackers 1-3 in order)')
    print ('                 http://tracker1.com,http://tracker2.com,http://tracker3.com')
    print ('                      (tries trackers 1-3 in a randomly selected order)')
    print ('                 http://tracker1.com|http://backup1.com,http://backup2.com')
    print ('                      (tries tracker 1 first, then tries between the 2 backups randomly)')
    print ('')
    print ('    httpseeds = optional list of http-seed URLs, in the format:')
    print ('            url[|url...]')

def make_meta_file(file, url, params = {}, flag = Event(),
                   progress = lambda x: None, progress_percent = 1):
    if params.has_key('piece_size_pow2'):
        piece_len_exp = params['piece_size_pow2']
    else:
        piece_len_exp = default_piece_len_exp
    if params.has_key('target') and params['target'] != '':
        f = params['target']
    else:
        a, b = split(file)
        if b == '':
            f = a + '.torrent'
        else:
            f = join(a, b + '.torrent')

    if piece_len_exp == 0:              # automatic
        size = calcsize(file)
        if size > 8L*1024*1024*1024:    # > 8 gig =
            piece_len_exp = 21          #   2 meg pieces
        elif size > 2*1024*1024*1024:   # > 2 gig =
            piece_len_exp = 20          #   1 meg pieces
        elif size > 512*1024*1024:      # > 512M =
            piece_len_exp = 19          #   512K pieces
        elif size > 64*1024*1024:       # > 64M =
            piece_len_exp = 18          #   256K pieces
        elif size > 16*1024*1024:       # > 16M =
            piece_len_exp = 17          #   128K pieces
        elif size > 4*1024*1024:        # > 4M =
            piece_len_exp = 16          #   64K pieces
        else:                           # < 4M =
            piece_len_exp = 15          #   32K pieces
    piece_length = 2 ** piece_len_exp

    encoding = None
    if params.has_key('filesystem_encoding'):
        encoding = params['filesystem_encoding']
    if not encoding:
        encoding = ENCODING
    if not encoding:
        encoding = 'ascii'

    info = makeinfo(file, piece_length, encoding, flag, progress, progress_percent)
    if flag.isSet():
        return
    check_info(info)

    h = open(f, 'wb')
    data = {'info': info, 'announce': strip(url), 'creation date': long(time())}
    if params.has_key('comment') and params['comment']:
        data['comment'] = params['comment']
    if params.has_key('real_announce_list'):    # shortcut for progs calling in from outside
        data['announce-list'] = params['real_announce_list']
    elif params.has_key('announce_list') and params['announce_list']:
        l = []
        for tier in params['announce_list'].split('|'):
            l.append(tier.split(','))
        data['announce-list'] = l
    if params.has_key('real_httpseeds'):        # shortcut for progs calling in from outside
        data['httpseeds'] = params['real_httpseeds']
    elif params.has_key('httpseeds') and params['httpseeds']:
        data['httpseeds'] = params['httpseeds'].split('|')
    h.write(bencode(data))
    h.close()
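
# A hypothetical usage sketch (the file name and tracker URL below are
# placeholders, not part of this module):
#
#     from threading import Event
#     make_meta_file('example.txt', 'http://tracker1.com/announce',
#                    {'comment': 'test torrent'}, Event(),
#                    progress = lambda frac: None, progress_percent = 1)
#
# This writes example.txt.torrent next to the source file unless
# params['target'] names another output path.
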
def calcsize(file):
    if not isdir(file):
        return getsize(file)
    total = 0L
    for s in subfiles(abspath(file)):
        total += getsize(s[1])
    return total

def uniconvertl(l, e):
    r = []
    try:
        for s in l:
            r.append(uniconvert(s, e))
    except UnicodeError:
        raise UnicodeError('bad filename: ' + join(*l))
    return r

def uniconvert(s, e):
    try:
        s = unicode(s, e)
    except UnicodeError:
        raise UnicodeError('bad filename: ' + s)
    return s.encode('utf-8')
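
# For example (a sketch, assuming a latin-1 filesystem name):
#
#     uniconvert('b\xe9b\xe9', 'latin-1')  # -> 'b\xc3\xa9b\xc3\xa9'
#
# i.e. names are decoded from the filesystem encoding and re-encoded as
# UTF-8 bytes before they are placed in the metainfo dict.
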
def makeinfo(file, piece_length, encoding, flag, progress, progress_percent = 1):
    file = abspath(file)
    if isdir(file):
        # multi-file torrent: pieces run straight across file boundaries,
        # so a partially filled piece (done > 0) carries over between files
        subs = subfiles(file)
        subs.sort()
        pieces = []
        sh = sha()
        done = 0L
        fs = []
        totalsize = 0.0
        totalhashed = 0L
        for p, f in subs:
            totalsize += getsize(f)
        for p, f in subs:
            pos = 0L
            size = getsize(f)
            fs.append({'length': size, 'path': uniconvertl(p, encoding)})
            h = open(f, 'rb')
            while pos < size:
                a = min(size - pos, piece_length - done)    # bytes of this file that fit in the current piece
                sh.update(h.read(a))
                if flag.isSet():
                    return
                done += a
                pos += a
                totalhashed += a
                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                if progress_percent:
                    progress(totalhashed / totalsize)
                else:
                    progress(a)
            h.close()
        if done > 0:    # final short piece
            pieces.append(sh.digest())
        return {'pieces': ''.join(pieces),
                'piece length': piece_length, 'files': fs,
                'name': uniconvert(split(file)[1], encoding) }
    else:
        # single-file torrent: hash fixed-size pieces straight off the file
        size = getsize(file)
        pieces = []
        p = 0L
        h = open(file, 'rb')
        while p < size:
            x = h.read(min(piece_length, size - p))
            if flag.isSet():
                return
            pieces.append(sha(x).digest())
            p += piece_length
            if p > size:
                p = size
            if progress_percent:
                progress(float(p) / size)
            else:
                progress(min(piece_length, size - p))
        h.close()
        return {'pieces': ''.join(pieces),
                'piece length': piece_length, 'length': size,
                'name': uniconvert(split(file)[1], encoding) }
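
# A quick worked example: a 700M single file falls in the "> 512M" band of
# make_meta_file's automatic selection (2**19 = 512K pieces), giving
# 700*2**20 / 2**19 = 1400 pieces, so 'pieces' is a 1400 * 20 = 28000-byte
# string of concatenated SHA-1 digests.
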
def subfiles(d):
    # returns a list of (path segment list, full path) pairs for every
    # file under d, skipping dotfiles and names in `ignore`
    r = []
    stack = [([], d)]
    while len(stack) > 0:
        p, n = stack.pop()
        if isdir(n):
            for s in listdir(n):
                if s not in ignore and s[:1] != '.':
                    stack.append((copy(p) + [s], join(n, s)))
        else:
            r.append((p, n))
    return r
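
# For a tree like dir/a.txt and dir/sub/b.txt, subfiles('dir') returns
# (path segments, full path) pairs such as:
#
#     [(['sub', 'b.txt'], 'dir/sub/b.txt'), (['a.txt'], 'dir/a.txt')]
#
# (a sketch; the order depends on listdir(), and makeinfo() sorts the list
# before hashing).
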
def completedir(dir, url, params = {}, flag = Event(),
                vc = lambda x: None, fc = lambda x: None):
    files = listdir(dir)
    files.sort()
    ext = '.torrent'
    if params.has_key('target'):
        target = params['target']
    else:
        target = ''

    # generate a .torrent for each entry that is not itself a .torrent
    # and does not already have one
    togen = []
    for f in files:
        if f[-len(ext):] != ext and (f + ext) not in files:
            togen.append(join(dir, f))

    total = 0
    for i in togen:
        total += calcsize(i)

    subtotal = [0]
    def callback(x, subtotal = subtotal, total = total, vc = vc):
        subtotal[0] += x
        vc(float(subtotal[0]) / total)

    for i in togen:
        fc(i)
        try:
            t = split(i)[-1]
            if t not in ignore and t[0] != '.':
                if target != '':
                    params['target'] = join(target, t + ext)
                make_meta_file(i, url, params, flag,
                               progress = callback, progress_percent = 0)
        except ValueError:
            print_exc()
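
# A hypothetical usage sketch (the directory and tracker URL are
# placeholders, not part of this module):
#
#     completedir('downloads', 'http://tracker1.com/announce',
#                 {'announce_list': 'http://tracker1.com/announce|'
#                                   'http://backup1.com/announce'},
#                 vc = lambda frac: None,    # overall progress, 0.0 to 1.0
#                 fc = lambda name: None)    # called before hashing each file
#
# One .torrent is written next to each entry in the directory that is not
# itself a .torrent and does not already have one.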