makemetafile.py

#!/usr/bin/env python
# The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.

# Written by Bram Cohen

from __future__ import division

import os
import sys
import math
import time
from threading import Event

from BTL.hash import sha
from BitTorrent.translation import _
from BTL.bencode import bencode, bdecode
from BTL.btformats import check_info
from BTL.exceptions import str_exc
from BitTorrent.parseargs import parseargs, printHelp
from BTL.obsoletepythonsupport import *
from BitTorrent import BTFailure
from BTL.platform import decode_from_filesystem, get_filesystem_encoding
from BitTorrent.platform import read_language_file
from BTL.ConvertedMetainfo import noncharacter_translate
from khashmir.node import Node
from khashmir.ktable import KTable
from khashmir.util import packPeers, compact_peer_info

# Directory entries skipped when walking a tree for a batch torrent.
ignore = ['core', 'CVS', 'Thumbs.db', 'desktop.ini']

def gmtime():
    return time.mktime(time.gmtime())

def dummy(v):
    pass

minimum_piece_len_pow2 = 15  # minimum piece size is 2**15 = 32KB

def size_to_piece_len_pow2(size):
    num_pieces_pow2 = 11  # 2**num_pieces_pow2 = desired number of pieces
    return max(minimum_piece_len_pow2,
               int(round(math.log(size, 2)) - num_pieces_pow2))
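
# Worked example of the heuristic above: a 1 GiB download (2**30 bytes) gives
# round(log2(size)) = 30, so the exponent is 30 - 11 = 19, i.e. 512 KiB pieces
# and roughly 2**11 = 2048 of them.  Small downloads fall back to the 32 KiB
# minimum piece size.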

def make_meta_files(url,
                    files,
                    flag=Event(),
                    progressfunc=dummy,
                    filefunc=dummy,
                    piece_len_pow2=0,
                    target=None,
                    title=None,
                    comment=None,
                    safe=None,
                    content_type=None,  # <--- what to do for batch torrents?
                    use_tracker=True,
                    data_dir=None,
                    url_list=None):
    """Generate one .torrent per entry in 'files', reporting overall progress
    through progressfunc and the current file through filefunc.  If
    use_tracker is False, a trackerless (DHT) metafile is written instead."""
    if len(files) > 1 and target:
        raise BTFailure(_("You can't specify the name of the .torrent file "
                          "when generating multiple torrents at once"))

    files.sort()
    ext = '.torrent'

    # Skip anything that is already a .torrent file.
    togen = []
    for f in files:
        if not f.endswith(ext):
            togen.append(f)

    sizes = []
    for f in togen:
        sizes.append(calcsize(f))
    total = sum(sizes)

    subtotal = [0]
    def callback(x):
        subtotal[0] += x
        progressfunc(subtotal[0] / total)

    for i, f in enumerate(togen):
        if flag.isSet():
            break
        if sizes[i] < 1:
            continue  # skip empty files/directories
        t = os.path.split(f)
        if t[1] == '':
            f = t[0]
        filefunc(f)
        if piece_len_pow2 > 0:
            my_piece_len_pow2 = piece_len_pow2
        else:
            my_piece_len_pow2 = size_to_piece_len_pow2(sizes[i])
        if use_tracker:
            make_meta_file(f, url, flag=flag, progress=callback,
                           piece_len_exp=my_piece_len_pow2, target=target,
                           title=title, comment=comment, safe=safe,
                           content_type=content_type,
                           url_list=url_list)
        else:
            make_meta_file_dht(f, url, flag=flag, progress=callback,
                               piece_len_exp=my_piece_len_pow2, target=target,
                               title=title, comment=comment, safe=safe,
                               content_type=content_type,
                               data_dir=data_dir)
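
# Illustrative call (the tracker URL and paths are hypothetical, not part of
# this module):
#
#   make_meta_files('http://tracker.example.com/announce',
#                   ['/tmp/movie.avi', '/tmp/photos'],
#                   piece_len_pow2=18)
#
# writes /tmp/movie.avi.torrent and /tmp/photos.torrent with 256 KiB pieces.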

def make_meta_file(path, url, piece_len_exp, flag=Event(), progress=dummy,
                   title=None, comment=None, safe=None, content_type=None,
                   target=None, url_list=None, name=None):
    """Write a tracker-based .torrent for 'path', announcing to 'url'."""
    data = {'announce': url.strip(), 'creation date': int(gmtime())}
    piece_length = 2 ** piece_len_exp

    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target

    info = makeinfo(path, piece_length, flag, progress, name, content_type)
    if flag.isSet():
        return
    check_info(info)

    h = file(f, 'wb')
    data['info'] = info
    lang = read_language_file() or 'en'
    if lang:
        data['locale'] = lang
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    if url_list:
        data['url-list'] = url_list
    h.write(bencode(data))
    h.close()

def make_meta_file_dht(path, nodes, piece_len_exp, flag=Event(),
                       progress=dummy, title=None, comment=None, safe=None,
                       content_type=None, target=None, data_dir=None):
    # If nodes is empty, bootstrap nodes are pulled from the routing table
    # stored in data_dir; otherwise nodes is expected to be a string of
    # comma-separated <ip>:<port> pairs.
    # This shares a lot of code with make_meta_file.
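    # For example, nodes = "10.0.0.1:6881, 10.0.0.2:6881" is parsed below into
    # [('10.0.0.1', 6881), ('10.0.0.2', 6881)].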
    piece_length = 2 ** piece_len_exp

    a, b = os.path.split(path)
    if not target:
        if b == '':
            f = a + '.torrent'
        else:
            f = os.path.join(a, b + '.torrent')
    else:
        f = target

    # Pass content_type by keyword so it is not mistaken for the 'name'
    # parameter of makeinfo.
    info = makeinfo(path, piece_length, flag, progress,
                    content_type=content_type)
    if flag.isSet():
        return
    check_info(info)
    info_hash = sha(bencode(info)).digest()

    if not nodes:
        x = open(os.path.join(data_dir, 'routing_table'), 'rb')
        d = bdecode(x.read())
        x.close()
        t = KTable(Node().initWithDict({'id': d['id'],
                                        'host': '127.0.0.1', 'port': 0}))
        for n in d['rt']:
            t.insertNode(Node().initWithDict(n))
        nodes = [(node.host, node.port) for node in t.findNodes(info_hash)
                 if node.host != '127.0.0.1']
    else:
        nodes = [(a[0], int(a[1]))
                 for a in [node.strip().split(":") for node in nodes.split(",")]]

    data = {'nodes': nodes, 'creation date': int(gmtime())}
    h = file(f, 'wb')
    data['info'] = info
    if title:
        data['title'] = title
    if comment:
        data['comment'] = comment
    if safe:
        data['safe'] = safe
    h.write(bencode(data))
    h.close()

def calcsize(path):
    total = 0
    for s in subfiles(os.path.abspath(path)):
        total += os.path.getsize(s[1])
    return total

def makeinfo(path, piece_length, flag, progress, name=None,
             content_type=None):
    # HEREDAVE. If path is a directory, how do we assign a content type?

    def to_utf8(name):
        if isinstance(name, unicode):
            u = name
        else:
            try:
                u = decode_from_filesystem(name)
            except Exception, e:
                s = str_exc(e)
                raise BTFailure(_('Could not convert file/directory name %r to '
                                  'Unicode (%s). Either the assumed filesystem '
                                  'encoding "%s" is wrong or the filename contains '
                                  'illegal bytes.') % (name, s, get_filesystem_encoding()))
        if u.translate(noncharacter_translate) != u:
            raise BTFailure(_('File/directory name "%s" contains reserved '
                              'unicode values that do not correspond to '
                              'characters.') % name)
        return u.encode('utf-8')

    path = os.path.abspath(path)
    if os.path.isdir(path):
        subs = subfiles(path)
        subs.sort()
        pieces = []
        sh = sha()
        done = 0
        fs = []
        totalsize = 0.0
        totalhashed = 0
        for p, f in subs:
            totalsize += os.path.getsize(f)
        for p, f in subs:
            pos = 0
            size = os.path.getsize(f)
            p2 = [to_utf8(n) for n in p]
            if content_type:
                fs.append({'length': size, 'path': p2,
                           'content_type': content_type})  # HEREDAVE. bad for batch!
            else:
                fs.append({'length': size, 'path': p2})
            h = file(f, 'rb')
            # Pieces may span file boundaries, so the running hash 'sh' and
            # the byte count 'done' carry over from one file to the next.
            while pos < size:
                a = min(size - pos, piece_length - done)
                sh.update(h.read(a))
                if flag.isSet():
                    return
                done += a
                pos += a
                totalhashed += a
                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                progress(a)
            h.close()
        if done > 0:
            pieces.append(sh.digest())
        if name is not None:
            assert isinstance(name, unicode)
            name = to_utf8(name)
        else:
            name = to_utf8(os.path.split(path)[1])
        return {'pieces': ''.join(pieces),
                'piece length': piece_length, 'files': fs,
                'name': name}
    else:
        size = os.path.getsize(path)
        pieces = []
        p = 0
        h = file(path, 'rb')
        while p < size:
            x = h.read(min(piece_length, size - p))
            if flag.isSet():
                return
            pieces.append(sha(x).digest())
            p += piece_length
            if p > size:
                p = size
            progress(min(piece_length, size - p))
        h.close()
        if content_type is not None:
            return {'pieces': ''.join(pieces),
                    'piece length': piece_length, 'length': size,
                    'name': to_utf8(os.path.split(path)[1]),
                    'content_type': content_type}
        return {'pieces': ''.join(pieces),
                'piece length': piece_length, 'length': size,
                'name': to_utf8(os.path.split(path)[1])}
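
# Illustrative single-file result (hypothetical values): a 1000-byte file
# "song.mp3" hashed with 2**15-byte pieces fits in one piece, so makeinfo
# returns {'pieces': <20-byte SHA-1 digest>, 'piece length': 32768,
#          'length': 1000, 'name': 'song.mp3'}.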

def subfiles(d):
    # Walk the tree under d, returning (path components, full path) pairs for
    # every regular file, skipping hidden entries and names in 'ignore'.
    r = []
    stack = [([], d)]
    while stack:
        p, n = stack.pop()
        if os.path.isdir(n):
            for s in os.listdir(n):
                if s not in ignore and not s.startswith('.'):
                    stack.append((p + [s], os.path.join(n, s)))
        else:
            r.append((p, n))
    return r
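
# Illustrative output (hypothetical layout): for a directory /tmp/album
# containing 01.mp3 and art/cover.jpg, subfiles('/tmp/album') returns
# [(['01.mp3'], '/tmp/album/01.mp3'),
#  (['art', 'cover.jpg'], '/tmp/album/art/cover.jpg')]  (order not guaranteed).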