[#3388|WebUI] Fix md5sums in torrent files breaking file listing
Torrents containing the optional md5sum hashes were not being decoded, which caused an error in json_api when the TorrentInfo was returned:

    Object of type bytes is not JSON serializable

Fixed by removing all optional hashes from the paths returned by TorrentInfo and including only the required path keys. The optional hashes are unused by Deluge, so simplify by removing them.

Also fixed a Windows path issue in TorrentInfo by ensuring conversion to posix paths.

Refs:
http://wiki.bitcomet.com/inside_bitcomet
http://wiki.depthstrike.com/index.php/P2P:Protocol:Specifications:Optional_Hashes
https://wiki.theory.org/index.php/BitTorrentSpecification
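For context, the failure is easy to reproduce outside Deluge: bdecode leaves the optional per-file hashes as raw bytes, and Python's json module cannot serialize bytes. A minimal sketch of the problem and of the approach the fix takes (the file entry below is illustrative, not actual TorrentInfo output):

    import json

    # A per-file entry as it might come out of bdecode, with an optional
    # hash still attached (the hash value is a made-up placeholder).
    file_entry = {
        'path': 'test/lol',
        'index': 0,
        'length': 4,
        'md5sum': b'0123456789abcdef0123456789abcdef',
    }

    try:
        json.dumps(file_entry)
    except TypeError as ex:
        print(ex)  # Object of type bytes is not JSON serializable

    # The approach taken by the fix: expose only the required keys.
    required = {k: file_entry[k] for k in ('path', 'index', 'length')}
    print(json.dumps(required))  # {"path": "test/lol", "index": 0, "length": 4}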
parent dcd3918f36
commit 3ec23ad96b
@@ -24,6 +24,102 @@ class UICommonTestCase(unittest.TestCase):
     def tearDown(self):  # NOQA: N803
         pass
 
+    def test_hash_optional_single_file(self):
+        """Ensure single-file torrent `ed2k` and `sha1` keys are not in the filetree output."""
+        filename = common.get_test_data_file('test.torrent')
+        files_tree = {'azcvsupdater_2.6.2.jar': (0, 307949, True)}
+        ti = TorrentInfo(filename, filetree=1)
+        self.assertEqual(ti.files_tree, files_tree)
+
+        files_tree2 = {
+            'contents': {
+                'azcvsupdater_2.6.2.jar': {
+                    'type': 'file',
+                    'index': 0,
+                    'length': 307949,
+                    'download': True,
+                }
+            }
+        }
+        ti = TorrentInfo(filename, filetree=2)
+        self.assertEqual(ti.files_tree, files_tree2)
+
+    def test_hash_optional_multi_file(self):
+        """Ensure multi-file torrent `filehash` and `ed2k` keys are not in the filetree output."""
+        filename = common.get_test_data_file('filehash_field.torrent')
+        files_tree = {
+            'torrent_filehash': {
+                'tull.txt': (0, 54, True),
+                '還在一個人無聊嗎~還不趕緊上來聊天美.txt': (1, 54, True),
+            }
+        }
+        ti = TorrentInfo(filename, filetree=1)
+        self.assertEqual(ti.files_tree, files_tree)
+
+        filestree2 = {
+            'contents': {
+                'torrent_filehash': {
+                    'type': 'dir',
+                    'contents': {
+                        'tull.txt': {
+                            'type': 'file',
+                            'path': 'torrent_filehash/tull.txt',
+                            'length': 54,
+                            'index': 0,
+                            'download': True,
+                        },
+                        '還在一個人無聊嗎~還不趕緊上來聊天美.txt': {
+                            'type': 'file',
+                            'path': 'torrent_filehash/還在一個人無聊嗎~還不趕緊上來聊天美.txt',
+                            'length': 54,
+                            'index': 1,
+                            'download': True,
+                        },
+                    },
+                    'length': 108,
+                    'download': True,
+                }
+            },
+            'type': 'dir',
+        }
+        ti = TorrentInfo(filename, filetree=2)
+        self.assertEqual(ti.files_tree, filestree2)
+
+    def test_hash_optional_md5sum(self):
+        # Ensure `md5sum` key is not included in filetree output
+        filename = common.get_test_data_file('md5sum.torrent')
+        files_tree = {'test': {'lol': (0, 4, True), 'rofl': (1, 5, True)}}
+        ti = TorrentInfo(filename, filetree=1)
+        self.assertEqual(ti.files_tree, files_tree)
+        ti = TorrentInfo(filename, filetree=2)
+        files_tree2 = {
+            'contents': {
+                'test': {
+                    'type': 'dir',
+                    'contents': {
+                        'lol': {
+                            'type': 'file',
+                            'path': 'test/lol',
+                            'index': 0,
+                            'length': 4,
+                            'download': True,
+                        },
+                        'rofl': {
+                            'type': 'file',
+                            'path': 'test/rofl',
+                            'index': 1,
+                            'length': 5,
+                            'download': True,
+                        },
+                    },
+                    'length': 9,
+                    'download': True,
+                }
+            },
+            'type': 'dir',
+        }
+        self.assertEqual(ti.files_tree, files_tree2)
+
     def test_utf8_encoded_paths(self):
         filename = common.get_test_data_file('test.torrent')
         ti = TorrentInfo(filename)
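The tests above exercise both filetree formats: filetree=1 yields flat (index, length, download) tuples keyed by path, while filetree=2 yields the nested 'contents' dictionaries consumed by the WebUI. The fixtures they load (filehash_field.torrent, md5sum.torrent) are simply small torrents whose per-file dictionaries carry the optional hash keys from the referenced specs. If such a fixture ever needed to be regenerated, a rough sketch using deluge.bencode could look like the following; the file names, sizes, and hashes are illustrative, and it assumes bencode round-trips the same str-keyed, bytes-valued structure that TorrentInfo itself re-encodes:

    from hashlib import md5

    from deluge import bencode

    # Dummy payloads; the 4- and 5-byte lengths mirror the tuples asserted in
    # test_hash_optional_md5sum, but the real fixture contents are unknown.
    payloads = {'lol': b'lol\n', 'rofl': b'rofl\n'}

    info = {
        'name': b'test',
        'piece length': 16384,
        'pieces': b'\x00' * 20,  # placeholder piece hash; not a seedable torrent
        'files': [
            {
                'path': [name.encode()],
                'length': len(data),
                'md5sum': md5(data).hexdigest().encode(),  # the optional key under test
            }
            for name, data in payloads.items()
        ],
    }

    with open('md5sum.torrent', 'wb') as torrent_file:
        torrent_file.write(bencode.bencode({'info': info}))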
@@ -15,7 +15,6 @@ from __future__ import unicode_literals
 
 import logging
 import os
-from binascii import hexlify
 from hashlib import sha1 as sha
 
 from deluge import bencode
@@ -206,12 +205,9 @@ class TorrentInfo(object):
         self._info_hash = sha(bencode.bencode(info_dict)).hexdigest()
 
         # Get encoding from torrent file if available
-        encoding = info_dict.get('encoding', None)
-        codepage = info_dict.get('codepage', None)
-        if not encoding:
-            encoding = codepage if codepage else b'UTF-8'
-        if encoding:
-            encoding = encoding.decode()
+        encoding = info_dict.get(
+            'encoding', info_dict.get('codepage', b'UTF-8')
+        ).decode()
 
         # Decode 'name' with encoding unless 'name.utf-8' found.
         if 'name.utf-8' in info_dict:
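The nested get above collapses the old encoding/codepage fallback chain into a single expression; when the keys are simply absent, the lookup order is unchanged (encoding, then codepage, then UTF-8). A minimal illustration with made-up info dicts (the helper name is purely for the demo):

    def torrent_encoding(info_dict):
        # bdecode leaves these values as bytes, hence the single decode()
        return info_dict.get('encoding', info_dict.get('codepage', b'UTF-8')).decode()

    print(torrent_encoding({}))                                             # UTF-8
    print(torrent_encoding({'codepage': b'cp1251'}))                        # cp1251
    print(torrent_encoding({'encoding': b'UTF-8', 'codepage': b'cp1251'}))  # UTF-8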
@@ -231,27 +227,20 @@ class TorrentInfo(object):
                 if 'path.utf-8' in f:
                     path = decode_bytes(os.path.join(*f['path.utf-8']))
                     del f['path.utf-8']
                 else:
                     path = decode_bytes(os.path.join(*f['path']), encoding)
 
                 if prefix:
                     path = os.path.join(prefix, path)
 
+                # Ensure agnostic path separator
+                path = path.replace('\\', '/')
+
                 self._files.append(
                     {'path': path, 'size': f['length'], 'download': True}
                 )
+                paths[path] = {'path': path, 'index': index, 'length': f['length']}
 
-                f['path'] = path
-                f['index'] = index
-                if 'sha1' in f and len(f['sha1']) == 20:
-                    f['sha1'] = hexlify(f['sha1']).decode()
-                if 'ed2k' in f and len(f['ed2k']) == 16:
-                    f['ed2k'] = hexlify(f['ed2k']).decode()
-                if 'filehash' in f and len(f['filehash']) == 20:
-                    f['filehash'] = hexlify(f['filehash']).decode()
-
-                paths[path] = f
                 dirname = os.path.dirname(path)
                 while dirname:
                     dirinfo = dirs.setdefault(dirname, {})
@@ -538,7 +527,7 @@ class FileTree(object):
         def walk(directory, parent_path):
             for path in list(directory):
-                full_path = os.path.join(parent_path, path)
+                full_path = os.path.join(parent_path, path).replace('\\', '/')
                 if isinstance(directory[path], dict):
                     directory[path] = (
                         callback(full_path, directory[path]) or directory[path]
                     )
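Both path changes apply the same normalization: on Windows, os.path.join builds backslash-separated paths, while the WebUI file tree expects posix separators. A small demonstration of the behaviour being guarded against (ntpath is the Windows flavour of os.path, so this reproduces on any platform):

    import ntpath  # Windows path semantics, importable everywhere

    joined = ntpath.join('torrent_filehash', 'tull.txt')
    print(joined)                     # torrent_filehash\tull.txt
    print(joined.replace('\\', '/'))  # torrent_filehash/tull.txt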