refactor:remove ws.(part1)

Martijn Voncken 2008-02-19 20:50:29 +00:00
parent 35446801c6
commit 86eeef4b92
11 changed files with 111 additions and 151 deletions

View File

@@ -32,7 +32,7 @@
 import lib.newforms_plus as forms
 import page_decorators as deco
 import lib.webpy022 as web
-from webserver_common import ws
+from webserver_common import ws, proxy, log
 from render import render
 from lib.webpy022.http import seeother
 import sys
@@ -64,9 +64,9 @@ class CookieCfgForm(forms.Form):
 class CfgForm(forms.Form):
     "config base for deluge-cfg"
     def initial_data(self):
-        return ws.proxy.get_config()
+        return proxy.get_config()
     def save(self, data):
-        ws.proxy.set_config(dict(data))
+        proxy.set_config(dict(data))

 class config_page:
     """
@@ -95,12 +95,12 @@ class config_page:
         form_data = web.Storage(utils.get_newforms_data(form_class))
         form = form_class(form_data)
         if form.is_valid():
-            ws.log.debug('save config %s' % form_data)
+            log.debug('save config %s' % form_data)
             try:
                 form.start_save()
                 return self.render(form , name, _('These changes were saved'))
             except forms.ValidationError, e:
-                ws.log.debug(e.message)
+                log.debug(e.message)
                 return self.render(form , name, error = e.message)
         else:
             return self.render(form , name,

View File

@@ -34,7 +34,7 @@
 import lib.newforms_plus as forms
 import config
 import utils
-from webserver_common import ws
+from webserver_common import ws, proxy , log

 class NetworkPorts(config.CfgForm ):
@@ -126,15 +126,15 @@ config.register_block('deluge','daemon', Daemon)
 class Plugins(forms.Form):
     title = _("Enabled Plugins")
     try:
-        _choices = [(p,p) for p in ws.proxy.get_available_plugins()]
+        _choices = [(p,p) for p in proxy.get_available_plugins()]
         enabled_plugins = forms.MultipleChoice(_(""), _choices)
     except:
-        ws.log.error("Not connected to daemon, Unable to load plugin-list")
+        log.error("Not connected to daemon, Unable to load plugin-list")
         #TODO: reload on reconnect!

     def initial_data(self):
-        return {'enabled_plugins':ws.proxy.get_enabled_plugins()}
+        return {'enabled_plugins':proxy.get_enabled_plugins()}

     def save(self, value):
         raise forms.ValidationError("SAVE:TODO")

View File

@@ -38,11 +38,11 @@ it would be possible not to include the python-json dependency.
 from new import instancemethod
 from inspect import getargspec
-from utils import ws,get_torrent_status,get_category_choosers, get_stats,filter_torrent_state,fsize,fspeed
+from utils import get_torrent_status,get_category_choosers, get_stats,filter_torrent_state,fsize,fspeed
 from page_decorators import remote
 from operator import attrgetter
 import lib.webpy022 as web
-proxy = ws.proxy
+from webserver_common import proxy, log

 def to_json(obj):
     from lib.pythonize import pythonize
@@ -80,7 +80,7 @@ class json_api:
         method = getattr(self,name)
         vars = web.input(kwargs= None)
-        ws.log.debug('vars=%s' % vars)
+        log.debug('vars=%s' % vars)
         if vars.kwargs:
             kwargs = json.read(vars.kwargs)
         else:
@@ -118,7 +118,7 @@ class json_api:
     #extra's:
     def list_torrents(self):
         return [get_torrent_status(torrent_id)
-            for torrent_id in ws.proxy.get_session_state()]
+            for torrent_id in proxy.get_session_state()]

     def simplify_torrent_status(self, torrent):
         """smaller subset and preformatted data for the treelist"""

View File

@@ -3,7 +3,7 @@ decorators for html-pages.
 """
 #relative imports
 from render import render
-from webserver_common import ws
+from webserver_common import ws, log
 from utils import *
 #/relative
@@ -30,7 +30,7 @@ def check_session(func):
     mostly used for POST-pages.
     """
     def deco(self, name = None):
-        ws.log.debug('%s.%s(name=%s)' % (self.__class__.__name__, func.__name__,
+        log.debug('%s.%s(name=%s)' % (self.__class__.__name__, func.__name__,
            name))
        vars = web.input(redir_after_login = None)
        ck = cookies()
@@ -95,7 +95,7 @@ def remote(func):
     "decorator for remote (string) api's"
     def deco(self, name = None):
         try:
-            ws.log.debug('%s.%s(%s)' ,self.__class__.__name__, func.__name__,name )
+            log.debug('%s.%s(%s)' ,self.__class__.__name__, func.__name__,name )
             print func(self, name)
         except Exception, e:
             print 'error:%s' % e.message

View File

@@ -31,7 +31,7 @@
 # this exception statement from your version. If you delete this exception
 # statement from all source files in the program, then also delete it here.
 #
-from webserver_common import ws
+from webserver_common import ws, proxy, log
 from utils import *
 from render import render, error_page
 import page_decorators as deco
@@ -179,28 +179,28 @@ class torrent_start:
     @deco.check_session
     @deco.torrent_ids
     def POST(self, torrent_ids):
-        ws.proxy.resume_torrent(torrent_ids)
+        proxy.resume_torrent(torrent_ids)
         do_redirect()

 class torrent_stop:
     @deco.check_session
     @deco.torrent_ids
     def POST(self, torrent_ids):
-        ws.proxy.pause_torrent(torrent_ids)
+        proxy.pause_torrent(torrent_ids)
         do_redirect()

 class torrent_reannounce:
     @deco.check_session
     @deco.torrent_ids
     def POST(self, torrent_ids):
-        ws.proxy.force_reannounce(torrent_ids)
+        proxy.force_reannounce(torrent_ids)
         do_redirect()

 class torrent_recheck:
     @deco.check_session
     @deco.torrent_ids
     def POST(self, torrent_ids):
-        ws.proxy.force_recheck(torrent_ids)
+        proxy.force_recheck(torrent_ids)
         do_redirect()

 class torrent_delete:
@@ -217,7 +217,7 @@ class torrent_delete:
         vars = web.input(data_also = None, torrent_also = None)
         data_also = bool(vars.data_also)
         torrent_also = bool(vars.torrent_also)
-        ws.proxy.remove_torrent(torrent_ids, torrent_also, data_also)
+        proxy.remove_torrent(torrent_ids, torrent_also, data_also)
         do_redirect()

 class torrent_queue_up:
@@ -228,7 +228,7 @@ class torrent_queue_up:
         torrent_list.sort(lambda x, y : x.queue - y.queue)
         torrent_ids = [t.id for t in torrent_list]
         for torrent_id in torrent_ids:
-            ws.async_proxy.get_core().call("queue_queue_up", None, torrent_id)
+            async_proxy.get_core().call("queue_queue_up", None, torrent_id)
         do_redirect()

 class torrent_queue_down:
@@ -239,7 +239,7 @@ class torrent_queue_down:
         torrent_list.sort(lambda x, y : x.queue - y.queue)
         torrent_ids = [t.id for t in torrent_list]
         for torrent_id in reversed(torrent_ids):
-            ws.async_proxy.get_core().call("queue_queue_down", None, torrent_id)
+            async_proxy.get_core().call("queue_queue_down", None, torrent_id)
         do_redirect()

 class torrent_files:
@@ -253,19 +253,19 @@ class torrent_files:
         for pos in file_priorities:
             proxy_prio[int(pos)] = 1
-        ws.proxy.set_torrent_file_priorities(torrent_id, proxy_prio)
+        proxy.set_torrent_file_priorities(torrent_id, proxy_prio)
         do_redirect()

 class pause_all:
     @deco.check_session
     def POST(self, name):
-        ws.proxy.pause_torrent(ws.proxy.get_session_state())
+        proxy.pause_torrent(proxy.get_session_state())
         do_redirect()

 class resume_all:
     @deco.check_session
     def POST(self, name):
-        ws.proxy.resume_torrent(ws.proxy.get_session_state())
+        proxy.resume_torrent(proxy.get_session_state())
         do_redirect()

 class refresh:
@@ -319,7 +319,7 @@ class logout:
 class connect:
     @deco.deluge_page
     def GET(self, name):
-        #if ws.proxy.connected():
+        #if proxy.connected():
         # error = _("Not Connected to a daemon")
         #else:
         error = None
@@ -361,7 +361,7 @@ class remote_torrent_add:
         else: #file-post (curl)
             data_b64 = base64.b64encode(vars.torrent.file.read())
             torrent_name = vars.torrent.filename
-            ws.proxy.add_torrent_filecontent(torrent_name, data_b64)
+            proxy.add_torrent_filecontent(torrent_name, data_b64)
         return 'ok'

 class static(static_handler):
@@ -374,7 +374,7 @@ class template_static(static_handler):
 class downloads(static_handler):
     def GET(self, name):
-        self.base_dir = ws.proxy.get_config_value('default_download_path')
+        self.base_dir = proxy.get_config_value('default_download_path')
         if not ws.config.get('share_downloads'):
             raise Exception('Access to downloads is forbidden.')
         return static_handler.GET(self, name)

View File

@@ -3,9 +3,8 @@ test multicall.
 """
 import time
-from WebUi.webserver_common import ws
+from WebUi.webserver_common import ws, proxy, async_proxy
 ws.init_06()
-async_proxy = ws.async_proxy

 TORRENT_KEYS = ['name', 'total_size', 'num_files', 'num_pieces', 'piece_length',
     'eta', 'ratio', 'file_progress', 'distributed_copies', 'total_done',
@@ -17,21 +16,18 @@ TORRENT_KEYS = ['name', 'total_size', 'num_files', 'num_pieces', 'piece_length',
     'max_upload_slots', 'max_download_speed', 'prioritize_first_last', 'private'
     ]

 if False:
     #
     #A: translate this into 1 multicall:
     start = time.time()
     stats = {
-        'download_rate':ws.proxy.get_download_rate(),
-        'upload_rate':ws.proxy.get_upload_rate(),
-        'max_download':ws.proxy.get_config_value('max_download_speed'),
-        'max_upload':ws.proxy.get_config_value('max_upload_speed'),
-        'num_connections':ws.proxy.get_num_connections(),
-        'max_num_connections':ws.proxy.get_config_value('max_connections_global')
+        'download_rate':proxy.get_download_rate(),
+        'upload_rate':proxy.get_upload_rate(),
+        'max_download':proxy.get_config_value('max_download_speed'),
+        'max_upload':proxy.get_config_value('max_upload_speed'),
+        'num_connections':proxy.get_num_connections(),
+        'max_num_connections':proxy.get_config_value('max_connections_global')
     }
     print "sync-stats:",time.time() - start
@@ -66,8 +62,8 @@ if False:
     #old-sync:
     start = time.time()
-    torrent_list = [ws.proxy.get_torrent_status(id, TORRENT_KEYS )
-        for id in ws.proxy.get_session_state()
+    torrent_list = [proxy.get_torrent_status(id, TORRENT_KEYS )
+        for id in proxy.get_session_state()
     ]
     print "sync-list:",time.time() - start
@@ -87,7 +83,7 @@ if False:
     start = time.time()
-    torrent_ids = ws.proxy.get_session_state() #Syc-api.
+    torrent_ids = proxy.get_session_state() #Syc-api.
     torrent_dict = {}
     for id in torrent_ids:
         async_proxy.get_torrent_status(dict_cb(id,torrent_dict), id, TORRENT_KEYS )
@@ -98,9 +94,9 @@ if False:
     print torrent_dict[torrent_ids[0]]

 if False:
-    print ws.proxy.get_config_value('download_location')
+    print proxy.get_config_value('download_location')

 if True:
-    torrent_id = ws.proxy.get_session_state()[0]
+    torrent_id = proxy.get_session_state()[0]
     print torrent_id
-    ws.proxy.move_torrent([torrent_id],"/media/sdb1/test")
+    proxy.move_torrent([torrent_id],"/media/sdb1/test")

View File

@@ -6,7 +6,7 @@ unittest the right way feels so unpythonic :(
 """
 import unittest
 import cookielib, urllib2 , urllib
-from WebUi.webserver_common import ws,TORRENT_KEYS
+from WebUi.webserver_common import ws,TORRENT_KEYS, proxy
 import operator

 ws.init_06()
@@ -17,7 +17,7 @@ BASE_URL = 'http://localhost:8112'
 PWD = 'deluge'

 def get_status(id):
-    return ws.proxy.get_torrent_status(id,TORRENT_KEYS)
+    return proxy.get_torrent_status(id,TORRENT_KEYS)

 #BASE:
 #303 = see other
@@ -94,7 +94,7 @@ class TestWebUiBase(unittest.TestCase):
         else:
             pass

-    first_torrent_id = property(lambda self: ws.proxy.get_session_state()[0])
+    first_torrent_id = property(lambda self: proxy.get_session_state()[0])
     first_torrent = property(lambda self: get_status(self.first_torrent_id))
@@ -200,17 +200,17 @@ class TestIntegration(TestWebUiBase):
             'http://torrents.aelitis.com:88/torrents/azautoseeder_0.1.1.jar.torrent'
         ])

-        torrent_ids = ws.proxy.get_session_state()
+        torrent_ids = proxy.get_session_state()
         #avoid hammering, investigate current torrent-list and do not re-add.
         #correct means : 3 torrent's in list (for now)
         if len(torrent_ids) <> 3:
             #delete all, nice use case for refactoring delete..
-            torrent_ids = ws.proxy.get_session_state()
+            torrent_ids = proxy.get_session_state()
             for torrent in torrent_ids:
-                ws.proxy.remove_torrent([torrent], False, False)
+                proxy.remove_torrent([torrent], False, False)

-            torrent_ids = ws.proxy.get_session_state()
+            torrent_ids = proxy.get_session_state()
             self.assertEqual(torrent_ids, [])

             #add 3 using url.
@@ -218,7 +218,7 @@ class TestIntegration(TestWebUiBase):
                 self.assert_303('/torrent/add','/index',{'url':url,'torrent':None})

             #added?
-            self.torrent_ids = ws.proxy.get_session_state()
+            self.torrent_ids = proxy.get_session_state()
             self.assertEqual(len(self.torrent_ids), 3)
         else:
@@ -231,14 +231,14 @@ class TestIntegration(TestWebUiBase):
         #pause all
         self.assert_303('/pause_all','/index', post=1)
         #pause worked?
-        pause_status = [get_status(id)["user_paused"] for id in ws.proxy.get_session_state()]
+        pause_status = [get_status(id)["user_paused"] for id in proxy.get_session_state()]
         for paused in pause_status:
             self.assertEqual(paused, True)

         #resume all
         self.assert_303('/resume_all','/index', post=1)
         #resume worked?
-        pause_status = [get_status(id)["user_paused"] for id in ws.proxy.get_session_state()]
+        pause_status = [get_status(id)["user_paused"] for id in proxy.get_session_state()]
         for paused in pause_status:
             self.assertEqual(paused,False)
         #pause again.
@@ -254,7 +254,7 @@ class TestIntegration(TestWebUiBase):
     def testQueue(self):
         #find last:
-        torrent_id = [id for id in ws.proxy.get_session_state()
+        torrent_id = [id for id in proxy.get_session_state()
             if (get_status(id)['queue_pos'] ==3 )][0]
         #queue
@@ -285,7 +285,7 @@ class TestIntegration(TestWebUiBase):
     def testMeta(self):
         #info available?
-        for torrent_id in ws.proxy.get_session_state():
+        for torrent_id in proxy.get_session_state():
             self.assert_exists('/torrent/info/%s' % torrent_id)
             self.assert_exists('/torrent/delete/%s' % torrent_id)

View File

@@ -29,7 +29,7 @@
 # this exception statement from your version. If you delete this exception
 # statement from all source files in the program, then also delete it here.
 #
-from webserver_common import ws
+from webserver_common import ws, log, proxy
 import utils
 from render import render, error_page
 import page_decorators as deco
@@ -55,8 +55,8 @@ class OptionsForm(forms.Form):
     default_private = forms.CheckBox(_('Set Private Flag'))
     def initial_data(self):
-        data = ws.proxy.get_config()
-        ws.log.debug("add:Init options with:%s" % data)
+        data = proxy.get_config()
+        log.debug("add:Init options with:%s" % data)
         return data

 class AddForm(forms.Form):
@@ -73,7 +73,7 @@ class torrent_add:
     def add_page(self,error = None):
         #form_data = utils.get_newforms_data(AddForm)
-        ws.log.debug("add-page")
+        log.debug("add-page")
         #TODO: CLEANUP!!!
         vars = web.input(url = None)
         form_data = {'url':vars.url}
@@ -81,7 +81,7 @@ class torrent_add:
         options_data = None
         if error:
             options_data = utils.get_newforms_data(OptionsForm)
-            ws.log.debug("add:(error-state):Init options with:%s" % options_data)
+            log.debug("add:(error-state):Init options with:%s" % options_data)
         return render.torrent_add(AddForm(form_data),OptionsForm(options_data), error)

     @deco.deluge_page
@@ -116,14 +116,14 @@ class torrent_add:
             print self.add_page(error = _("Choose an url or a torrent, not both."))
             return
         if vars.url:
-            ws.proxy.add_torrent_url(vars.url,options)
-            ws.log.debug("add-url:options :%s" % options)
+            proxy.add_torrent_url(vars.url,options)
+            log.debug("add-url:options :%s" % options)
             utils.do_redirect()
         elif torrent_name:
             data_b64 = base64.b64encode(torrent_data)
             #b64 because of strange bug-reports related to binary data
-            ws.proxy.add_torrent_filecontent(vars.torrent.filename, data_b64, options)
-            ws.log.debug("add-file:options :%s" % options)
+            proxy.add_torrent_filecontent(vars.torrent.filename, data_b64, options)
+            log.debug("add-file:options :%s" % options)
             utils.do_redirect()
         else:
             print self.add_page(error = _("No data"))

View File

@@ -30,7 +30,7 @@
 # statement from all source files in the program, then also delete it here.
 #
-from webserver_common import ws
+from webserver_common import ws, proxy
 import utils
 from render import render
 import page_decorators as deco
@@ -42,7 +42,7 @@ import lib.webpy022 as web
 class MoveForm(forms.Form):
     save_path = forms.ServerFolder(_("Move To"))
     def initial_data(self):
-        return {'save_path':ws.proxy.get_config_value('download_location')}
+        return {'save_path':proxy.get_config_value('download_location')}

 class torrent_move:
@@ -70,5 +70,5 @@ class torrent_move:
             print self.move_page(name, error = _("Error in Path."))
             return
         save_path = form.clean_data["save_path"]
-        ws.proxy.move_torrent(torrent_ids, save_path)
+        proxy.move_torrent(torrent_ids, save_path)
         utils.do_redirect()

View File

@@ -47,7 +47,7 @@ import pickle
 from urlparse import urlparse

 from webserver_common import REVNO, VERSION, TORRENT_KEYS, STATE_MESSAGES
-from webserver_common import ws
+from webserver_common import ws, proxy, async_proxy, log

 debug_unicode = False
@@ -64,7 +64,7 @@ def setcookie(key, val):
 #really simple sessions, to bad i had to implement them myself.
 def start_session():
-    ws.log.debug('start session')
+    log.debug('start session')
     session_id = str(random.random())
     ws.SESSIONS.append(session_id)
     #if len(ws.SESSIONS) > 20: #save max 20 sessions?
@@ -113,21 +113,21 @@ def getcookie(key, default = None):
 def get_stats():
     stats = Storage()
-    ws.async_proxy.get_download_rate(dict_cb('download_rate',stats))
-    ws.async_proxy.get_upload_rate(dict_cb('upload_rate',stats))
-    ws.async_proxy.get_config_value(dict_cb('max_download',stats)
+    async_proxy.get_download_rate(dict_cb('download_rate',stats))
+    async_proxy.get_upload_rate(dict_cb('upload_rate',stats))
+    async_proxy.get_config_value(dict_cb('max_download',stats)
         ,"max_download_speed")
-    ws.async_proxy.get_config_value(dict_cb('max_upload',stats)
+    async_proxy.get_config_value(dict_cb('max_upload',stats)
         ,"max_upload_speed")
-    ws.async_proxy.get_num_connections(dict_cb("num_connections",stats))
-    ws.async_proxy.get_config_value(dict_cb('max_num_connections',stats)
+    async_proxy.get_num_connections(dict_cb("num_connections",stats))
+    async_proxy.get_config_value(dict_cb('max_num_connections',stats)
         ,"max_connections_global")
-    ws.async_proxy.get_dht_nodes(dict_cb('dht_nodes',stats))
+    async_proxy.get_dht_nodes(dict_cb('dht_nodes',stats))

-    ws.async_proxy.force_call(block=True)
-    #ws.log.debug(str(stats))
+    async_proxy.force_call(block=True)
+    #log.debug(str(stats))

     stats.download_rate = fspeed(stats.download_rate)
     stats.upload_rate = fspeed(stats.upload_rate)
@@ -157,10 +157,10 @@ def enhance_torrent_status(torrent_id,status):
     for key in TORRENT_KEYS:
         if not key in status:
             status[key] = 0
-            #ws.log.warning('torrent_status:empty key in status:%s' % key)
+            #log.warning('torrent_status:empty key in status:%s' % key)
         elif status[key] == None:
             status[key] = 0
-            #ws.log.warning('torrent_status:None key in status:%s' % key)
+            #log.warning('torrent_status:None key in status:%s' % key)

     if status.tracker == 0:
@@ -222,9 +222,9 @@ def enhance_torrent_status(torrent_id,status):
 def get_torrent_status(torrent_id):
     """
     helper method.
-    enhance ws.proxy.get_torrent_status with some extra data
+    enhance proxy.get_torrent_status with some extra data
     """
-    status = ws.proxy.get_torrent_status(torrent_id,TORRENT_KEYS)
+    status = proxy.get_torrent_status(torrent_id,TORRENT_KEYS)
     return enhance_torrent_status(torrent_id, status)
@@ -234,12 +234,12 @@ def get_torrent_list():
     """
     uses async.
     """
-    torrent_ids = ws.proxy.get_session_state() #Syc-api.
+    torrent_ids = proxy.get_session_state() #Syc-api.
     torrent_dict = {}
     for id in torrent_ids:
-        ws.async_proxy.get_torrent_status(dict_cb(id,torrent_dict), id,
+        async_proxy.get_torrent_status(dict_cb(id,torrent_dict), id,
             TORRENT_KEYS)
-    ws.async_proxy.force_call(block=True)
+    async_proxy.force_call(block=True)

     return [enhance_torrent_status(id, status)
         for id, status in torrent_dict.iteritems()]
@@ -307,7 +307,7 @@ def get_newforms_data(form_class):
     vars = web.input()
     for field in fields:
         form_data[field] = vars.get(field)
-        #ws.log.debug("form-field:%s=%s" % (field, form_data[field]))
+        #log.debug("form-field:%s=%s" % (field, form_data[field]))
         #DIRTY HACK: (for multiple-select)
         if isinstance(form_class.base_fields[field],
             forms.MultipleChoiceField):
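A note on the pattern kept by the get_stats() and get_torrent_list() changes above: each value is requested asynchronously with a dict_cb(key, target) callback, and a single blocking async_proxy.force_call(block=True) flushes all queued requests at once. The dict_cb helper itself is not part of this diff; the sketch below is a hypothetical stand-in showing one plausible shape for it, runnable on its own.

# Hypothetical sketch, not from this commit: a plausible dict_cb helper as used
# by get_stats()/get_torrent_list() above.
def dict_cb(key, target):
    """Return a callback that stores an async result under target[key]."""
    def callback(value):
        target[key] = value
    return callback

# In the real code the callback is handed to async_proxy.get_*() and fires
# after force_call(block=True); here we invoke it by hand to show the effect.
stats = {}
cb = dict_cb('download_rate', stats)
cb(12345.0)            # simulates the async proxy delivering a result
print(stats)           # {'download_rate': 12345.0}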

View File

@@ -43,6 +43,8 @@ import sys
 import base64
 from md5 import md5
 import inspect
+from deluge.ui import client
+from deluge.log import LOG as log

 random.seed()
@@ -80,12 +82,6 @@ TORRENT_KEYS = ['name', 'total_size', 'num_files', 'num_pieces', 'piece_length',
     "user_paused"
     ]
-
-"""
-NOT:is_seed,total_download,total_upload,uploaded_memory,queue_pos,user_paused
-"""
-
 STATE_MESSAGES = [
     "Allocating",
@@ -111,29 +107,28 @@ CONFIG_DEFAULTS = {
 #/constants

-#some magic to transform the async-proxy back to sync:
-class SyncProxyFunction:
+class SyncProxyMethod:
     """
     helper class for SyncProxy
     """
-    def __init__(self,client, func_name):
+    def __init__(self, func_name):
         self.func_name = func_name
-        self.client = client
     def __call__(self,*args,**kwargs):
-        func = getattr(self.client,self.func_name)
+        func = getattr(client,self.func_name)
         if self.has_callback(func):
+            #(ab)using list.append as a builtin callback method
             sync_result = []
             func(sync_result.append,*args, **kwargs)
-            self.client.force_call(block=True)
+            client.force_call(block=True)
             if not sync_result:
                 return None
             return sync_result[0]
         else:
-            ws.log.debug('no-cb: %s' % self.func_name)
             func(*args, **kwargs)
-            self.client.force_call(block=True)
+            client.force_call(block=True)
             return
     @staticmethod
@@ -142,11 +137,14 @@ class SyncProxyFunction:
 class SyncProxy(object):
     """acts like the old synchonous proxy"""
-    def __init__(self, client):
-        self.client = client
-    def __getattr__(self, attr,*args,**kwargs):
-        return SyncProxyFunction(self.client, attr)
+    def __getattr__(self, attr):
+        return SyncProxyMethod(attr)

+#moving stuff from WS to module
+#goal: eliminate WS, because the 05 compatiblilty is not needed anymore
+proxy = SyncProxy()
+async_proxy = client
+#log is already imported.

 class Ws:
@@ -182,37 +180,31 @@ class Ws:
         self.config = pickle.load(open(self.config_file))

     def init_06(self, uri = 'http://localhost:58846'):
-        import deluge.ui.client as async_proxy
-        from deluge.log import LOG as log
-        self.log = log
-        async_proxy.set_core_uri(uri)
-        self.async_proxy = async_proxy
-        self.proxy = SyncProxy(self.async_proxy)
+        client.set_core_uri(uri)
+        self.async_proxy = client

         #MONKEY PATCH, TODO->REMOVE!!!
         def add_torrent_filecontent(name , data_b64, options):
-            self.log.debug('monkeypatched add_torrent_filecontent:%s,len(data:%s))' %
+            log.debug('monkeypatched add_torrent_filecontent:%s,len(data:%s))' %
                 (name , len(data_b64)))
             name = name.replace('\\','/')
             name = 'deluge06_' + str(random.random()) + '_' + name.split('/')[-1]
             filename = os.path.join('/tmp', name)
-            self.log.debug('write: %s' % filename)
+            log.debug('write: %s' % filename)
             f = open(filename,"wb")
             f.write(base64.b64decode(data_b64))
             f.close()
-            self.log.debug("options:%s" % options)
+            log.debug("options:%s" % options)
             self.proxy.add_torrent_file([filename] , [options])

-        self.proxy.add_torrent_filecontent = add_torrent_filecontent
+        proxy.add_torrent_filecontent = add_torrent_filecontent

-        self.log.debug('cfg-file %s' % self.config_file)
+        log.debug('cfg-file %s' % self.config_file)
         if not os.path.exists(self.config_file):
-            self.log.debug('create cfg file %s' % self.config_file)
+            log.debug('create cfg file %s' % self.config_file)
             #load&save defaults.
             f = file(self.config_file,'wb')
             pickle.dump(CONFIG_DEFAULTS,f)
@@ -221,34 +213,6 @@ class Ws:
         self.init_process()
         self.env = '0.6'

-    def init_05(self):
-        import dbus
-        self.init_process()
-        bus = dbus.SessionBus()
-        self.proxy = bus.get_object("org.deluge_torrent.dbusplugin"
-            , "/org/deluge_torrent/DelugeDbusPlugin")
-        self.env = '0.5_process'
-        self.init_logger()
-
-    def init_gtk_05(self):
-        #appy possibly changed config-vars, only called in when runing inside gtk.
-        #new bug ws.render will not update!!!!
-        #other bug: must warn if blocklist plugin is active!
-        from dbus_interface import get_dbus_manager
-        self.proxy = get_dbus_manager()
-        self.config = deluge.pref.Preferences(self.config_file, False)
-        self.env = '0.5_gtk'
-        self.init_logger()
-
-    def init_logger(self):
-        #only for 0.5..
-        import logging
-        logging.basicConfig(level=logging.DEBUG,
-            format="[%(levelname)s] %(message)s")
-        self.log = logging
     #utils for config:
     def get_templates(self):
         template_path = os.path.join(os.path.dirname(__file__), 'templates')
@@ -258,7 +222,7 @@ class Ws:
         and not dirname.startswith('.')]

     def save_config(self):
-        self.log.debug('Save Webui Config')
+        log.debug('Save Webui Config')
         data = pickle.dumps(self.config)
         f = open(self.config_file,'wb')
         f.write(data)
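For readers following the refactor in the last file: SyncProxy turns every attribute lookup into a SyncProxyMethod, which calls the matching method on the async client with list.append as the callback and then blocks on force_call until the result arrives. The condensed sketch below illustrates that mechanism outside Deluge. FakeClient is a made-up stand-in for deluge.ui.client, and the real code also checks has_callback() to handle methods that take no callback; that branch is omitted here.

# Standalone sketch of the sync-over-async wrapper shown in the diff above.
# FakeClient is NOT part of Deluge; it only mimics the queue-then-flush
# behaviour of the async client so the example runs on its own.
class FakeClient:
    def __init__(self):
        self._queued = []
    def get_download_rate(self, cb):
        # async style: the result is delivered to the callback later
        self._queued.append(lambda: cb(42.0))
    def force_call(self, block=True):
        # flush all queued calls (the real client talks to the daemon here)
        for call in self._queued:
            call()
        self._queued = []

client = FakeClient()

class SyncProxyMethod:
    """Wrap one async client method so it can be called synchronously."""
    def __init__(self, func_name):
        self.func_name = func_name
    def __call__(self, *args, **kwargs):
        func = getattr(client, self.func_name)
        sync_result = []                       # list.append doubles as the callback
        func(sync_result.append, *args, **kwargs)
        client.force_call(block=True)          # block until the callback has fired
        return sync_result[0] if sync_result else None

class SyncProxy(object):
    """Every attribute lookup becomes a synchronous wrapper on the fly."""
    def __getattr__(self, attr):
        return SyncProxyMethod(attr)

proxy = SyncProxy()
print(proxy.get_download_rate())               # prints 42.0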