Update of /cvsroot/btplusplus/BT++/src/core
In directory sc8-pr-cvs1:/tmp/cvs-serv23881/src/core
Modified Files:
Choker.py Connecter.py CurrentRateMeasure.py
DownloaderFeedback.py Encrypter.py HTTPHandler.py NatCheck.py
PiecePicker.py RateMeasure.py RawServer.py Rerequester.py
Storage.py StorageWrapper.py Uploader.py __init__.py
btformats.py download.py testtest.py track.py
Added Files:
zurllib.py
Log Message:
- Updated to wxPython 2.4.0.7 (no source changes).
- Updated to BT 3.2 (some Loader changes + core).
- Moved some stuff.
--- NEW FILE: zurllib.py ---
#
# zurllib.py
#
# This is (hopefully) a drop-in for urllib which will request gzip/deflate
# compression and then decompress the output if a compressed response is
# received while maintaining the API.
#
# by Robert Stone 2/22/2003
#
from urllib import *
from urllib2 import *
from gzip import GzipFile
from StringIO import StringIO
import pprint
DEBUG=0
class HTTPContentEncodingHandler(HTTPHandler):
    """Inherit and add gzip/deflate/etc support to HTTP gets."""
    def http_open(self, req):
        # add the Accept-Encoding header to the request
        # support gzip encoding (identity is assumed)
        req.add_header("Accept-Encoding","gzip")
        if DEBUG:
            print "Sending:"
            print req.headers
            print "\n"
        fp = HTTPHandler.http_open(self,req)
        headers = fp.headers
        if DEBUG:
            pprint.pprint(headers.dict)
        url = fp.url
        return addinfourldecompress(fp, headers, url)
class addinfourldecompress(addinfourl):
    """Do gzip decompression if necessary. Do addinfourl stuff too."""
    def __init__(self, fp, headers, url):
        # we need to do something more sophisticated here to deal with
        # multiple values? What about other weird crap like q-values?
        # basically this only works for the most simplistic case and will
        # break in some other cases, but for now we only care about making
        # this work with the BT tracker so....
        if headers.has_key('content-encoding') and headers['content-encoding'] == 'gzip':
            if DEBUG:
                print "Contents of Content-encoding: " + headers['Content-encoding'] + "\n"
            self.gzip = 1
            self.rawfp = fp
            fp = GzipStream(fp)
        else:
            self.gzip = 0
        return addinfourl.__init__(self, fp, headers, url)

    def close(self):
        self.fp.close()
        if self.gzip:
            self.rawfp.close()

    def iscompressed(self):
        return self.gzip
class GzipStream(StringIO):
    """Magically decompress a file object.

    This is not the most efficient way to do this, but GzipFile() wants
    to seek, etc, which won't work for a stream such as that from a socket.
    So we copy the whole shebang into a StringIO object, decompress that,
    then let people access the decompressed output as a StringIO object.
    The disadvantage is memory use and the advantage is random access.
    Will mess with fixing this later.
    """
    def __init__(self, fp):
        self.fp = fp
        # this is nasty and needs to be fixed at some point
        # copy everything into a StringIO (compressed)
        compressed = StringIO()
        r = fp.read()
        while r:
            compressed.write(r)
            r = fp.read()
        # now, unzip (gz) the StringIO to a string
        compressed.seek(0,0)
        gz = GzipFile(fileobj = compressed)
        str = ''
        r = gz.read()
        while r:
            str += r
            r = gz.read()
        # close our utility files
        compressed.close()
        gz.close()
        # init our stringio selves with the string
        StringIO.__init__(self, str)
        del str

    def close(self):
        self.fp.close()
        return StringIO.close(self)
def test():
    """Test this module.

    At the moment this is lame.
    """
    print "Running unit tests.\n"

    def printcomp(fp):
        try:
            if fp.iscompressed():
                print "GET was compressed.\n"
            else:
                print "GET was uncompressed.\n"
        except:
            print "no iscompressed function! this shouldn't happen"

    print "Trying to GET a compressed document...\n"
    fp = urlopen('http://a.scarywater.net/hng/index.shtml')
    print fp.read()
    printcomp(fp)
    fp.close()

    print "Trying to GET an unknown document...\n"
    fp = urlopen('http://www.otaku.org/')
    print fp.read()
    printcomp(fp)
    fp.close()
#
# Install the HTTPContentEncodingHandler that we've defined above.
#
install_opener(build_opener(HTTPContentEncodingHandler))

if __name__ == '__main__':
    test()
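
Since zurllib re-exports the urllib/urllib2 names and installs its opener at import time, callers only have to change their import to pick up transparent gzip support. A minimal usage sketch follows (the tracker URL is hypothetical; iscompressed() is the extra method added by addinfourldecompress above):

    from zurllib import urlopen

    fp = urlopen('http://tracker.example.com/announce')   # hypothetical URL
    data = fp.read()            # already decompressed if the response was gzipped
    if fp.iscompressed():
        print "tracker response arrived gzip-compressed"
    fp.close()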
Index: Choker.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/Choker.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** Choker.py 19 Feb 2003 20:40:37 -0000 1.1
--- Choker.py 28 Mar 2003 12:44:27 -0000 1.2
***************
*** 7,15 ****
class Choker:
! def __init__(self, max_uploads, schedule):
self.max_uploads = max_uploads
self.schedule = schedule
self.connections = []
self.count = 0
schedule(self._round_robin, 10)
--- 7,16 ----
class Choker:
! def __init__(self, max_uploads, schedule, done = lambda: false):
self.max_uploads = max_uploads
self.schedule = schedule
self.connections = []
self.count = 0
+ self.done = done
schedule(self._round_robin, 10)
***************
*** 20,70 ****
for i in xrange(len(self.connections)):
u = self.connections[i].get_upload()
! if u.get_hit() or u.is_choked():
self.connections = self.connections[i:] + self.connections[:i]
break
! self._rechoke()
! for c in self.connections:
! c.get_upload().set_not_hit()
else:
! self._rechoke()
!
def _rechoke(self):
preferred = []
for c in self.connections:
! d = c.get_download()
! if not d.is_snubbed() and c.get_upload().is_interested():
! preferred.append((d.get_rate(), c))
preferred.sort()
- preferred.reverse()
del preferred[self.max_uploads - 1:]
- if self.max_uploads == 0:
- preferred = []
preferred = [x[1] for x in preferred]
- for c in preferred:
- c.get_upload().unchoke()
count = len(preferred)
for c in self.connections:
- if c in preferred:
- continue
u = c.get_upload()
! if count < self.max_uploads:
u.unchoke()
- if u.is_interested():
- count += 1
else:
! u.choke()
def connection_made(self, connection, p = None):
if p is None:
p = randrange(-2, len(self.connections) + 1)
! if p <= 0:
! self.connections.insert(0, connection)
! else:
! self.connections.insert(p, connection)
self._rechoke()
def connection_lost(self, connection):
self.connections.remove(connection)
! self._rechoke()
def interested(self, connection):
--- 21,71 ----
for i in xrange(len(self.connections)):
u = self.connections[i].get_upload()
! if u.is_choked() and u.is_interested():
self.connections = self.connections[i:] + self.connections[:i]
break
! self._rechoke()
!
! def _snubbed(self, c):
! if self.done():
! return false
! return c.get_download().is_snubbed()
!
! def _rate(self, c):
! if self.done():
! return c.get_upload().get_rate()
else:
! return c.get_download().get_rate()
!
def _rechoke(self):
preferred = []
for c in self.connections:
! if not self._snubbed(c) and c.get_upload().is_interested():
! preferred.append((-self._rate(c), c))
preferred.sort()
del preferred[self.max_uploads - 1:]
preferred = [x[1] for x in preferred]
count = len(preferred)
for c in self.connections:
u = c.get_upload()
! if c in preferred:
u.unchoke()
else:
! if count < self.max_uploads:
! u.unchoke()
! if u.is_interested():
! count += 1
! else:
! u.choke()
def connection_made(self, connection, p = None):
if p is None:
p = randrange(-2, len(self.connections) + 1)
! self.connections.insert(max(p, 0), connection)
self._rechoke()
def connection_lost(self, connection):
self.connections.remove(connection)
! if connection.get_upload().is_interested() and not connection.get_upload().is_choked():
! self._rechoke()
def interested(self, connection):
***************
*** 110,124 ****
self.i = false
self.c = true
- self.hit = true
def choke(self):
if not self.c:
self.c = true
! self.hit = true
!
def unchoke(self):
if self.c:
self.c = false
- self.hit = true
def is_choked(self):
--- 111,122 ----
self.i = false
self.c = true
def choke(self):
if not self.c:
self.c = true
!
def unchoke(self):
if self.c:
self.c = false
def is_choked(self):
***************
*** 128,137 ****
return self.i
- def set_not_hit(self):
- self.hit = false
-
- def get_hit(self):
- return self.hit
-
def test_round_robin_with_no_downloads():
s = DummyScheduler()
--- 126,129 ----
***************
*** 237,241 ****
assert not c2.u.c
! def test_interrupt_by_connection_lost():
s = DummyScheduler()
choker = Choker(1, s)
--- 229,233 ----
assert not c2.u.c
! def test_skip_not_interested():
s = DummyScheduler()
choker = Choker(1, s)
***************
*** 244,261 ****
c3 = DummyConnection(2)
c1.u.i = true
- c2.u.i = true
c3.u.i = true
! choker.connection_made(c1)
! choker.connection_made(c2, 1)
! choker.connection_made(c3, 2)
! f = s.s[0][0]
! f()
assert not c1.u.c
assert c2.u.c
! assert c3.u.c
! f()
assert not c1.u.c
assert c2.u.c
assert c3.u.c
f()
assert not c1.u.c
--- 236,250 ----
c3 = DummyConnection(2)
c1.u.i = true
c3.u.i = true
! choker.connection_made(c2)
! assert not c2.u.c
! choker.connection_made(c1, 0)
assert not c1.u.c
assert c2.u.c
! choker.connection_made(c3, 2)
assert not c1.u.c
assert c2.u.c
assert c3.u.c
+ f = s.s[0][0]
f()
assert not c1.u.c
***************
*** 267,279 ****
assert c3.u.c
f()
! assert not c1.u.c
assert c2.u.c
! assert c3.u.c
! choker.connection_lost(c1)
! assert not c2.u.c
! assert c3.u.c
! f()
! assert not c2.u.c
! assert c3.u.c
def test_connection_lost_no_interrupt():
--- 256,262 ----
assert c3.u.c
f()
! assert c1.u.c
assert c2.u.c
! assert not c3.u.c
def test_connection_lost_no_interrupt():
***************
*** 299,355 ****
assert c3.u.c
f()
! assert not c1.u.c
! assert c2.u.c
assert c3.u.c
f()
! assert not c1.u.c
! assert c2.u.c
assert c3.u.c
f()
! assert not c1.u.c
! assert c2.u.c
! assert c3.u.c
! choker.connection_lost(c2)
! assert not c1.u.c
assert c3.u.c
! f()
assert c1.u.c
! assert not c3.u.c
!
! def test_interrupt_by_connection_made():
! s = DummyScheduler()
! choker = Choker(1, s)
! c1 = DummyConnection(0)
! c2 = DummyConnection(1)
! c3 = DummyConnection(2)
! c1.u.i = true
! c2.u.i = true
! c3.u.i = true
! choker.connection_made(c1)
! choker.connection_made(c2, 1)
! f = s.s[0][0]
! f()
! assert not c1.u.c
! assert c2.u.c
! f()
! assert not c1.u.c
! assert c2.u.c
! f()
! assert not c1.u.c
! assert c2.u.c
f()
assert not c1.u.c
assert c2.u.c
! f()
assert not c1.u.c
- assert c2.u.c
- choker.connection_made(c3, 0)
- assert c1.u.c
- assert c2.u.c
- assert not c3.u.c
- f()
- assert c1.u.c
- assert c2.u.c
- assert not c3.u.c
def test_connection_made_no_interrupt():
--- 282,304 ----
assert c3.u.c
f()
! assert c1.u.c
! assert not c2.u.c
assert c3.u.c
f()
! assert c1.u.c
! assert not c2.u.c
assert c3.u.c
f()
! assert c1.u.c
! assert not c2.u.c
assert c3.u.c
! choker.connection_lost(c3)
assert c1.u.c
! assert not c2.u.c
f()
assert not c1.u.c
assert c2.u.c
! choker.connection_lost(c2)
assert not c1.u.c
def test_connection_made_no_interrupt():
***************
*** 365,375 ****
choker.connection_made(c2, 1)
f = s.s[0][0]
- f()
- assert not c1.u.c
- assert c2.u.c
- f()
- assert not c1.u.c
- assert c2.u.c
- f()
assert not c1.u.c
assert c2.u.c
--- 314,317 ----
***************
*** 399,409 ****
choker.connection_made(c2, 1)
f = s.s[0][0]
- f()
- assert not c1.u.c
- assert c2.u.c
- f()
- assert not c1.u.c
- assert c2.u.c
- f()
assert not c1.u.c
assert c2.u.c
--- 341,344 ----
***************
*** 473,486 ****
assert c11.u.c
- #uninterested
- #interested snubbed,
- #uninterested,
- #interested first priority,
- #uninterested,
- #interested snubbed,
- #uninterested,
- #interested third priority snubbed,
- #uninterested zeroth priority,
- #interested second priority
- #uninterested
--- 408,410 ----
Index: Connecter.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/Connecter.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** Connecter.py 19 Feb 2003 20:40:37 -0000 1.1
--- Connecter.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 40,43 ****
--- 40,46 ----
return self.connection.get_ip()
+ def get_id(self):
+ return self.connection.get_id()
+
def close(self):
self.connection.close()
Index: CurrentRateMeasure.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/CurrentRateMeasure.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** CurrentRateMeasure.py 19 Feb 2003 20:40:37 -0000 1.1
--- CurrentRateMeasure.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 57,61 ****
if self.ratesince < t - self.max_rate_period:
self.ratesince = t - self.max_rate_period
-
def add_measure(self, m):
self.Measures.append( m )
--- 57,60 ----
Index: DownloaderFeedback.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/DownloaderFeedback.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** DownloaderFeedback.py 19 Feb 2003 20:40:37 -0000 1.1
--- DownloaderFeedback.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 16,20 ****
file_length,
finflag,
! interval ):
self.choker = choker
self.add_task = add_task
--- 16,21 ----
file_length,
finflag,
! interval,
! sp ):
self.choker = choker
self.add_task = add_task
***************
*** 27,36 ****
self.finflag = finflag
self.interval = interval
self.display()
def spew(self):
s = StringIO()
! for c in self.choker.connections:
s.write('%20s ' % c.get_ip())
if c.is_locally_initiated():
s.write('l')
--- 28,53 ----
self.finflag = finflag
self.interval = interval
+ self.sp = sp
+ self.lastids = []
self.display()
+ def _rotate(self):
+ cs = self.choker.connections
+ for id in self.lastids:
+ for i in xrange(len(cs)):
+ if cs[i].get_id() == id:
+ return cs[i:] + cs[:i]
+ return cs
+
def spew(self):
s = StringIO()
! cs = self._rotate()
! self.lastids = [c.get_id() for c in cs]
! for c in cs:
s.write('%20s ' % c.get_ip())
+ if c is self.choker.connections[0]:
+ s.write('*')
+ else:
+ s.write(' ')
if c.is_locally_initiated():
s.write('l')
***************
*** 67,71 ****
def display(self):
self.add_task(self.display, self.interval)
! #self.spew()
if self.finflag.isSet():
self.statusfunc(upRate = self.upfunc())
--- 84,89 ----
def display(self):
self.add_task(self.display, self.interval)
! if self.sp:
! self.spew()
if self.finflag.isSet():
self.statusfunc(upRate = self.upfunc())
Index: Encrypter.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/Encrypter.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** Encrypter.py 19 Feb 2003 20:40:37 -0000 1.1
--- Encrypter.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 37,40 ****
--- 37,43 ----
return self.connection.get_ip()
+ def get_id(self):
+ return self.id
+
def is_locally_initiated(self):
return self.locally_initiated
Index: HTTPHandler.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/HTTPHandler.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** HTTPHandler.py 19 Feb 2003 20:40:36 -0000 1.1
--- HTTPHandler.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 5,11 ****
--- 5,14 ----
from sys import stdout
import time
+ from gzip import GzipFile
true = 1
false = 0
+ DEBUG = false
+
weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
***************
*** 65,68 ****
--- 68,87 ----
if data == '':
self.donereading = true
+ # check for Accept-Encoding: header, pick a
+ if self.headers.has_key('accept-encoding'):
+ ae = self.headers['accept-encoding']
+ if DEBUG:
+ print "Got Accept-Encoding: " + ae + "\n"
+ else:
+ #identity assumed if no header
+ ae = 'identity'
+ # this eventually needs to support multple acceptable types
+ # q-values and all that fancy HTTP crap
+ # for now assume we're only communicating with our own client
+ if ae == 'gzip':
+ self.encoding = 'gzip'
+ else:
+ #default to identity.
+ self.encoding = 'identity'
r = self.handler.getfunc(self, self.path, self.headers)
if r is not None:
***************
*** 74,77 ****
--- 93,98 ----
return None
self.headers[data[:i].strip().lower()] = data[i+1:].strip()
+ if DEBUG:
+ print data[:i].strip() + ": " + data[i+1:].strip()
return self.read_header
***************
*** 79,86 ****
if self.closed:
return
year, month, day, hour, minute, second, a, b, c = time.localtime(time.time())
! print '%s - - [%02d/%3s/%04d:%02d:%02d:%02d] "%s" %i %i' % (
! self.connection.get_ip(), day, months[month], year, hour, minute,
! second, self.header, responsecode, len(data))
t = time.time()
if t - self.handler.lastflush > self.handler.minflush:
--- 100,131 ----
if self.closed:
return
+ if self.encoding == 'gzip':
+ #transform data using gzip compression
+ #this is nasty but i'm unsure of a better way at the moment
+ compressed = StringIO()
+ gz = GzipFile(fileobj = compressed, mode = 'wb', compresslevel = 9)
+ gz.write(data)
+ gz.close()
+ compressed.seek(0,0)
+ cdata = compressed.read()
+ compressed.close()
+ if len(cdata) >= len(data):
+ self.encoding = 'identity'
+ else:
+ if DEBUG:
+ print "Compressed: %i Uncompressed: %i\n" % (len(cdata),len(data))
+ data = cdata
+ headers['Content-Encoding'] = 'gzip'
+
+ # i'm abusing the identd field here, but this should be ok
+ if self.encoding == 'identity':
+ ident = '-'
+ else:
+ ident = self.encoding
+
year, month, day, hour, minute, second, a, b, c = time.localtime(time.time())
! print '%s %s - [%02d/%3s/%04d:%02d:%02d:%02d] "%s" %i %i' % (
! self.connection.get_ip(), ident, day, months[month], year, hour,
! minute, second, self.header, responsecode, len(data))
t = time.time()
if t - self.handler.lastflush > self.handler.minflush:
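
For reference alongside the hunk above: when the tracker request advertised Accept-Encoding: gzip, the response body is gzip-compressed into an in-memory buffer and kept only if it actually came out smaller; otherwise the encoding falls back to identity. A standalone sketch of that step, assuming the same GzipFile/StringIO approach (the helper name maybe_gzip is ours, not in the diff):

    from gzip import GzipFile
    from StringIO import StringIO

    def maybe_gzip(data):
        # compress the payload into an in-memory buffer, as answer() does above
        compressed = StringIO()
        gz = GzipFile(fileobj = compressed, mode = 'wb', compresslevel = 9)
        gz.write(data)
        gz.close()
        cdata = compressed.getvalue()
        compressed.close()
        if len(cdata) >= len(data):
            return data, 'identity'    # compression did not help; send as-is
        return cdata, 'gzip'           # caller should set Content-Encoding: gzip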
Index: NatCheck.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/NatCheck.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** NatCheck.py 19 Feb 2003 20:40:36 -0000 1.1
--- NatCheck.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 17,20 ****
--- 17,22 ----
self.downloadid = downloadid
self.peerid = peerid
+ self.ip = ip
+ self.port = port
self.closed = false
self.buffer = StringIO()
***************
*** 32,37 ****
def answer(self, result):
self.closed = true
! self.connection.close()
! self.resultfunc(result)
def read_header_len(self, s):
--- 34,42 ----
def answer(self, result):
self.closed = true
! try:
! self.connection.close()
! except AttributeError:
! pass
! self.resultfunc(result, self.downloadid, self.peerid, self.ip, self.port)
def read_header_len(self, s):
***************
*** 82,86 ****
if not self.closed:
self.closed = true
! self.resultfunc(false)
def connection_flushed(self, connection):
--- 87,91 ----
if not self.closed:
self.closed = true
! self.resultfunc(false, self.downloadid, self.peerid, self.ip, self.port)
def connection_flushed(self, connection):
Index: PiecePicker.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/PiecePicker.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** PiecePicker.py 19 Feb 2003 20:40:36 -0000 1.1
--- PiecePicker.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 2,6 ****
# see LICENSE-BitTorrent.txt for license information
! from random import randrange
true = 1
false = 0
--- 2,6 ----
# see LICENSE-BitTorrent.txt for license information
! from random import randrange, shuffle
true = 1
false = 0
***************
*** 36,39 ****
--- 36,58 ----
return last
+ class RandomPicker:
+ def __init__(self, picker):
+ self.picker = picker
+ self.fixedpos = 0
+ self.l = None
+
+ def next(self):
+ if self.fixedpos < len(self.picker.fixed):
+ self.fixedpos += 1
+ return self.picker.fixed[self.fixedpos - 1]
+ if self.l is None:
+ self.l = []
+ for x in self.picker.interests[1:]:
+ self.l.extend(x)
+ shuffle(self.l)
+ if not self.l:
+ raise StopIteration
+ return self.l.pop()
+
class PiecePicker:
def __init__(self, numpieces):
***************
*** 43,51 ****
self.interestpos = range(numpieces)
self.fixed = []
# this is a total hack to support python2.1 but supports for ... in
def __getitem__(self, key):
if key == 0:
! self.picker = SinglePicker(self)
try:
return self.picker.next()
--- 62,71 ----
self.interestpos = range(numpieces)
self.fixed = []
+ self.got_any = false
# this is a total hack to support python2.1 but supports for ... in
def __getitem__(self, key):
if key == 0:
! self.picker = self.__iter__()
try:
return self.picker.next()
***************
*** 93,101 ****
def complete(self, piece):
self.came_in(piece)
self.fixed.remove(piece)
def __iter__(self):
! return SinglePicker(self)
def test_came_in():
--- 113,125 ----
def complete(self, piece):
+ self.got_any = true
self.came_in(piece)
self.fixed.remove(piece)
def __iter__(self):
! if self.got_any:
! return SinglePicker(self)
! else:
! return RandomPicker(self)
def test_came_in():
***************
*** 116,119 ****
--- 140,155 ----
def test_change_interest():
p = PiecePicker(8)
+ p.got_have(0)
+ p.got_have(2)
+ p.got_have(4)
+ p.got_have(6)
+ p.lost_have(2)
+ p.lost_have(6)
+ v = [i for i in p]
+ assert v == [0, 4] or v == [4, 0]
+
+ def test_change_interest2():
+ p = PiecePicker(9)
+ p.complete(8)
p.got_have(0)
p.got_have(2)
Index: RateMeasure.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/RateMeasure.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** RateMeasure.py 19 Feb 2003 20:40:36 -0000 1.1
--- RateMeasure.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 25,28 ****
--- 25,31 ----
self.update(time(), amount)
+ def data_rejected(self, amount):
+ self.left += amount
+
def get_time_left(self):
if not self.got_anything:
***************
*** 35,44 ****
def get_size_left(self):
return self.left
!
def update(self, t, amount):
self.left -= amount
- self.rate = ((self.rate * (self.last - self.start)) + amount) / (t - self.start)
- self.last = t
try:
self.remaining = self.left / self.rate
if self.start < self.last - self.remaining:
--- 38,47 ----
def get_size_left(self):
return self.left
!
def update(self, t, amount):
self.left -= amount
try:
+ self.rate = ((self.rate * (self.last - self.start)) + amount) / (t - self.start)
+ self.last = t
self.remaining = self.left / self.rate
if self.start < self.last - self.remaining:
Index: RawServer.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/RawServer.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** RawServer.py 19 Feb 2003 20:40:36 -0000 1.1
--- RawServer.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 210,215 ****
if self.doneflag.isSet():
return
- else:
- print_exc()
except KeyboardInterrupt:
print_exc()
--- 210,213 ----
Index: Rerequester.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/Rerequester.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** Rerequester.py 19 Feb 2003 20:40:35 -0000 1.1
--- Rerequester.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 2,6 ****
# see LICENSE-BitTorrent.txt for license information
! from urllib import urlopen, quote
from btformats import check_peers
from bencode import bdecode
--- 2,6 ----
# see LICENSE-BitTorrent.txt for license information
! from zurllib import urlopen, quote
from btformats import check_peers
from bencode import bdecode
***************
*** 13,17 ****
def __init__(self, url, interval, sched, howmany, minpeers,
connect, externalsched, amount_left, up, down,
! port, ip, myid, infohash, timeout, errorfunc):
self.url = ('%s?info_hash=%s&peer_id=%s&port=%s' %
(url, quote(infohash), quote(myid), str(port)))
--- 13,17 ----
def __init__(self, url, interval, sched, howmany, minpeers,
connect, externalsched, amount_left, up, down,
! port, ip, myid, infohash, timeout, errorfunc, maxpeers, doneflag):
self.url = ('%s?info_hash=%s&peer_id=%s&port=%s' %
(url, quote(infohash), quote(myid), str(port)))
***************
*** 19,22 ****
--- 19,24 ----
self.url += '&ip=' + quote(ip)
self.interval = interval
+ self.last = None
+ self.trackerid = None
self.announce_interval = 30 * 60
self.sched = sched
***************
*** 30,33 ****
--- 32,37 ----
self.timeout = timeout
self.errorfunc = errorfunc
+ self.maxpeers = maxpeers
+ self.doneflag = doneflag
self.last_failed = true
self.sched(self.c, interval / 2)
***************
*** 48,51 ****
--- 52,61 ----
(self.url, str(self.up()), str(self.down()),
str(self.amount_left())))
+ if self.last is not None:
+ s += '&last=' + quote(str(self.last))
+ if self.trackerid is not None:
+ s += '&trackerid=' + quote(str(self.trackerid))
+ if self.howmany() >= self.maxpeers:
+ s += '&numwant=0'
if event != 3:
s += '&event=' + ['started', 'completed', 'stopped'][event]
***************
*** 86,90 ****
self.errorfunc('rejected by tracker - ' + r['failure reason'])
else:
! self.announce_interval = r['interval']
for x in r['peers']:
self.connect((x['ip'], x['port']), x['peer id'])
--- 96,111 ----
self.errorfunc('rejected by tracker - ' + r['failure reason'])
else:
! self.announce_interval = r.get('interval', self.announce_interval)
! self.interval = r.get('min interval', self.interval)
! self.trackerid = r.get('tracker id', self.trackerid)
! self.last = r.get('last')
! ps = len(r['peers']) + self.howmany()
! if ps < self.maxpeers:
! if self.doneflag.isSet():
! if r.get('num peers', 1000) - r.get('done peers', 0) > ps * 1.2:
! self.last = None
! else:
! if r.get('num peers', 1000) > ps * 1.2:
! self.last = None
for x in r['peers']:
self.connect((x['ip'], x['port']), x['peer id'])
Index: Storage.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/Storage.py,v
retrieving revision 1.2
retrieving revision 1.3
diff -C2 -d -r1.2 -r1.3
*** Storage.py 22 Feb 2003 16:14:54 -0000 1.2
--- Storage.py 28 Mar 2003 12:44:28 -0000 1.3
***************
*** 3,7 ****
from sha import sha
- from threading import Event
from cStringIO import StringIO
from time import time
--- 3,6 ----
***************
*** 15,22 ****
def __init__(self, files, open, exists, getsize, statusfunc, alloc_pause = 3):
# can raise IOError and ValueError
- self.isopen = false
self.ranges = []
! total = 0
! so_far = 0
for file, length in files:
if length != 0:
--- 14,20 ----
def __init__(self, files, open, exists, getsize, statusfunc, alloc_pause = 3):
# can raise IOError and ValueError
self.ranges = []
! total = 0l
! so_far = 0l
for file, length in files:
if length != 0:
***************
*** 34,50 ****
else:
open(file, 'wb').close()
-
self.total_length = total
self.handles = {}
self.preexisting = false
for file, length in files:
if exists(file):
! self.handles[file] = open(file, 'rb+')
self.preexisting = true
else:
self.handles[file] = open(file, 'wb+')
!
! self.isopen = true
!
if total > so_far:
interval = max(2 ** 20, long(total / 100))
--- 32,46 ----
else:
open(file, 'wb').close()
self.total_length = total
self.handles = {}
+ self.whandles = {}
self.preexisting = false
for file, length in files:
if exists(file):
! self.handles[file] = open(file, 'rb')
self.preexisting = true
else:
self.handles[file] = open(file, 'wb+')
! self.whandles[file] = 1
if total > so_far:
interval = max(2 ** 20, long(total / 100))
***************
*** 57,70 ****
if l == length:
continue
h = self.handles[file]
! for i in range(l, length, interval)[1:] + [length-1]:
h.seek(i)
h.write(chr(1))
if time() - tstart > alloc_pause:
if not hit:
! statusfunc(activity = 'Allocating', fractionDone = -1)
hit = true
statusfunc(fractionDone = float(so_far + i - l)/total)
so_far += length - l
def set_readonly(self):
--- 53,70 ----
if l == length:
continue
+ if self.preexisting:
+ self.handles[file] = open(file,'rb+')
+ self.whandles[file] = 1
h = self.handles[file]
! for i in lrange(l, length, interval)[1:] + [length-1]:
h.seek(i)
h.write(chr(1))
if time() - tstart > alloc_pause:
if not hit:
! statusfunc(activity = 'allocating')
hit = true
statusfunc(fractionDone = float(so_far + i - l)/total)
so_far += length - l
+ statusfunc(fractionDone = 1.0)
def set_readonly(self):
***************
*** 92,98 ****
def read(self, pos, amount):
- if self.isopen == false:
- return
-
r = StringIO()
for file, pos, end in self._intervals(pos, amount):
--- 92,95 ----
***************
*** 104,112 ****
def write(self, pos, s):
# might raise an IOError
- if self.isopen == false:
- return
-
total = 0
for file, begin, end in self._intervals(pos, len(s)):
h = self.handles[file]
h.seek(begin)
--- 101,109 ----
def write(self, pos, s):
# might raise an IOError
total = 0
for file, begin, end in self._intervals(pos, len(s)):
+ if not self.whandles.has_key(file):
+ self.handles[file] = open(file, 'rb+')
+ self.whandles[file] = 1
h = self.handles[file]
h.seek(begin)
***************
*** 114,121 ****
total += end - begin
! def close(self):
! for file in self.handles:
! self.handles[file].close()
! self.isopen = false
# everything below is for testing
--- 111,120 ----
total += end - begin
! def lrange(a, b, c):
! r = []
! while a < b:
! r.append(a)
! a += c
! return r
# everything below is for testing
Index: StorageWrapper.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/StorageWrapper.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** StorageWrapper.py 19 Feb 2003 20:40:33 -0000 1.1
--- StorageWrapper.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 10,18 ****
pass
class StorageWrapper:
! def __init__( self, storage, request_size, hashes,
! piece_length, finished, failed,
! statusfunc = dummy_status, flag = Event(),
! check_hashes = true):
self.check_hashes = check_hashes
self.storage = storage
--- 10,21 ----
pass
+ def dummy_data_flunked(size):
+ pass
+
class StorageWrapper:
! def __init__(self, storage, request_size, hashes,
! piece_length, finished, failed,
! statusfunc = dummy_status, flag = Event(), check_hashes = true,
! data_flunked = dummy_data_flunked):
self.check_hashes = check_hashes
self.storage = storage
***************
*** 20,23 ****
--- 23,27 ----
self.hashes = hashes
self.piece_length = piece_length
+ self.data_flunked = data_flunked
self.total_length = storage.get_total_length()
self.amount_left = self.total_length
***************
*** 35,40 ****
finished()
return
if storage.was_preexisting():
! statusfunc(activity = 'Hashing', fractionDone = 0.0)
for i in xrange(len(hashes)):
self._check_single(i)
--- 39,46 ----
finished()
return
+ self.done_checking = false
if storage.was_preexisting():
! statusfunc(activity = 'Hashing',
! fractionDone = 0)
for i in xrange(len(hashes)):
self._check_single(i)
***************
*** 45,48 ****
--- 51,55 ----
for i in xrange(len(hashes)):
self._check_single(i, false)
+ self.done_checking = true
def get_amount_left(self):
***************
*** 58,75 ****
high = self.total_length
length = high - low
! if check and (not self.check_hashes or sha(self.storage.read(low, length)).digest() == self.hashes[index]):
! self.have[index] = true
! self.amount_left -= length
! if self.amount_left == 0:
! self.finished()
! else:
! l = self.inactive_requests[index]
! x = 0
! while x + self.request_size < length:
! l.append((x, self.request_size))
! self.total_inactive += 1
! x += self.request_size
! l.append((x, length - x))
self.total_inactive += 1
def is_everything_pending(self):
--- 65,86 ----
high = self.total_length
length = high - low
! if check:
! if not self.check_hashes or sha(self.storage.read(low, length)).digest() == self.hashes[index]:
! self.have[index] = true
! self.amount_left -= length
! if self.amount_left == 0:
! self.finished()
! return
! else:
! if self.done_checking:
! self.data_flunked(length)
! l = self.inactive_requests[index]
! x = 0
! while x + self.request_size < length:
! l.append((x, self.request_size))
self.total_inactive += 1
+ x += self.request_size
+ l.append((x, length - x))
+ self.total_inactive += 1
def is_everything_pending(self):
Index: Uploader.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/Uploader.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** Uploader.py 19 Feb 2003 20:40:25 -0000 1.1
--- Uploader.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 19,32 ****
self.buffer = []
self.measure = Measure(max_rate_period, fudge)
- self.hit = true
if storage.do_I_have_anything():
connection.send_bitfield(storage.get_have_list())
- def set_not_hit(self):
- self.hit = false
-
- def get_hit(self):
- return self.hit
-
def got_not_interested(self):
if self.interested:
--- 19,25 ----
***************
*** 70,74 ****
del self.buffer[:]
self.connection.send_choke()
- self.hit = true
def unchoke(self):
--- 63,66 ----
***************
*** 76,80 ****
self.choked = false
self.connection.send_unchoke()
- self.hit = true
def is_choked(self):
--- 68,71 ----
Index: __init__.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/__init__.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** __init__.py 19 Feb 2003 20:40:37 -0000 1.1
--- __init__.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 1 ****
! version = '3.1'
--- 1 ----
! version = '3.2'
Index: btformats.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/btformats.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** btformats.py 19 Feb 2003 20:40:37 -0000 1.1
--- btformats.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 80,85 ****
if type(id) != StringType or len(id) != 20:
raise ValueError
! interval = message.get('interval')
if type(interval) not in ints or interval <= 0:
raise ValueError
!
--- 80,98 ----
if type(id) != StringType or len(id) != 20:
raise ValueError
! interval = message.get('interval', 1)
if type(interval) not in ints or interval <= 0:
raise ValueError
! minint = message.get('min interval', 1)
! if type(minint) not in ints or minint <= 0:
! raise ValueError
! if type(message.get('tracker id', '')) != StringType:
! raise ValueError
! npeers = message.get('num peers', 0)
! if type(npeers) not in ints or npeers < 0:
! raise ValueError
! dpeers = message.get('done peers', 0)
! if type(dpeers) not in ints or dpeers < 0:
! raise ValueError
! last = message.get('last', 0)
! if type(last) not in ints or last < 0:
! raise ValueError
Index: download.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/download.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** download.py 19 Feb 2003 20:40:37 -0000 1.1
--- download.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 2,6 ****
# see LICENSE-BitTorrent.txt for license information
! from urllib import urlopen
from urlparse import urljoin
from btformats import check_message
--- 2,6 ----
# see LICENSE-BitTorrent.txt for license information
! from zurllib import urlopen
from urlparse import urljoin
from btformats import check_message
***************
*** 45,49 ****
"ip to report you have to the tracker."),
('minport', None, 6881, 'minimum port to listen on, counts up if unavailable'),
! ('maxport', None, 7881, 'maximum port to listen on'),
('responsefile', None, '',
'file the server response was stored in, alternative to url'),
--- 45,49 ----
"ip to report you have to the tracker."),
('minport', None, 6881, 'minimum port to listen on, counts up if unavailable'),
! ('maxport', None, 6999, 'maximum port to listen on'),
('responsefile', None, '',
'file the server response was stored in, alternative to url'),
***************
*** 78,85 ****
('max_upload_rate', None, 0,
'maximum kB/s to upload at, 0 means no limit'),
! ('alloc_pause', None, 0.0,
'seconds to wait before displaying allocation feedback'),
('snub_time', None, 60.0,
"seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"),
]
--- 78,87 ----
('max_upload_rate', None, 0,
'maximum kB/s to upload at, 0 means no limit'),
! ('alloc_pause', None, 3.0,
'seconds to wait before displaying allocation feedback'),
('snub_time', None, 60.0,
"seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"),
+ ('spew', None, 0,
+ "whether to display diagnostic info to stdout"),
]
***************
*** 88,92 ****
errorfunc('arguments are -\n' + formatDefinitions(defaults, cols))
return
-
try:
config, garbage = parseargs(params, defaults, 0, 0)
--- 90,93 ----
***************
*** 96,100 ****
errorfunc('error: ' + str(e) + '\nrun with no args for parameter explanations')
return
!
try:
if config['responsefile'] != '':
--- 97,101 ----
errorfunc('error: ' + str(e) + '\nrun with no args for parameter explanations')
return
!
try:
if config['responsefile'] != '':
***************
*** 167,171 ****
finflag = Event()
ann = [None]
! myid = sha(repr(time()) + ' ' + str(getpid())).digest()
seed(myid)
pieces = [info['pieces'][x:x+20] for x in xrange(0,
--- 168,172 ----
finflag = Event()
ann = [None]
! myid = (chr(0) * 12) + sha(repr(time()) + ' ' + str(getpid())).digest()[-8:]
seed(myid)
pieces = [info['pieces'][x:x+20] for x in xrange(0,
***************
*** 175,179 ****
if reason is not None:
errorfunc(reason)
-
try:
try:
--- 176,179 ----
***************
*** 193,205 ****
ann[0](1)
finfunc()
storagewrapper = StorageWrapper(storage,
config['download_slice_size'], pieces,
info['piece length'], finished, failed,
! statusfunc, doneflag, config['check_hashes'])
except ValueError, e:
failed('bad data - ' + str(e))
except IOError, e:
failed('IOError - ' + str(e))
-
if doneflag.isSet():
return
--- 193,209 ----
ann[0](1)
finfunc()
+ rm = [None]
+ def data_flunked(amount, rm = rm, errorfunc = errorfunc):
+ if rm[0] is not None:
+ rm[0](amount)
+ errorfunc('a piece failed hash check, re-downloading it')
storagewrapper = StorageWrapper(storage,
config['download_slice_size'], pieces,
info['piece length'], finished, failed,
! statusfunc, doneflag, config['check_hashes'], data_flunked)
except ValueError, e:
failed('bad data - ' + str(e))
except IOError, e:
failed('IOError - ' + str(e))
if doneflag.isSet():
return
***************
*** 217,221 ****
return
! choker = Choker(config['max_uploads'], rawserver.add_task)
upmeasure = Measure(config['max_rate_period'],
config['upload_rate_fudge'])
--- 221,225 ----
return
! choker = Choker(config['max_uploads'], rawserver.add_task, finflag.isSet)
upmeasure = Measure(config['max_rate_period'],
config['upload_rate_fudge'])
***************
*** 229,232 ****
--- 233,237 ----
max_slice_length, max_rate_period, fudge)
ratemeasure = RateMeasure(storagewrapper.get_amount_left())
+ rm[0] = ratemeasure.data_rejected
downloader = Downloader(storagewrapper, PiecePicker(len(pieces)),
config['request_backlog'], config['max_rate_period'],
***************
*** 245,255 ****
rawserver.external_add_task, storagewrapper.get_amount_left,
upmeasure.get_total, downmeasure.get_total, listen_port,
! config['ip'], myid, infohash, config['http_timeout'], errorfunc)
DownloaderFeedback(choker, rawserver.add_task, statusfunc,
upmeasure.get_rate, downmeasure.get_rate, ratemeasure.get_time_left,
ratemeasure.get_size_left, file_length, finflag,
! config['display_interval'])
! statusfunc(activity = 'Downloading')
ann[0] = rerequest.announce
rerequest.d(0)
--- 250,261 ----
rawserver.external_add_task, storagewrapper.get_amount_left,
upmeasure.get_total, downmeasure.get_total, listen_port,
! config['ip'], myid, infohash, config['http_timeout'], errorfunc,
! config['max_initiate'], doneflag)
DownloaderFeedback(choker, rawserver.add_task, statusfunc,
upmeasure.get_rate, downmeasure.get_rate, ratemeasure.get_time_left,
ratemeasure.get_size_left, file_length, finflag,
! config['display_interval'], config['spew'])
! statusfunc(activity = 'connecting to peers')
ann[0] = rerequest.announce
rerequest.d(0)
Index: testtest.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/testtest.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** testtest.py 19 Feb 2003 20:40:32 -0000 1.1
--- testtest.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 10,14 ****
# Written by Bram Cohen
! # see LICENSE.txt for license information
import traceback
--- 10,14 ----
# Written by Bram Cohen
! # see LICENSE-BitTorrent.txt for license information
import traceback
Index: track.py
===================================================================
RCS file: /cvsroot/btplusplus/BT++/src/core/track.py,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** track.py 19 Feb 2003 20:40:27 -0000 1.1
--- track.py 28 Mar 2003 12:44:28 -0000 1.2
***************
*** 8,12 ****
from threading import Event
from bencode import bencode, bdecode
! from urllib import urlopen, quote, unquote
from urlparse import urlparse
from os.path import exists
--- 8,12 ----
from threading import Event
from bencode import bencode, bdecode
! from zurllib import urlopen, quote, unquote
from urlparse import urlparse
from os.path import exists
***************
*** 33,37 ****
('timeout_check_interval', None, 5,
'time to wait between checking if any connections have timed out'),
! ('nat_check', None, 0,
'whether to check back and ban downloaders behind NAT'),
('min_time_between_log_flushes', None, 3.0,
--- 33,37 ----
('timeout_check_interval', None, 5,
'time to wait between checking if any connections have timed out'),
! ('nat_check', None, 1,
'whether to check back and ban downloaders behind NAT'),
('min_time_between_log_flushes', None, 3.0,
***************
*** 68,72 ****
if f[-8:] == '.torrent':
try:
! d = bdecode(open(os.path.join(dir,f)).read())
h = sha(bencode(d['info'])).digest()
a[h] = d['info'].get('name', f)
--- 68,72 ----
if f[-8:] == '.torrent':
try:
! d = bdecode(open(os.path.join(dir,f), 'rb').read())
h = sha(bencode(d['info'])).digest()
a[h] = d['info'].get('name', f)
***************
*** 191,195 ****
if self.allowed != None:
if not self.allowed.has_key(infohash):
! return (400, 'Not Authorized', {'Content-Type': 'text/plain'}, bencode({'failure reason':
'Requested download is not authorized for use with this tracker.'}))
ip = connection.get_ip()
--- 191,195 ----
if self.allowed != None:
if not self.allowed.has_key(infohash):
! return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode({'failure reason':
'Requested download is not authorized for use with this tracker.'}))
ip = connection.get_ip()
***************
*** 205,246 ****
if len(myid) != 20:
raise ValueError, 'id not of length 20'
except ValueError, e:
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
'you sent me garbage - ' + str(e))
! def respond(result, self = self, infohash = infohash, myid = myid,
! ip = ip, port = port, left = left, params = params,
! connection = connection):
! if not result:
! connection.answer((200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode({'failure reason':
! 'You are behind NAT. Please open port 6881 or download from elsewhere'})))
! return
! peers = self.downloads.setdefault(infohash, {})
! ts = self.times.setdefault(infohash, {})
! if params.get('event', '') != 'stopped':
! ts[myid] = time()
! if not peers.has_key(myid):
! peers[myid] = {'ip': ip, 'port': port, 'left': left}
! else:
! peers[myid]['left'] = left
else:
! if peers.has_key(myid) and peers[myid]['ip'] == ip:
! del peers[myid]
! del ts[myid]
! data = {'interval': self.reannounce_interval}
! cache = self.cached.setdefault(infohash, [])
! if len(cache) < self.response_size:
! for key, value in self.downloads.setdefault(
! infohash, {}).items():
cache.append({'peer id': key, 'ip': value['ip'],
'port': value['port']})
! shuffle(cache)
! data['peers'] = cache[-self.response_size:]
! del cache[-self.response_size:]
! connection.answer((200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data)))
! if (not self.natcheck or params.get('event') == 'stopped' or
! self.downloads.get(infohash, {}).has_key(myid)):
! respond(true)
! else:
! NatCheck(respond, infohash, myid, ip, port, self.rawserver)
def save_dfile(self):
--- 205,246 ----
if len(myid) != 20:
raise ValueError, 'id not of length 20'
+ rsize = self.response_size
+ if params.has_key('num want'):
+ rsize = int(params['num want'])
except ValueError, e:
return (400, 'Bad Request', {'Content-Type': 'text/plain'},
'you sent me garbage - ' + str(e))
! peers = self.downloads.setdefault(infohash, {})
! ts = self.times.setdefault(infohash, {})
! if params.get('event', '') != 'stopped':
! ts[myid] = time()
! if not peers.has_key(myid):
! peers[myid] = {'ip': ip, 'port': port, 'left': left}
else:
! peers[myid]['left'] = left
! else:
! if peers.has_key(myid) and peers[myid]['ip'] == ip:
! del peers[myid]
! del ts[myid]
! data = {'interval': self.reannounce_interval}
! cache = self.cached.setdefault(infohash, [])
! if len(cache) < rsize:
! for key, value in self.downloads.setdefault(
! infohash, {}).items():
! if not value.get('nat'):
cache.append({'peer id': key, 'ip': value['ip'],
'port': value['port']})
! shuffle(cache)
! data['peers'] = cache[-rsize:]
! del cache[-rsize:]
! connection.answer((200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data)))
! if self.natcheck:
! NatCheck(self.connectback_result, infohash, myid, ip, port, self.rawserver)
!
! def connectback_result(self, result, downloadid, peerid, ip, port):
! if not result:
! record = self.downloads.get(downloadid, {}).get(peerid)
! if record and record['ip'] == ip and record['port'] == port:
! record['nat'] = 1
def save_dfile(self):
|