
# Source Generated with Decompyle++
# File: in.pyc (Python 2.4)

from parseargs import parseargs, formatDefinitions
from RawServer import RawServer
from HTTPHandler import HTTPHandler
from NatCheck import NatCheck
from threading import Event
from bencode import bencode, bdecode, Bencached
from zurllib import urlopen, quote, unquote
from urlparse import urlparse
from os import rename
from os.path import exists, isfile
from cStringIO import StringIO
from time import time, gmtime, strftime
from random import shuffle
from sha import sha
from types import StringType, IntType, LongType, ListType, DictType
from binascii import b2a_hex, a2b_hex, a2b_base64
import sys
from __init__ import version
defaults = [
    ('port', 80, 'Port to listen on.'),
    ('dfile', None, 'file to store recent downloader info in'),
    ('bind', '', 'ip to bind to locally'),
    ('socket_timeout', 15, 'timeout for closing connections'),
    ('save_dfile_interval', 5 * 60, 'seconds between saving dfile'),
    ('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'),
    ('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'),
    ('response_size', 50, 'number of peers to send in an info message'),
    ('timeout_check_interval', 5, 'time to wait between checking if any connections have timed out'),
    ('nat_check', 3, "how many times to check if a downloader is behind a NAT (0 = don't check)"),
    ('min_time_between_log_flushes', 3.0, 'minimum time it must have been since the last flush to do another one'),
    ('allowed_dir', '', 'only allow downloads for .torrents in this dir'),
    ('parse_allowed_interval', 15, 'minutes between reloading of allowed_dir'),
    ('show_names', 1, 'whether to display names from allowed dir'),
    ('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'),
    ('only_local_override_ip', 1, "ignore the ip GET parameter from machines which aren't on local network IPs"),
    ('logfile', '', 'file to write the tracker logs, use - for stdout (default)'),
    ('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'),
    ('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'),
    ('max_give', 200, 'maximum number of peers to give with any one request')]

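# Validate the structure of a state dict loaded from dfile: 'peers' maps
# info_hash -> {20-byte peer id -> {'ip', 'port', 'left', ...}} and
# 'completed' maps info_hash -> download count. Raises ValueError on bad data.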
def statefiletemplate(x):
    if type(x) != DictType:
        raise ValueError
    for cname, cinfo in x.items():
        if cname == 'peers':
            for y in cinfo.values():
                if type(y) != DictType:
                    raise ValueError
                for id, info in y.items():
                    if len(id) != 20:
                        raise ValueError
                    if type(info) != DictType:
                        raise ValueError
                    if type(info.get('ip', '')) != StringType:
                        raise ValueError
                    port = info.get('port')
                    if type(port) not in (IntType, LongType) or port < 0:
                        raise ValueError
                    left = info.get('left')
                    if type(left) not in (IntType, LongType) or left < 0:
                        raise ValueError
        if cname == 'completed':
            if type(cinfo) != DictType:
                raise ValueError
            for y in cinfo.values():
                if type(y) not in (IntType, LongType):
                    raise ValueError


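# Scan a directory of .torrent files and return a dict keyed by each torrent's
# info hash (SHA-1 of the bencoded 'info' dict), carrying its name, filename,
# path and total length.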
def parseTorrents(dir):
    import os
    a = { }
    for f in os.listdir(dir):
        if f[-8:] == '.torrent':
            try:
                p = os.path.join(dir, f)
                d = bdecode(open(p, 'rb').read())
                h = sha(bencode(d['info'])).digest()
                i = d['info']
                a[h] = { }
                a[h]['name'] = i.get('name', f)
                a[h]['file'] = f
                a[h]['path'] = p
                l = 0
                if i.has_key('length'):
                    l = i.get('length', 0)
                elif i.has_key('files'):
                    for li in i['files']:
                        if li.has_key('length'):
                            l = l + li['length']
                a[h]['length'] = l
            except:
                print 'Error parsing ' + f, sys.exc_info()[0]
    return a

alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'

def isotime(secs = None):
    if secs == None:
        secs = time()
    return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))

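# Encode a peer as the 6-byte "compact" form used in compact announce
# responses: four raw bytes for the dotted-quad IP followed by the port as a
# big-endian 16-bit integer.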
def compact_peer_info(ip, port):
    return ''.join([ chr(int(i)) for i in ip.split('.') ]) + chr((port & 65280) >> 8) + chr(port & 255)

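# The tracker itself: keeps per-torrent peer state (persisted to dfile),
# serves the HTML info page, /scrape, optional /file torrent downloads,
# favicon.ico and the /announce endpoint, and optionally NAT-checks peers.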
class Tracker:

    def __init__(self, config, rawserver):
        self.response_size = config['response_size']
        self.dfile = config['dfile']
        self.natcheck = config['nat_check']
        self.max_give = config['max_give']
        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        self.only_local_override_ip = config['only_local_override_ip']
        favicon = config['favicon']
        self.favicon = None
        if favicon:
            if isfile(favicon):
                h = open(favicon, 'rb')
                self.favicon = h.read()
                h.close()
            else:
                print '**warning** specified favicon file -- %s -- does not exist.' % favicon
        self.rawserver = rawserver
        self.becache1 = { }
        self.becache2 = { }
        self.cache1 = { }
        self.cache2 = { }
        self.times = { }
        if exists(self.dfile):
            h = open(self.dfile, 'rb')
            ds = h.read()
            h.close()
            tempstate = bdecode(ds)
        else:
            tempstate = { }
        if tempstate.has_key('peers'):
            self.state = tempstate
        else:
            self.state = { }
            self.state['peers'] = tempstate
        self.downloads = self.state.setdefault('peers', { })
        self.completed = self.state.setdefault('completed', { })
        statefiletemplate(self.state)
        for x, dl in self.downloads.items():
            self.times[x] = { }
            for y, dat in dl.items():
                self.times[x][y] = 0
                if not dat.get('nat', 1):
                    ip = dat['ip']
                    gip = dat.get('given ip')
                    if gip and is_valid_ipv4(gip):
                        if not (self.only_local_override_ip) or is_local_ip(ip):
                            ip = gip
                    self.becache1.setdefault(x, { })[y] = Bencached(bencode({
                        'ip': ip,
                        'port': dat['port'],
                        'peer id': y }))
                    self.becache2.setdefault(x, { })[y] = compact_peer_info(ip, dat['port'])

        rawserver.add_task(self.save_dfile, self.save_dfile_interval)
        self.prevtime = time()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
        self.logfile = None
        self.log = None
        if config['logfile'] != '' and config['logfile'] != '-':
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print '# Log Started: ', isotime()
            except:
                print 'Error trying to redirect stdout to log file:', sys.exc_info()[0]
        self.allow_get = config['allow_get']
        if config['allowed_dir'] != '':
            self.allowed_dir = config['allowed_dir']
            self.parse_allowed_interval = config['parse_allowed_interval']
            self.parse_allowed()
        else:
            self.allowed = None
        if unquote('+') != ' ':
            self.uq_broken = 1
        else:
            self.uq_broken = 0
        self.keep_dead = config['keep_dead']

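    # Single HTTP entry point, registered with HTTPHandler: dispatches on the
    # request path to the HTML index page, /scrape, /file (torrent download),
    # /favicon.ico and /announce, returning a (status, message, headers, body)
    # tuple (announces are answered on the connection directly).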
    def get(self, connection, path, headers):
        try:
            (scheme, netloc, path, pars, query, fragment) = urlparse(path)
            if self.uq_broken == 1:
                path = path.replace('+', ' ')
                query = query.replace('+', ' ')
            path = unquote(path)[1:]
            params = { }
            for s in query.split('&'):
                if s != '':
                    i = s.index('=')
                    params[unquote(s[:i])] = unquote(s[i + 1:])
        except ValueError, e:
            return (400, 'Bad Request', {
                'Content-Type': 'text/plain' }, 'you sent me garbage - ' + str(e))

        if path == '' or path == 'index.html':
            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n<html><head><title>BitTorrent download info</title>\n')
            if self.favicon != None:
                s.write('<link rel="shortcut icon" href="/favicon.ico" />\n')
            s.write('</head>\n<body>\n<h3>BitTorrent download info</h3>\n<ul>\n<li><strong>tracker version:</strong> %s</li>\n<li><strong>server time:</strong> %s</li>\n</ul>\n' % (version, isotime()))
            names = self.downloads.keys()
            if names:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0
                ts = 0
                nf = 0
                uc = { }
                ud = { }
                if self.allowed != None and self.show_names:
                    s.write('<table summary="files" border="1">\n<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name in names:
                    l = self.downloads[name]
                    n = self.completed.get(name, 0)
                    tn = tn + n
                    lc = []
                    for i in l.values():
                        if type(i) == DictType:
                            if i['left'] == 0:
                                lc.append(1)
                                uc[i['ip']] = 1
                            else:
                                ud[i['ip']] = 1
                    c = len(lc)
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.allowed != None and self.show_names:
                        if self.allowed.has_key(name):
                            nf = nf + 1
                            sz = self.allowed[name]['length']
                            ts = ts + sz
                            szt = sz * n
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + b2a_hex(name) + '">' + self.allowed[name]['name'] + '</a>'
                            else:
                                linkname = self.allowed[name]['name']
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' % (b2a_hex(name), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' % (b2a_hex(name), c, d, n))
                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i
                if self.allowed != None and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n' % (nf, size_format(ts), len(uc), tc, len(ud), td, tn, ttn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i/%i</td><td align="right">%i/%i</td><td align="right">%i/%i</td></tr>\n' % (nf, len(uc), tc, len(ud), td, tn, ttn))
                s.write('</table>\n<ul>\n<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n<li><em>complete:</em> number of connected clients with the complete file (total: unique IPs/total connections)</li>\n<li><em>downloading:</em> number of connected clients still downloading (total: unique IPs/total connections)</li>\n<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n</ul>\n')
            else:
                s.write('<p>not tracking any files yet...</p>\n')
            s.write('</body>\n</html>\n')
            return (200, 'OK', {
                'Content-Type': 'text/html; charset=iso-8859-1' }, s.getvalue())
        elif path == 'scrape':
            fs = { }
            names = []
            if params.has_key('info_hash'):
                if self.downloads.has_key(params['info_hash']):
                    names = [
                        params['info_hash']]
            else:
                names = self.downloads.keys()
                names.sort()
            for name in names:
                l = self.downloads[name]
                n = self.completed.get(name, 0)
                c = len([ 1 for i in l.values() if i['left'] == 0 ])
                d = len(l) - c
                fs[name] = {
                    'complete': c,
                    'incomplete': d,
                    'downloaded': n }
                if self.allowed is not None and self.allowed.has_key(name) and self.show_names:
                    fs[name]['name'] = self.allowed[name]['name']
            r = {
                'files': fs }
            return (200, 'OK', {
                'Content-Type': 'text/plain' }, bencode(r))
        elif path == 'file' and self.allow_get == 1 and params.has_key('info_hash') and self.allowed.has_key(a2b_hex(params['info_hash'])):
            hash = a2b_hex(params['info_hash'])
            fname = self.allowed[hash]['file']
            fpath = self.allowed[hash]['path']
            return (200, 'OK', {
                'Content-Type': 'application/x-bittorrent',
                'Content-Disposition': 'attachment; filename=' + fname }, open(fpath, 'rb').read())
        elif path == 'favicon.ico' and self.favicon != None:
            return (200, 'OK', {
                'Content-Type': 'image/x-icon' }, self.favicon)
        if path != 'announce':
            return (404, 'Not Found', {
                'Content-Type': 'text/plain',
                'Pragma': 'no-cache' }, alas)

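        # /announce: validate the query parameters (info_hash, 20-byte peer_id,
        # numeric port/uploaded/downloaded/left, optional ip/event/numwant/key)
        # before updating this peer's record.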
        try:
            if not params.has_key('info_hash'):
                raise ValueError, 'no info hash'
            if params.has_key('ip') and not is_valid_ipv4(params['ip']):
                raise ValueError('DNS name or invalid IP address given for IP')
            infohash = params['info_hash']
            if self.allowed != None:
                if not self.allowed.has_key(infohash):
                    return (200, 'OK', {
                        'Content-Type': 'text/plain',
                        'Pragma': 'no-cache' }, bencode({
                        'failure reason': 'Requested download is not authorized for use with this tracker.' }))
            ip = connection.get_ip()
            ip_override = 0
            if params.has_key('ip') and is_valid_ipv4(params['ip']):
                if not (self.only_local_override_ip) or is_local_ip(ip):
                    ip_override = 1
            if params.has_key('event') and params['event'] not in [
                'started',
                'completed',
                'stopped']:
                raise ValueError, 'invalid event'
            port = long(params.get('port', ''))
            uploaded = long(params.get('uploaded', ''))
            downloaded = long(params.get('downloaded', ''))
            left = long(params.get('left', ''))
            myid = params.get('peer_id', '')
            if len(myid) != 20:
                raise ValueError, 'id not of length 20'
            rsize = self.response_size
            if params.has_key('numwant'):
                rsize = min(long(params['numwant']), self.max_give)
        except ValueError, e:
            return (400, 'Bad Request', {
                'Content-Type': 'text/plain' }, 'you sent me garbage - ' + str(e))

        peers = self.downloads.setdefault(infohash, { })
        self.completed.setdefault(infohash, 0)
        ts = self.times.setdefault(infohash, { })
        confirm = 0
        if peers.has_key(myid):
            myinfo = peers[myid]
            if myinfo.has_key('key'):
                if params.get('key') != myinfo['key']:
                    return (200, 'OK', {
                        'Content-Type': 'text/plain',
                        'Pragma': 'no-cache' }, bencode({
                        'failure reason': 'key did not match key supplied earlier' }))
                confirm = 1
            elif myinfo['ip'] == ip:
                confirm = 1
        else:
            confirm = 1
        if params.get('event', '') != 'stopped' and confirm:
            ts[myid] = time()
            if not peers.has_key(myid):
                peers[myid] = {
                    'ip': ip,
                    'port': port,
                    'left': left }
                if params.has_key('key'):
                    peers[myid]['key'] = params['key']
                if params.has_key('ip') and is_valid_ipv4(params['ip']):
                    peers[myid]['given ip'] = params['ip']
                mip = ip
                if ip_override:
                    mip = params['ip']
                if not (self.natcheck) or ip_override:
                    self.becache1.setdefault(infohash, { })[myid] = Bencached(bencode({
                        'ip': mip,
                        'port': port,
                        'peer id': myid }))
                    self.becache2.setdefault(infohash, { })[myid] = compact_peer_info(mip, port)
            else:
                peers[myid]['left'] = left
                peers[myid]['ip'] = ip
            if params.get('event', '') == 'completed':
                self.completed[infohash] = 1 + self.completed[infohash]
            if port == 0:
                peers[myid]['nat'] = 2 ** 30
            elif self.natcheck and not ip_override:
                to_nat = peers[myid].get('nat', -1)
                if to_nat and to_nat < self.natcheck:
                    NatCheck(self.connectback_result, infohash, myid, ip, port, self.rawserver)
            else:
                peers[myid]['nat'] = 0
        elif confirm:
            if peers.has_key(myid):
                if self.becache1[infohash].has_key(myid):
                    del self.becache1[infohash][myid]
                    del self.becache2[infohash][myid]
                del peers[myid]
                del ts[myid]
        data = {
            'interval': self.reannounce_interval }
        if params.get('compact', 0):
            if rsize == 0:
                data['peers'] = ''
            else:
                cache = self.cache2.setdefault(infohash, [])
                if len(cache) < rsize:
                    del cache[:]
                    cache.extend(self.becache2.setdefault(infohash, { }).values())
                    shuffle(cache)
                    del self.cache1.get(infohash, [])[:]
                data['peers'] = ''.join(cache[-rsize:])
                del cache[-rsize:]
        elif rsize == 0:
            data['peers'] = []
        else:
            cache = self.cache1.setdefault(infohash, [])
            if len(cache) < rsize:
                del cache[:]
                cache.extend(self.becache1.setdefault(infohash, { }).values())
                shuffle(cache)
                del self.cache2.get(infohash, [])[:]
            data['peers'] = cache[-rsize:]
            del cache[-rsize:]
        connection.answer((200, 'OK', {
            'Content-Type': 'text/plain',
            'Pragma': 'no-cache' }, bencode(data)))

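    # Callback from NatCheck: record whether the peer accepted an inbound
    # connection, and only add reachable peers to the cached announce responses.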
    def connectback_result(self, result, downloadid, peerid, ip, port):
        record = self.downloads.get(downloadid, { }).get(peerid)
        if record is None or record['ip'] != ip or record['port'] != port:
            return None
        if not record.has_key('nat'):
            record['nat'] = int(not result)
        elif result:
            record['nat'] = 0
        else:
            record['nat'] += 1
        if result:
            self.becache1.setdefault(downloadid, { })[peerid] = Bencached(bencode({
                'ip': ip,
                'port': port,
                'peer id': peerid }))
            self.becache2.setdefault(downloadid, { })[peerid] = compact_peer_info(ip, port)

    def save_dfile(self):
        self.rawserver.add_task(self.save_dfile, self.save_dfile_interval)
        h = open(self.dfile, 'wb')
        h.write(bencode(self.state))
        h.close()

    def parse_allowed(self):
        self.rawserver.add_task(self.parse_allowed, self.parse_allowed_interval * 60)
        self.allowed = parseTorrents(self.allowed_dir)

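    # Periodic task: drop peers that have not announced since the previous
    # pass, and (unless keep_dead is set) forget torrents with no peers left.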
    def expire_downloaders(self):
        for x in self.times.keys():
            for myid, t in self.times[x].items():
                if t < self.prevtime:
                    if self.becache1.get(x, { }).has_key(myid):
                        del self.becache1[x][myid]
                        del self.becache2[x][myid]
                    del self.times[x][myid]
                    del self.downloads[x][myid]
        self.prevtime = time()
        if self.keep_dead != 1:
            for key, value in self.downloads.items():
                if len(value) == 0:
                    del self.times[key]
                    del self.downloads[key]
        self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)


def is_valid_ipv4(ip):
    try:
        x = compact_peer_info(ip, 0)
        if len(x) != 6:
            return False
    except (ValueError, IndexError):
        return False
    return True


def is_local_ip(ip):
    try:
        v = [ long(x) for x in ip.split('.') ]
        if v[0] == 172 and v[1] >= 16 and v[1] <= 31:
            return 1
    except ValueError:
        return 0


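# Command-line entry point: with no arguments print the option help,
# otherwise parse the options, start a RawServer, and run the tracker until
# the server shuts down, saving state on the way out.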
def track(args):
    if len(args) == 0:
        print formatDefinitions(defaults, 80)
        return None
    try:
        (config, files) = parseargs(args, defaults, 0, 0)
    except ValueError, e:
        print 'error: ' + str(e)
        print 'run with no arguments for parameter explanations'
        return None
    r = RawServer(Event(), config['timeout_check_interval'], config['socket_timeout'])
    t = Tracker(config, r)
    r.bind(config['port'], config['bind'], True)
    r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
    t.save_dfile()
    print '# Shutting down: ' + isotime()


def size_format(s):
    if s < 1024:
        r = str(s) + 'B'
    elif s < 1048576:
        r = str(int(s / 1024)) + 'KiB'
    elif s < 0x40000000L:
        r = str(int(s / 1048576)) + 'MiB'
    elif s < 0x10000000000L:
        r = str(int((s / 1073741824.0) * 100.0) / 100.0) + 'GiB'
    else:
        r = str(int((s / 1099511627776.0) * 100.0) / 100.0) + 'TiB'
    return r