path: root/proxy.py
#!/usr/bin/python -t
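#magicproxy: a small HTTP proxy that splits large GET responses into byte
#ranges and downloads them in parallel through several upstream proxies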
import string, time, md5, random
import asynchat, asyncore, socket, httplib, urlparse
from heapq import heappush, heappop
import cStringIO as StringIO


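#upstream proxies that the byte ranges are spread across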
ENDPOINTS = [
    ('10.2.2.11', 8888),
#    ('10.3.1.2', 8888),
    ('10.1.1.156', 8888),
]

kB = 1024
#minimum entity size to start a parallel fetch
THRESHOLD = 512 * kB
#block size of the first fetch-range
INIT_BLOCKSIZE = 512 * kB
#lower bound for the fetch-range block size optimization
MIN_BLOCKSIZE = 512 * kB
#target time (seconds) each fetcher should spend on its range, calculated
#from the speed measured while fetching the previous block
TIME_SLICE = 5
#start a new fetcher on an endpoint when the running one is this many
#bytes away from finishing its range
FETCHER_JUMPSTART = 32 * kB

#################

class Fetcher(asynchat.async_chat):
    def __init__(self, reader, proxy, url, headers, range):
        self.reader = reader
        self.proxy = proxy
        self.url = url
        self.headers = headers
        self.range = range

        self.pos = self.range[0] if self.range[0] != -1 else 0
        self.start_time = 0
        self.stop_time = 0
        self.http_status = ""
        self.http_header = StringIO.StringIO()
        self.state = 0 #0=status, 1=header, 2=body
 
        asynchat.async_chat.__init__(self)
        self.set_terminator("\r\n")
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect(self.proxy)

    def __str__(self):
        return "<Fetcher proxy=%s url=%s range=%s>" % (self.proxy, urlparse.urlunparse(self.url), self.range)

    def handle_connect (self):
        print self, "Start"
        self.send("GET http://%s:%s%s HTTP/1.0\r\n" % ( self.url.hostname, self.url.port or 80, self.url.path ))
        for key in filter(lambda k: k != "range", self.headers.keys()): #forward the original request headers, except Range
            self.send("%s: %s\r\n" % (key, self.headers[key]))
        if self.range != (-1,-1):
            self.send("Range: bytes=%s-%s\r\n" % (self.range[0], self.range[1]))
        self.send("\r\n")
        self.start_time = time.time()

    def time(self):
        if self.stop_time == 0:
            return time.time() - self.start_time
        else:
            return self.stop_time - self.start_time

    def speed(self):
        return (self.pos - self.range[0]) / self.time()

    def collect_incoming_data(self, data):
        if self.state == 2: #body
            if self.range != (-1,-1) and self.pos + len(data) > self.range[1] + 1:
                #the first fetcher is started without a Range header and gets
                #its range assigned later, so the server may send more than we
                #are responsible for -- drop everything beyond the range end
                data = data[:self.range[1] + 1 - self.pos]
            length = len(data)
            self.reader.handle_incoming_data(self, data, length)
            self.pos += length
            if self.range != (-1,-1) and self.pos > self.range[1]:
                #the requested range is complete
                self.stop_time = time.time()
                print self, "finished with %s kB/s" % (self.speed() / 1024)
                #make sure the next fetcher will be started
                self.reader.handle_incoming_data(self)
                self.close()
        elif self.state == 1: #header
            self.http_header.write(data)
        else: #status
            self.http_status += data

    def found_terminator(self):
        if self.state == 0: #got status-line
            self.state = 1
            self.set_terminator("\r\n\r\n")
        elif self.state == 1: #got headers
            self.state = 2
            self.set_terminator(None)
            self.reader.handle_incoming_http_header(self, self.http_header)
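
    def handle_close(self):
        #the upstream proxy closed the connection; note the time and tell
        #the reader so it can clean up (and, for non-ranged fetches, close
        #the client channel)
        self.stop_time = time.time()
        self.close()
        self.reader.handle_incoming_data(self)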

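#MagicHTTPProxyClient answers one client request: it decides whether the
#entity is big enough for a parallel fetch, assigns byte ranges to Fetchers
#on the configured ENDPOINTS and reassembles their blocks in order before
#pushing them to the client channel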
class MagicHTTPProxyClient(object):
    def __init__(self, channel, url, header):
        self.channel = channel
        self.url = url
        self.header = header

        self.content_length = -1
        self.header_sent = False
        self.fetch_pos = 0
        self.write_pos = 0
        self.buffer = ""
        self.blocks = list()
        self.fetchers = list()

        print self, "New Instance"

        proxy = random.choice(ENDPOINTS)
        self.fetchers.append( Fetcher(self, proxy, self.url, self.header, (-1,-1)) )

    def __str__(self):
        return "<MagicHTTPProxyClient url=%s content_length=%s>" % (urlparse.urlunparse(self.url), self.content_length)

    def handle_incoming_data(self, fetcher, data=None, length=0):
        if not data:
            #fetcher is done, remove from list
            self.fetchers = filter(lambda f: f != fetcher, self.fetchers)
        else:
            heappush(self.blocks, (fetcher.pos, data, length))

        if not self.channel.connected:
            print self, "request side closed the connection"
            return

        if fetcher.range != (-1,-1) \
                and fetcher.range[1] - fetcher.pos < FETCHER_JUMPSTART \
                and self.fetch_pos < self.content_length and self.channel.connected \
                and len( filter(lambda f: f.proxy == fetcher.proxy, self.fetchers) ) < 2:
            #this fetcher is within FETCHER_JUMPSTART bytes of finishing its
            #range, so start the next fetcher on the same endpoint now
            blocksize = max(int(TIME_SLICE * fetcher.speed()), MIN_BLOCKSIZE)
            fetch_range = self.next_range(blocksize)
            print "Start new Fetcher, bs=%s range=%s" % (blocksize, fetch_range)
            self.fetchers.append( Fetcher(self, fetcher.proxy, self.url, self.header, fetch_range) )

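        #flush every block that continues exactly at the current write
        #position; blocks behind a gap stay in the heap until the gap is filled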
        buf = ""
        while len(self.blocks)>0 and min(self.blocks)[0] == self.write_pos:
            item = heappop(self.blocks)
            buf += item[1]
            self.write_pos += item[2]
        if len(self.blocks)>0:
            print "missed: %s => %s" % (self.write_pos, min(self.blocks)[0])
        if buf != "":
            self.channel.push(buf)

        if self.content_length == -1:
            #not fetched in parallel: close the client channel as soon as
            #the single fetcher is done
            if not self.fetchers:
                self.channel.close_when_done()
        elif self.write_pos >= self.content_length:
            print self, "job done %s blocks left" % len(self.blocks)
            self.channel.close_when_done()

    def next_range(self, suggested_blocksize):
        assert self.content_length != -1
        start = self.fetch_pos
        self.fetch_pos = min(self.fetch_pos + suggested_blocksize, self.content_length)
        return (start, self.fetch_pos-1)

    def handle_incoming_http_header(self, fetcher, header):
        if not self.header_sent:
            self.header_sent = True

            # Sends header from first response
            header.seek(0)
            headers = httplib.HTTPMessage(header)

            if "content-length" in headers.dict:
                content_length = int(headers.dict["content-length"])
                if content_length >= THRESHOLD:
                    self.content_length = content_length
                    fetcher.range = self.next_range(INIT_BLOCKSIZE)
                    for proxy in filter(lambda p: fetcher.proxy != p, ENDPOINTS):
                        if self.fetch_pos >= self.content_length:
                            break
                        self.fetchers.append(Fetcher( self, proxy, self.url, self.header, self.next_range(INIT_BLOCKSIZE)))

            else:
                content_length = None

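            #the client always gets a plain 200 with the complete entity,
            #no matter how many ranged requests are used upstream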
            buf = "HTTP/1.1 200 OK\r\n"
            for key in filter(lambda k: k not in ("content-range", "content-length"), headers.dict.keys()):
                buf += "%s: %s\r\n" % (key, headers.dict[key])
            if content_length:
                buf += "Content-Length: %s\r\n" % content_length
                buf += "Content-Range: bytes %s-%s/%s\r\n" % (0, content_length-1, content_length)
            buf += "X-Proxy: Magicproxy (superpower activated)\r\n"
            buf += "\r\n"
            self.channel.push(buf)

        
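#HTTPChannel parses the request line and headers of one client connection
#and hands the request over to the HTTPProxyServer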
class HTTPChannel(asynchat.async_chat):
    def __init__(self, server, sock, addr):
        self.server = server

        self.data = StringIO.StringIO()
        self.request = None

        asynchat.async_chat.__init__(self, sock)
        self.set_terminator("\r\n\r\n")

    def handle_close(self):
        self.close()

    def collect_incoming_data(self, data):
        self.data.write(data)
        if self.data.tell() > 16384:
            self.close_when_done()

    def found_terminator(self):
        # parse http header
        self.data.seek(0)
        self.request = string.split(self.data.readline(), None, 2)
        if len(self.request) != 3:
            # badly formed request; just shut down
            self.close_when_done()
        else:
            self.set_terminator(None)
            headers = httplib.HTTPMessage(self.data).dict
            self.server.handle_request(self, self.request[0], self.request[1], headers)
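
#NOTE: HTTPProxyClient is referenced by HTTPProxyServer._bypass_request()
#below but was not defined in this file.  The class here is only a minimal
#sketch of the missing piece: it forwards the request to a single upstream
#proxy and relays the raw response back to the client channel.
class HTTPProxyClient(asynchat.async_chat):
    def __init__(self, proxy, channel, method, url, headers):
        self.channel = channel
        self.method = method
        self.url = url
        self.headers = headers

        asynchat.async_chat.__init__(self)
        #no terminator: relay whatever the upstream proxy sends
        self.set_terminator(None)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect(proxy)

    def handle_connect(self):
        #HTTP/1.0 keeps the relaying simple: no chunked encoding, the
        #upstream proxy closes the connection when the response is complete
        self.send("%s %s HTTP/1.0\r\n" % (self.method, urlparse.urlunparse(self.url)))
        for key in filter(lambda k: k not in ("connection", "proxy-connection", "keep-alive"), self.headers.keys()):
            self.send("%s: %s\r\n" % (key, self.headers[key]))
        self.send("\r\n")

    def collect_incoming_data(self, data):
        self.channel.push(data)

    def handle_close(self):
        self.close()
        self.channel.close_when_done()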
            

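#HTTPProxyServer accepts client connections on port 8080 and dispatches
#each parsed request either to a MagicHTTPProxyClient (plain GET) or to the
#bypass path (everything else)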
class HTTPProxyServer(asyncore.dispatcher):
    def __init__(self):
        self.port = 8080

        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind(("", self.port))
        self.listen(15)

    def __str__(self):
        return "<HTTPProxyServer port=%s>" % self.port

    def handle_accept(self):
        conn, addr = self.accept()
        HTTPChannel(self, conn, addr)

    def handle_request(self, channel, method, path, headers):
        url = urlparse.urlparse(path)
        if method != "GET" or url.query != "":
            #do not handle non-GET or GET with Query (?foo=bla) requests
            return self._bypass_request(channel, method, url, headers)
        else:
            MagicHTTPProxyClient(channel, url, headers)
        
    def _bypass_request(self, channel, method, url, headers):
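        #requests that are not handled in parallel are pinned to one
        #endpoint, chosen by a hash of the hostname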
        proxy = ENDPOINTS[ int( md5.md5(url.hostname).hexdigest(),16 ) % len(ENDPOINTS) ]
        print self, "_bypass request via %s: %s %s" % (proxy, method, urlparse.urlunparse(url))
        HTTPProxyClient(proxy, channel, method, url, headers)
        
if __name__ == "__main__":
    proxy = HTTPProxyServer()
    asyncore.loop()