'''
  proxy.py - HTTP proxy daemon service for sagator.

  (c) 2004-2009 Jan ONDREJ (SAL)

  This program is free software; you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation; either version 2 of the License, or
  (at your option) any later version.
'''

import sys, os, re, socket
from avlib import BytesIO
from aglib import *

__all__=['http_proxy']

BUF_SIZE=16386

HTTP_REPORT='''\
HTTP/1.1 450 Virus Found
Server: SAGATOR
Connection: close
Content-Type: text/html; charset=iso-8859-2

450 Virus Found

Virus Found !!!

URL: %s
Virus name: %s [%f, %s]
Antivir output: %s

'''
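
# Illustration only (added comment, not part of the original template): the
# HTTP_REPORT page above takes five substitutions -- scanned URL, virus name,
# detection level, scanner name and raw scanner output -- in that order,
# matching the sendall() call in http_proxy.scandata() below. The values in
# this example are made up:
#
#   HTTP_REPORT % ('http://www.example.org:80/eicar.com',
#                  'Eicar-Test-Signature', 1.0, 'clamd()',
#                  'stream: Eicar-Test-Signature FOUND')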

HTTP_ERROR='''\
HTTP/1.1 %03d %s
Server: SAGATOR
Connection: close
Content-Type: text/html; charset=iso-8859-2

%03d %s

%s

%s

'''
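
# Illustration only (added comment): the request-line patterns defined below
# are meant to match both proxy-style and origin-style request lines, e.g.
#
#   GET http://www.example.org:8080/index.html HTTP/1.1
#   GET /index.html HTTP/1.1
#
# reg_http_whost() captures (method, proto, host, ':port' or '', path, version)
# and reg_http_nohost() captures (method, path, version); accept() below
# unpacks exactly these groups.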

METHODS = r'(GET|HEAD|POST|PUT|DELETE)'
HOST_PORT = r'([a-zA-Z0-9.-]+)(:[0-9]+|)'
URI = r'(http|ftp)://'+HOST_PORT+'/([^ ]*)'
DIR = r'(/[^ ]*)'
HTTP_VER = r'HTTP/([0-9]+\.[0-9]+)'

reg_http_whost=re.compile(r'^'+METHODS+r' +'+URI+r' +'+HTTP_VER+r'\r?$').search
reg_http_nohost=re.compile(r'^'+METHODS+r' +'+DIR+r' +'+HTTP_VER+r'\r?$').search
reg_hostport=re.compile(HOST_PORT).search
reg_hdr_line=re.compile(r'^([!-9;-~]+): *(.+?)\r?$', re.M).search
reg_chunk=re.compile(r'^([0-9A-Fa-f]+)(;.*?|)\r?$').search
reg_response=re.compile(r'^'+HTTP_VER+r' +([0-9]{3}) +(.*)$').search

class http_proxy(service):
    '''
    HTTP proxy service (experimental).

    !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    !!! WARNING! This service is experimental! Use at your own risk! !!!
    !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    This service can be used as a filtering HTTP proxy.

    Usage:
        http_proxy(scanners, host, port, prefork=2)
    Where:
        scanners is an array of scanners (see README.scanners for more info)
        host is an IP address to bind to
        port is a port to bind to
        prefork is a number which defines the preforked process count

    Example:
        http_proxy(SCANNERS, '127.0.0.1', 3128)

    Warning! Do not forget to block access to this port for non-local users.
    '''
    name='http_proxy()'
    MAX_SIZE=2*1024*1024

    def http_error(self,code=500,s='Internal Server Error',desc=''):
        self.conn.sendall(HTTP_ERROR % (code,s,code,s,s,desc))
        self.conn.shutdown(socket.SHUT_RDWR)

    def scandata(self):
        globals.reset()
        for scnr in self.SCANNERS:
            level,virname,ret=scnr.scanbuffer(mail.data)
            scnr.destroy()
            if is_infected(level):
                debug.echo(3,'%s: STATUS: %s [%f,%s]' % \
                    (self.name,virname,level,globals.found_by.name))
                break
        if is_infected(level):
            self.conn.sendall(HTTP_REPORT % \
                (mail.sender,virname,level,globals.found_by.name,str(ret)))
            return False
        else:
            return True
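
    # Overview of accept() below (comment added for readability; it summarizes
    # the code that follows rather than specifying new behaviour):
    #   1. read the request line and parse it (absolute URI or plain path form),
    #   2. read the request header and derive host/port from the Host: header,
    #   3. connect to the origin server and forward the rewritten request
    #      (including POST data when Content-Length is given),
    #   4. read the reply header, then the body: chunked replies are
    #      reassembled, others are read until close or by Content-Length,
    #   5. bodies up to MAX_SIZE are scanned via scandata() and either relayed
    #      or replaced by the HTTP_REPORT page; larger bodies are relayed
    #      unscanned.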

    def accept(self,connects=0):
        if connects==0:
            self.conn,self.addr=self.s.accept()
            self.conn.settimeout(300)
            self.connf=self.conn.makefile('rw',0)
        else:
            debug.echo(7,"Persistent connection: ",connects)
            #self.conn.shutdown(socket.SHUT_RDWR)
            # PERSISTENT CONNECTIONS ARE NOT WORKING ???
        req=self.connf.readline()
        # Connection closed? Last request?
        if not req:
            return
        debug.echo(3,self.name+": "+req.strip())
        r_http=reg_http_whost(req)
        if r_http:
            method,proto,host,port,path,version_s=r_http.groups()
            if port:
                port=int(port[1:])
            else:
                port=80
        else:
            r_http=reg_http_nohost(req)
            if r_http:
                method,path,version_s=r_http.groups()
                proto='http'
                proto,host,port=('http',None,80)
            else:
                debug.echo(1,self.name+": BAD REQUEST! [%s]" % req.strip())
                self.http_error(desc="Bad Request")
                return
        # method,proto,host,port,path,version defined now
        version=float(version_s)
        # Receive the request header
        hdr = ''
        phdr = {'connection': '', 'proxy-connection': ''}
        while True:
            l=self.connf.readline()
            if not l:
                debug.echo(1,self.name+": client connection closed by peer")
                self.conn.shutdown(socket.SHUT_RDWR)
                return
            if (l=='\r') or (l=='\r\n'):  # end of request header
                break
            hdr+=l
            r=reg_hdr_line(l)
            if r:
                phdr[r.group(1).lower()] = r.group(2)
        # is there a Host: line in the header?
        add_into_header=''
        if 'host' in phdr:
            r=reg_hostport(phdr['host'])
            if r:
                host,port=r.groups()
                if port:
                    port=int(port[1:])  # strip the leading ':'
                else:
                    port=80
        else:
            if port==80:
                add_into_header+='Host: %s\r\n' % host
            else:
                add_into_header+='Host: %s:%d\r\n' % (host,port)
        # Is host defined?
        if not host:
            debug.echo(1,self.name+": BAD REQUEST! [%s]" % req.strip())
            self.http_error(desc="Bad Request, host not defined!")
            return
        # check for content length and transfer encoding
        content_length=0
        transfer_encoding=None
        if method.upper()=='POST':
            if 'content-length' in phdr:
                content_length=int(phdr['content-length'])
            if 'transfer-encoding' in phdr:
                transfer_encoding=phdr['transfer-encoding'].strip()
        # download this page
        newreq='%s /%s HTTP/%s\r\n%s%s\r\n' % \
            (method,path,version_s,hdr,add_into_header)
        debug.echo(6,[req,newreq])
        try:
            cli=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
            cli.settimeout(180)
            cli.connect((host,port))
        except socket.error as err:
            (ec,es) = err.args
            self.http_error(desc=es)
            return
        if content_length>0:
            post_data=self.conn.recv(content_length)
            debug.echo(4,self.name+": POST DATA: "+str(post_data))
        else:
            post_data=''
        cli.send(newreq+post_data)
        cf=cli.makefile('rw',0)
        # read the reply (the status line is re-read on "100 Continue")
        while True:
            rhdr=cf.readline()
            r_response=reg_response(rhdr.strip())
            if not r_response:
                debug.echo(1,self.name+": Wrong response: %s " % rhdr.strip())
            resp_code=r_response.group(2)
            debug.echo(4,"Response: ",rhdr.strip())
            # read the reply header
            preply_hdr={'connection':'', 'content-length':'0',
                        'transfer-encoding':''}
            while True:
                l=cf.readline()
                rhdr+=l
                if (l=='\r') or (l=='\r\n'):  # end of reply header
                    break
                r=reg_hdr_line(l)
                if r:
                    preply_hdr[r.group(1).lower()]=r.group(2)
            if resp_code=="100":  # 100 Continue
                self.conn.sendall(rhdr)
            else:
                break
        content_length=int(preply_hdr['content-length'])
        oversized=0
        debug.echo(6,"reply_header: ",rhdr)
        # is the reply chunk-encoded?
        if preply_hdr['transfer-encoding'][:7]=='chunked':
            debug.echo(5,"Transfer-encoding: chunked")
            data=BytesIO()
            while True:
                l=cf.readline()
                # connection closed?
                if not l:
                    break
                # next chunk?
                r_chunk=reg_chunk(l.strip())
                if r_chunk:
                    chunk_size=int(r_chunk.group(1),16)
                    debug.echo(7,'chunk: ',chunk_size)
                    if chunk_size==0:
                        cli.shutdown(socket.SHUT_RDWR)
                        break
                    data.write(cf.read(chunk_size))
                    # read a plain CRLF
                    cf.readline()
                    # MAX_SIZE reached?
                    if (oversized+data.tell())>self.MAX_SIZE:
                        oversized+=data.tell()
                        self.conn.sendall("%s%X\r\n" % (rhdr,len(mail.data)))
                        self.conn.sendall(data.getvalue())
                        data=BytesIO()
                else:
                    debug.echo(1,self.name+": ERROR! Chunk expected! [%s]" % l.strip())
                    break
            if oversized==0:
                mail.data=data.getvalue()
                mail.sender="%s://%s:%d/%s" % (proto,host,port,path)
                mail.recip=[self.addr[0],str(self.addr[1])]
                del data
                debug.echo(9,'DATA:',mail.data)
                if self.scandata():  # clean?
                    self.conn.sendall("%s%X\r\n" % (rhdr,len(mail.data)))
                    self.conn.sendall(mail.data)
            else:
                debug.echo(4,self.name+": scanning skipped, oversized data!")
            # accept the next REQUEST if the connection was not closed
            if l:
                if preply_hdr['connection']!="close":
                    if phdr['proxy-connection']!="close":
                        self.accept(connects+1)
        else:
            mail.sender="%s://%s:%d/%s" % (proto,host,port,path)
            mail.recip=[self.addr[0],str(self.addr[1])]
            data=BytesIO()
            if preply_hdr['connection']=="close":
                debug.echo(5,"Connection: close")
                while True:
                    d=cli.recv(BUF_SIZE)
                    if not d:  # connection closed
                        break
                    if (oversized+data.tell())<=self.MAX_SIZE:
                        data.write(d)
                    else:
                        if oversized==0:
                            self.conn.sendall(rhdr+data.getvalue())
                            oversized=data.tell()
                            data=BytesIO()  # clear buffer
                        self.conn.sendall(d)
                        oversized+=len(d)
            else:
                debug.echo(5,"Connection: length based")
                while data.tell()