thirdparty/google_appengine/google/appengine/api/urlfetch_stub.py
changeset 2413 d0b7dac5325c
parent 2309 be1b94099f2d
child 2864 2e0b0af889be
2412:c61d96e72e6f 2413:d0b7dac5325c
@@ -17,13 +17,15 @@
 
 """Stub version of the urlfetch API, based on httplib."""
 
 
 
+import gzip
 import httplib
 import logging
 import socket
+import StringIO
 import urllib
 import urlparse
 
 from google.appengine.api import apiproxy_stub
 from google.appengine.api import urlfetch
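The two imports added here exist purely to support the gzip handling introduced further down: StringIO wraps the already-read response bytes in a file-like object and gzip.GzipFile inflates them. A minimal round-trip sketch of that idiom (not part of the changeset, and assuming Python 2, which the stub's httplib/StringIO usage already implies):

    import gzip
    import StringIO

    def gzip_compress(data):
      # Write `data` through a GzipFile into an in-memory buffer.
      buf = StringIO.StringIO()
      gzip_file = gzip.GzipFile(fileobj=buf, mode='wb')
      gzip_file.write(data)
      gzip_file.close()
      return buf.getvalue()

    def gzip_decompress(data):
      # The same idiom the stub now applies to 'Content-Encoding: gzip' bodies.
      return gzip.GzipFile(fileobj=StringIO.StringIO(data)).read()

    assert gzip_decompress(gzip_compress('hello urlfetch')) == 'hello urlfetch'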
@@ -162,20 +164,28 @@
       if not host and not protocol:
         host = last_host
         protocol = last_protocol
 
       adjusted_headers = {
-        'Host': host,
-        'Accept': '*/*',
+          'User-Agent':
+          'AppEngine-Google; (+http://code.google.com/appengine)',
+          'Referer': 'http://localhost/',
+          'Host': host,
+          'Accept-Encoding': 'gzip',
       }
       if payload is not None:
         adjusted_headers['Content-Length'] = len(payload)
       if method == 'POST' and payload:
         adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'
 
       for header in headers:
-        adjusted_headers[header.key().title()] = header.value()
+        if header.key().title().lower() == 'user-agent':
+          adjusted_headers['User-Agent'] = (
+              '%s %s' %
+              (header.value(), adjusted_headers['User-Agent']))
+        else:
+          adjusted_headers[header.key().title()] = header.value()
 
       logging.debug('Making HTTP request: host = %s, '
                     'url = %s, payload = %s, headers = %s',
                     host, url, payload, adjusted_headers)
       try:
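This hunk widens the default request headers (an AppEngine-Google User-Agent, a Referer, the Host, and Accept-Encoding: gzip, replacing the old Host/Accept pair) and merges a caller-supplied User-Agent into the default instead of overwriting it. A self-contained sketch of that merging behaviour; it uses plain (name, value) tuples instead of the stub's protocol-buffer header objects (header.key()/header.value()), an assumption made only to keep the example runnable on its own:

    _DEFAULT_USER_AGENT = 'AppEngine-Google; (+http://code.google.com/appengine)'

    def build_adjusted_headers(host, request_headers):
      # Defaults mirroring the new hunk above.
      adjusted_headers = {
          'User-Agent': _DEFAULT_USER_AGENT,
          'Referer': 'http://localhost/',
          'Host': host,
          'Accept-Encoding': 'gzip',
      }
      for name, value in request_headers:
        if name.lower() == 'user-agent':
          # The caller's User-Agent is prepended to, not substituted for, the default.
          adjusted_headers['User-Agent'] = '%s %s' % (value, adjusted_headers['User-Agent'])
        else:
          adjusted_headers[name.title()] = value
      return adjusted_headers

    # build_adjusted_headers('example.com', [('User-Agent', 'MyClient/1.0')])['User-Agent']
    # -> 'MyClient/1.0 AppEngine-Google; (+http://code.google.com/appengine)'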
@@ -217,12 +227,21 @@
           logging.error(error_msg)
           raise apiproxy_errors.ApplicationError(
               urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
       else:
         response.set_statuscode(http_response.status)
+        if http_response.getheader('content-encoding') == 'gzip':
+          gzip_stream = StringIO.StringIO(http_response_data)
+          gzip_file = gzip.GzipFile(fileobj=gzip_stream)
+          http_response_data = gzip_file.read()
         response.set_content(http_response_data[:MAX_RESPONSE_SIZE])
         for header_key, header_value in http_response.getheaders():
+          if (header_key.lower() == 'content-encoding' and
+              header_value == 'gzip'):
+            continue
+          if header_key.lower() == 'content-length':
+            header_value = len(response.content())
           header_proto = response.add_header()
           header_proto.set_key(header_key)
           header_proto.set_value(header_value)
 
         if len(http_response_data) > MAX_RESPONSE_SIZE:
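With Accept-Encoding: gzip now sent on every request, the response path has to undo the encoding: a gzip body is inflated before truncation to MAX_RESPONSE_SIZE, the stale content-encoding header is dropped, and content-length is rewritten to the decoded (and possibly truncated) size. A sketch of that logic against an httplib-style response whose body has already been read; MAX_RESPONSE_SIZE is given a placeholder value here because the real constant is defined elsewhere in the stub:

    import gzip
    import StringIO

    MAX_RESPONSE_SIZE = 2 ** 24  # placeholder; the stub defines its own limit

    def decode_response(http_response, http_response_data):
      """Return (headers, body) with gzip undone and lengths kept consistent."""
      if http_response.getheader('content-encoding') == 'gzip':
        http_response_data = gzip.GzipFile(
            fileobj=StringIO.StringIO(http_response_data)).read()
      body = http_response_data[:MAX_RESPONSE_SIZE]

      headers = []
      for header_key, header_value in http_response.getheaders():
        if header_key.lower() == 'content-encoding' and header_value == 'gzip':
          continue  # the body handed back is no longer gzip-encoded
        if header_key.lower() == 'content-length':
          header_value = len(body)  # reflect the decoded, truncated body
        headers.append((header_key, header_value))
      return headers, body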
@@ -243,8 +262,8 @@
       headers: list of string pairs, first is header name and the second is header's value
     """
     prohibited_headers = [h.key() for h in headers
                           if h.key().lower() in untrusted_headers]
     if prohibited_headers:
-      logging.warn("Stripped prohibited headers from URLFetch request: %s",
+      logging.warn('Stripped prohibited headers from URLFetch request: %s',
                    prohibited_headers)
     return (h for h in headers if h.key().lower() not in untrusted_headers)
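The last hunk is only a quoting change to the warning message, but the surrounding context is the helper that strips untrusted headers from outgoing requests: it logs the names it refuses to forward and returns only the remaining headers. A sketch of the same filtering with plain tuples (and a list instead of the stub's generator); the untrusted set in the usage note is hypothetical:

    import logging

    def sanitize_headers(untrusted_headers, headers):
      # `untrusted_headers` holds lower-cased header names that must not be forwarded.
      prohibited = [name for name, _ in headers if name.lower() in untrusted_headers]
      if prohibited:
        logging.warn('Stripped prohibited headers from URLFetch request: %s',
                     prohibited)
      return [(name, value) for name, value in headers
              if name.lower() not in untrusted_headers]

    # sanitize_headers(frozenset(['host']), [('Host', 'example.com'), ('Accept', '*/*')])
    # -> [('Accept', '*/*')]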