
Source Code for Module googleapiclient.http

   1  # Copyright 2014 Google Inc. All Rights Reserved. 
   2  # 
   3  # Licensed under the Apache License, Version 2.0 (the "License"); 
   4  # you may not use this file except in compliance with the License. 
   5  # You may obtain a copy of the License at 
   6  # 
   7  #      http://www.apache.org/licenses/LICENSE-2.0 
   8  # 
   9  # Unless required by applicable law or agreed to in writing, software 
  10  # distributed under the License is distributed on an "AS IS" BASIS, 
  11  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
  12  # See the License for the specific language governing permissions and 
  13  # limitations under the License. 
  14   
  15  """Classes to encapsulate a single HTTP request. 
  16   
  17  The classes implement a command pattern, with every 
  18  object supporting an execute() method that does the 
   19  actual HTTP request. 
  20  """ 
  21  from __future__ import absolute_import 
  22  import six 
  23  from six.moves import range 
  24   
  25  __author__ = 'jcgregorio@google.com (Joe Gregorio)' 
  26   
  27  from six import BytesIO, StringIO 
  28  from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote 
  29   
  30  import base64 
  31  import copy 
  32  import gzip 
  33  import httplib2 
  34  import json 
  35  import logging 
  36  import mimetypes 
  37  import os 
  38  import random 
  39  import ssl 
  40  import sys 
  41  import time 
  42  import uuid 
  43   
  44  from email.generator import Generator 
  45  from email.mime.multipart import MIMEMultipart 
  46  from email.mime.nonmultipart import MIMENonMultipart 
  47  from email.parser import FeedParser 
  48   
  49  from googleapiclient import mimeparse 
  50  from googleapiclient.errors import BatchError 
  51  from googleapiclient.errors import HttpError 
  52  from googleapiclient.errors import InvalidChunkSizeError 
  53  from googleapiclient.errors import ResumableUploadError 
  54  from googleapiclient.errors import UnexpectedBodyError 
  55  from googleapiclient.errors import UnexpectedMethodError 
  56  from googleapiclient.model import JsonModel 
  57  from oauth2client import util 
  58   
  59   
  60  DEFAULT_CHUNK_SIZE = 512*1024 
  61   
  62  MAX_URI_LENGTH = 2048 
63 64 65 -def _retry_request(http, num_retries, req_type, sleep, rand, uri, method, *args, 66 **kwargs):
67 """Retries an HTTP request multiple times while handling errors. 68 69 If after all retries the request still fails, last error is either returned as 70 return value (for HTTP 5xx errors) or thrown (for ssl.SSLError). 71 72 Args: 73 http: Http object to be used to execute request. 74 num_retries: Maximum number of retries. 75 req_type: Type of the request (used for logging retries). 76 sleep, rand: Functions to sleep for random time between retries. 77 uri: URI to be requested. 78 method: HTTP method to be used. 79 args, kwargs: Additional arguments passed to http.request. 80 81 Returns: 82 resp, content - Response from the http request (may be HTTP 5xx). 83 """ 84 resp = None 85 for retry_num in range(num_retries + 1): 86 if retry_num > 0: 87 sleep(rand() * 2**retry_num) 88 logging.warning( 89 'Retry #%d for %s: %s %s%s' % (retry_num, req_type, method, uri, 90 ', following status: %d' % resp.status if resp else '')) 91 92 try: 93 resp, content = http.request(uri, method, *args, **kwargs) 94 except ssl.SSLError: 95 if retry_num == num_retries: 96 raise 97 else: 98 continue 99 if resp.status < 500: 100 break 101 102 return resp, content
103
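
To illustrate how the helper above is meant to be driven (a minimal sketch; the URI and the plain httplib2.Http() instance are placeholders, not part of this module):

    import random
    import time

    import httplib2

    # Retry a GET up to three times with randomized exponential backoff.
    # sleep and rand are passed in explicitly so that tests can stub them,
    # exactly as the upload and download code later in this module does.
    http = httplib2.Http()
    resp, content = _retry_request(
        http, 3, 'example request', time.sleep, random.random,
        'https://www.googleapis.com/discovery/v1/apis', 'GET')
    print(resp.status)
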
104 105 -class MediaUploadProgress(object):
106 """Status of a resumable upload.""" 107
108 - def __init__(self, resumable_progress, total_size):
109 """Constructor. 110 111 Args: 112 resumable_progress: int, bytes sent so far. 113 total_size: int, total bytes in complete upload, or None if the total 114 upload size isn't known ahead of time. 115 """ 116 self.resumable_progress = resumable_progress 117 self.total_size = total_size
118
119 - def progress(self):
120 """Percent of upload completed, as a float. 121 122 Returns: 123 the percentage complete as a float, returning 0.0 if the total size of 124 the upload is unknown. 125 """ 126 if self.total_size is not None: 127 return float(self.resumable_progress) / float(self.total_size) 128 else: 129 return 0.0
130
131 132 -class MediaDownloadProgress(object):
133 """Status of a resumable download.""" 134
135 - def __init__(self, resumable_progress, total_size):
136 """Constructor. 137 138 Args: 139 resumable_progress: int, bytes received so far. 140 total_size: int, total bytes in complete download. 141 """ 142 self.resumable_progress = resumable_progress 143 self.total_size = total_size
144
145 - def progress(self):
146 """Percent of download completed, as a float. 147 148 Returns: 149 the percentage complete as a float, returning 0.0 if the total size of 150 the download is unknown. 151 """ 152 if self.total_size is not None: 153 return float(self.resumable_progress) / float(self.total_size) 154 else: 155 return 0.0
156
157 158 -class MediaUpload(object):
159 """Describes a media object to upload. 160 161 Base class that defines the interface of MediaUpload subclasses. 162 163 Note that subclasses of MediaUpload may allow you to control the chunksize 164 when uploading a media object. It is important to keep the size of the chunk 165 as large as possible to keep the upload efficient. Other factors may influence 166 the size of the chunk you use, particularly if you are working in an 167 environment where individual HTTP requests may have a hardcoded time limit, 168 such as under certain classes of requests under Google App Engine. 169 170 Streams are io.Base compatible objects that support seek(). Some MediaUpload 171 subclasses support using streams directly to upload data. Support for 172 streaming may be indicated by a MediaUpload sub-class and if appropriate for a 173 platform that stream will be used for uploading the media object. The support 174 for streaming is indicated by has_stream() returning True. The stream() method 175 should return an io.Base object that supports seek(). On platforms where the 176 underlying httplib module supports streaming, for example Python 2.6 and 177 later, the stream will be passed into the http library which will result in 178 less memory being used and possibly faster uploads. 179 180 If you need to upload media that can't be uploaded using any of the existing 181 MediaUpload sub-class then you can sub-class MediaUpload for your particular 182 needs. 183 """ 184
185 - def chunksize(self):
186 """Chunk size for resumable uploads. 187 188 Returns: 189 Chunk size in bytes. 190 """ 191 raise NotImplementedError()
192
193 - def mimetype(self):
194 """Mime type of the body. 195 196 Returns: 197 Mime type. 198 """ 199 return 'application/octet-stream'
200
201 - def size(self):
202 """Size of upload. 203 204 Returns: 205 Size of the body, or None if the size is unknown. 206 """ 207 return None
208
209 - def resumable(self):
210 """Whether this upload is resumable. 211 212 Returns: 213 True if resumable upload or False. 214 """ 215 return False
216
217 - def getbytes(self, begin, end):
218 """Get bytes from the media. 219 220 Args: 221 begin: int, offset from beginning of file. 222 length: int, number of bytes to read, starting at begin. 223 224 Returns: 225 A string of bytes read. May be shorter than length if EOF was reached 226 first. 227 """ 228 raise NotImplementedError()
229
230 - def has_stream(self):
231 """Does the underlying upload support a streaming interface. 232 233 Streaming means it is an io.IOBase subclass that supports seek, i.e. 234 seekable() returns True. 235 236 Returns: 237 True if the call to stream() will return an instance of a seekable io.Base 238 subclass. 239 """ 240 return False
241
242 - def stream(self):
243 """A stream interface to the data being uploaded. 244 245 Returns: 246 The returned value is an io.IOBase subclass that supports seek, i.e. 247 seekable() returns True. 248 """ 249 raise NotImplementedError()
250 251 @util.positional(1)
252 - def _to_json(self, strip=None):
253 """Utility function for creating a JSON representation of a MediaUpload. 254 255 Args: 256 strip: array, An array of names of members to not include in the JSON. 257 258 Returns: 259 string, a JSON representation of this instance, suitable to pass to 260 from_json(). 261 """ 262 t = type(self) 263 d = copy.copy(self.__dict__) 264 if strip is not None: 265 for member in strip: 266 del d[member] 267 d['_class'] = t.__name__ 268 d['_module'] = t.__module__ 269 return json.dumps(d)
270
271 - def to_json(self):
272 """Create a JSON representation of an instance of MediaUpload. 273 274 Returns: 275 string, a JSON representation of this instance, suitable to pass to 276 from_json(). 277 """ 278 return self._to_json()
279 280 @classmethod
281 - def new_from_json(cls, s):
282 """Utility class method to instantiate a MediaUpload subclass from a JSON 283 representation produced by to_json(). 284 285 Args: 286 s: string, JSON from to_json(). 287 288 Returns: 289 An instance of the subclass of MediaUpload that was serialized with 290 to_json(). 291 """ 292 data = json.loads(s) 293 # Find and call the right classmethod from_json() to restore the object. 294 module = data['_module'] 295 m = __import__(module, fromlist=module.split('.')[:-1]) 296 kls = getattr(m, data['_class']) 297 from_json = getattr(kls, 'from_json') 298 return from_json(s)
299
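
If none of the bundled subclasses below fit, the interface above is small enough to implement directly. A minimal sketch of a hypothetical subclass that serves bytes held in memory (all names here are illustrative, not part of the library):

    class BytesMediaUpload(MediaUpload):
      """Hypothetical MediaUpload over a bytes object."""

      def __init__(self, data, mimetype='application/octet-stream'):
        super(BytesMediaUpload, self).__init__()
        self._data = data
        self._mimetype = mimetype

      def chunksize(self):
        # Upload in DEFAULT_CHUNK_SIZE pieces when used resumably.
        return DEFAULT_CHUNK_SIZE

      def mimetype(self):
        return self._mimetype

      def size(self):
        return len(self._data)

      def getbytes(self, begin, length):
        # The slice may be shorter than length at EOF, as the base class allows.
        return self._data[begin:begin + length]
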
300 301 -class MediaIoBaseUpload(MediaUpload):
302 """A MediaUpload for io.Base objects. 303 304 Note that the Python file object is compatible with io.Base and can be used 305 with this class also. 306 307 fh = BytesIO(b'...Some data to upload...') 308 media = MediaIoBaseUpload(fh, mimetype='image/png', 309 chunksize=1024*1024, resumable=True) 310 farm.animals().insert( 311 id='cow', 312 name='cow.png', 313 media_body=media).execute() 314 315 Depending on the platform you are working on, you may pass -1 as the 316 chunksize, which indicates that the entire file should be uploaded in a single 317 request. If the underlying platform supports streams, such as Python 2.6 or 318 later, then this can be very efficient as it avoids multiple connections, and 319 also avoids loading the entire file into memory before sending it. Note that 320 Google App Engine has a 5MB limit on request size, so you should never set 321 your chunksize larger than 5MB, or to -1. 322 """ 323 324 @util.positional(3)
325 - def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, 326 resumable=False):
327 """Constructor. 328 329 Args: 330 fd: io.Base or file object, The source of the bytes to upload. MUST be 331 opened in blocking mode, do not use streams opened in non-blocking mode. 332 The given stream must be seekable, that is, it must be able to call 333 seek() on fd. 334 mimetype: string, Mime-type of the file. 335 chunksize: int, File will be uploaded in chunks of this many bytes. Only 336 used if resumable=True. Pass in a value of -1 if the file is to be 337 uploaded as a single chunk. Note that Google App Engine has a 5MB limit 338 on request size, so you should never set your chunksize larger than 5MB, 339 or to -1. 340 resumable: bool, True if this is a resumable upload. False means upload 341 in a single request. 342 """ 343 super(MediaIoBaseUpload, self).__init__() 344 self._fd = fd 345 self._mimetype = mimetype 346 if not (chunksize == -1 or chunksize > 0): 347 raise InvalidChunkSizeError() 348 self._chunksize = chunksize 349 self._resumable = resumable 350 351 self._fd.seek(0, os.SEEK_END) 352 self._size = self._fd.tell()
353
354 - def chunksize(self):
355 """Chunk size for resumable uploads. 356 357 Returns: 358 Chunk size in bytes. 359 """ 360 return self._chunksize
361
362 - def mimetype(self):
363 """Mime type of the body. 364 365 Returns: 366 Mime type. 367 """ 368 return self._mimetype
369
370 - def size(self):
371 """Size of upload. 372 373 Returns: 374 Size of the body, or None if the size is unknown. 375 """ 376 return self._size
377
378 - def resumable(self):
379 """Whether this upload is resumable. 380 381 Returns: 382 True if resumable upload or False. 383 """ 384 return self._resumable
385
386 - def getbytes(self, begin, length):
387 """Get bytes from the media. 388 389 Args: 390 begin: int, offset from beginning of file. 391 length: int, number of bytes to read, starting at begin. 392 393 Returns: 394 A string of bytes read. May be shorter than length if EOF was reached 395 first. 396 """ 397 self._fd.seek(begin) 398 return self._fd.read(length)
399
400 - def has_stream(self):
401 """Does the underlying upload support a streaming interface. 402 403 Streaming means it is an io.IOBase subclass that supports seek, i.e. 404 seekable() returns True. 405 406 Returns: 407 True if the call to stream() will return an instance of a seekable io.Base 408 subclass. 409 """ 410 return True
411
412 - def stream(self):
413 """A stream interface to the data being uploaded. 414 415 Returns: 416 The returned value is an io.IOBase subclass that supports seek, i.e. 417 seekable() returns True. 418 """ 419 return self._fd
420
421 - def to_json(self):
422 """This upload type is not serializable.""" 423 raise NotImplementedError('MediaIoBaseUpload is not serializable.')
424
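
To illustrate the chunksize of -1 discussed in the class docstring above (a sketch; `farm` stands in for a built service object, as in that docstring's example):

    import io

    # Stream the whole buffer in a single request by passing chunksize=-1,
    # avoiding loading the payload into memory where the platform's httplib
    # supports streaming request bodies.
    fh = io.BytesIO(b'...image bytes...')
    media = MediaIoBaseUpload(fh, mimetype='image/png',
                              chunksize=-1, resumable=True)
    request = farm.animals().insert(id='cow', name='cow.png', media_body=media)
    response = request.execute()
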
425 426 -class MediaFileUpload(MediaIoBaseUpload):
427 """A MediaUpload for a file. 428 429 Construct a MediaFileUpload and pass as the media_body parameter of the 430 method. For example, if we had a service that allowed uploading images: 431 432 433 media = MediaFileUpload('cow.png', mimetype='image/png', 434 chunksize=1024*1024, resumable=True) 435 farm.animals().insert( 436 id='cow', 437 name='cow.png', 438 media_body=media).execute() 439 440 Depending on the platform you are working on, you may pass -1 as the 441 chunksize, which indicates that the entire file should be uploaded in a single 442 request. If the underlying platform supports streams, such as Python 2.6 or 443 later, then this can be very efficient as it avoids multiple connections, and 444 also avoids loading the entire file into memory before sending it. Note that 445 Google App Engine has a 5MB limit on request size, so you should never set 446 your chunksize larger than 5MB, or to -1. 447 """ 448 449 @util.positional(2)
450 - def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE, 451 resumable=False):
452 """Constructor. 453 454 Args: 455 filename: string, Name of the file. 456 mimetype: string, Mime-type of the file. If None then a mime-type will be 457 guessed from the file extension. 458 chunksize: int, File will be uploaded in chunks of this many bytes. Only 459 used if resumable=True. Pass in a value of -1 if the file is to be 460 uploaded in a single chunk. Note that Google App Engine has a 5MB limit 461 on request size, so you should never set your chunksize larger than 5MB, 462 or to -1. 463 resumable: bool, True if this is a resumable upload. False means upload 464 in a single request. 465 """ 466 self._filename = filename 467 fd = open(self._filename, 'rb') 468 if mimetype is None: 469 # No mimetype provided, make a guess. 470 mimetype, _ = mimetypes.guess_type(filename) 471 if mimetype is None: 472 # Guess failed, use octet-stream. 473 mimetype = 'application/octet-stream' 474 super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize, 475 resumable=resumable)
476
477 - def to_json(self):
478 """Creating a JSON representation of an instance of MediaFileUpload. 479 480 Returns: 481 string, a JSON representation of this instance, suitable to pass to 482 from_json(). 483 """ 484 return self._to_json(strip=['_fd'])
485 486 @staticmethod
487 - def from_json(s):
488 d = json.loads(s) 489 return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'], 490 chunksize=d['_chunksize'], resumable=d['_resumable'])
491
492 493 -class MediaInMemoryUpload(MediaIoBaseUpload):
494 """MediaUpload for a chunk of bytes. 495 496 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for 497 the stream. 498 """ 499 500 @util.positional(2)
501 - def __init__(self, body, mimetype='application/octet-stream', 502 chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
503 """Create a new MediaInMemoryUpload. 504 505 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for 506 the stream. 507 508 Args: 509 body: string, Bytes of body content. 510 mimetype: string, Mime-type of the file or default of 511 'application/octet-stream'. 512 chunksize: int, File will be uploaded in chunks of this many bytes. Only 513 used if resumable=True. 514 resumable: bool, True if this is a resumable upload. False means upload 515 in a single request. 516 """ 517 fd = BytesIO(body) 518 super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize, 519 resumable=resumable)
520
521 522 -class MediaIoBaseDownload(object):
523 """Download media resources. 524 525 Note that the Python file object is compatible with io.Base and can be used 526 with this class also. 527 528 529 Example: 530 request = farms.animals().get_media(id='cow') 531 fh = io.FileIO('cow.png', mode='wb') 532 downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024) 533 534 done = False 535 while done is False: 536 status, done = downloader.next_chunk() 537 if status: 538 print "Download %d%%." % int(status.progress() * 100) 539 print "Download Complete!" 540 """ 541 542 @util.positional(3)
543 - def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
544 """Constructor. 545 546 Args: 547 fd: io.Base or file object, The stream in which to write the downloaded 548 bytes. 549 request: googleapiclient.http.HttpRequest, the media request to perform in 550 chunks. 551 chunksize: int, File will be downloaded in chunks of this many bytes. 552 """ 553 self._fd = fd 554 self._request = request 555 self._uri = request.uri 556 self._chunksize = chunksize 557 self._progress = 0 558 self._total_size = None 559 self._done = False 560 561 # Stubs for testing. 562 self._sleep = time.sleep 563 self._rand = random.random
564 565 @util.positional(1)
566 - def next_chunk(self, num_retries=0):
567 """Get the next chunk of the download. 568 569 Args: 570 num_retries: Integer, number of times to retry 500's with randomized 571 exponential backoff. If all retries fail, the raised HttpError 572 represents the last request. If zero (default), we attempt the 573 request only once. 574 575 Returns: 576 (status, done): (MediaDownloadStatus, boolean) 577 The value of 'done' will be True when the media has been fully 578 downloaded. 579 580 Raises: 581 googleapiclient.errors.HttpError if the response was not a 2xx. 582 httplib2.HttpLib2Error if a transport error has occured. 583 """ 584 headers = { 585 'range': 'bytes=%d-%d' % ( 586 self._progress, self._progress + self._chunksize) 587 } 588 http = self._request.http 589 590 resp, content = _retry_request( 591 http, num_retries, 'media download', self._sleep, self._rand, self._uri, 592 'GET', headers=headers) 593 594 if resp.status in [200, 206]: 595 if 'content-location' in resp and resp['content-location'] != self._uri: 596 self._uri = resp['content-location'] 597 self._progress += len(content) 598 self._fd.write(content) 599 600 if 'content-range' in resp: 601 content_range = resp['content-range'] 602 length = content_range.rsplit('/', 1)[1] 603 self._total_size = int(length) 604 elif 'content-length' in resp: 605 self._total_size = int(resp['content-length']) 606 607 if self._progress == self._total_size: 608 self._done = True 609 return MediaDownloadProgress(self._progress, self._total_size), self._done 610 else: 611 raise HttpError(resp, content, uri=self._uri)
612
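
A fuller version of the download loop from the class docstring, with per-chunk retries enabled (a sketch; the get_media request is a placeholder for whichever media-download method the target API exposes):

    import io

    request = farms.animals().get_media(id='cow')   # hypothetical media request
    fh = io.BytesIO()
    downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024)

    done = False
    while not done:
      # Retry individual chunk requests up to five times on 5xx and SSL errors.
      status, done = downloader.next_chunk(num_retries=5)
      if status:
        print('Download %d%%.' % int(status.progress() * 100))
    data = fh.getvalue()
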
613 614 -class _StreamSlice(object):
615 """Truncated stream. 616 617 Takes a stream and presents a stream that is a slice of the original stream. 618 This is used when uploading media in chunks. In later versions of Python a 619 stream can be passed to httplib in place of the string of data to send. The 620 problem is that httplib just blindly reads to the end of the stream. This 621 wrapper presents a virtual stream that only reads to the end of the chunk. 622 """ 623
624 - def __init__(self, stream, begin, chunksize):
625 """Constructor. 626 627 Args: 628 stream: (io.Base, file object), the stream to wrap. 629 begin: int, the seek position the chunk begins at. 630 chunksize: int, the size of the chunk. 631 """ 632 self._stream = stream 633 self._begin = begin 634 self._chunksize = chunksize 635 self._stream.seek(begin)
636
637 - def read(self, n=-1):
638 """Read n bytes. 639 640 Args: 641 n, int, the number of bytes to read. 642 643 Returns: 644 A string of length 'n', or less if EOF is reached. 645 """ 646 # The data left available to read sits in [cur, end) 647 cur = self._stream.tell() 648 end = self._begin + self._chunksize 649 if n == -1 or cur + n > end: 650 n = end - cur 651 return self._stream.read(n)
652
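
The behaviour of the wrapper is easiest to see with an in-memory stream (a self-contained sketch, independent of any API client):

    import io

    stream = io.BytesIO(b'abcdefghij')
    # Present bytes 2..6 ('cdefg') as if they were the entire stream.
    chunk = _StreamSlice(stream, 2, 5)
    assert chunk.read() == b'cdefg'   # read() stops at the chunk boundary
    assert chunk.read() == b''        # nothing left in this slice
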
653 654 -class HttpRequest(object):
655 """Encapsulates a single HTTP request.""" 656 657 @util.positional(4)
658 - def __init__(self, http, postproc, uri, 659 method='GET', 660 body=None, 661 headers=None, 662 methodId=None, 663 resumable=None):
664 """Constructor for an HttpRequest. 665 666 Args: 667 http: httplib2.Http, the transport object to use to make a request 668 postproc: callable, called on the HTTP response and content to transform 669 it into a data object before returning, or raising an exception 670 on an error. 671 uri: string, the absolute URI to send the request to 672 method: string, the HTTP method to use 673 body: string, the request body of the HTTP request, 674 headers: dict, the HTTP request headers 675 methodId: string, a unique identifier for the API method being called. 676 resumable: MediaUpload, None if this is not a resumbale request. 677 """ 678 self.uri = uri 679 self.method = method 680 self.body = body 681 self.headers = headers or {} 682 self.methodId = methodId 683 self.http = http 684 self.postproc = postproc 685 self.resumable = resumable 686 self.response_callbacks = [] 687 self._in_error_state = False 688 689 # Pull the multipart boundary out of the content-type header. 690 major, minor, params = mimeparse.parse_mime_type( 691 self.headers.get('content-type', 'application/json')) 692 693 # The size of the non-media part of the request. 694 self.body_size = len(self.body or '') 695 696 # The resumable URI to send chunks to. 697 self.resumable_uri = None 698 699 # The bytes that have been uploaded. 700 self.resumable_progress = 0 701 702 # Stubs for testing. 703 self._rand = random.random 704 self._sleep = time.sleep
705 706 @util.positional(1)
707 - def execute(self, http=None, num_retries=0):
708 """Execute the request. 709 710 Args: 711 http: httplib2.Http, an http object to be used in place of the 712 one the HttpRequest request object was constructed with. 713 num_retries: Integer, number of times to retry 500's with randomized 714 exponential backoff. If all retries fail, the raised HttpError 715 represents the last request. If zero (default), we attempt the 716 request only once. 717 718 Returns: 719 A deserialized object model of the response body as determined 720 by the postproc. 721 722 Raises: 723 googleapiclient.errors.HttpError if the response was not a 2xx. 724 httplib2.HttpLib2Error if a transport error has occured. 725 """ 726 if http is None: 727 http = self.http 728 729 if self.resumable: 730 body = None 731 while body is None: 732 _, body = self.next_chunk(http=http, num_retries=num_retries) 733 return body 734 735 # Non-resumable case. 736 737 if 'content-length' not in self.headers: 738 self.headers['content-length'] = str(self.body_size) 739 # If the request URI is too long then turn it into a POST request. 740 if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET': 741 self.method = 'POST' 742 self.headers['x-http-method-override'] = 'GET' 743 self.headers['content-type'] = 'application/x-www-form-urlencoded' 744 parsed = urlparse(self.uri) 745 self.uri = urlunparse( 746 (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, 747 None) 748 ) 749 self.body = parsed.query 750 self.headers['content-length'] = str(len(self.body)) 751 752 # Handle retries for server-side errors. 753 resp, content = _retry_request( 754 http, num_retries, 'request', self._sleep, self._rand, str(self.uri), 755 method=str(self.method), body=self.body, headers=self.headers) 756 757 for callback in self.response_callbacks: 758 callback(resp) 759 if resp.status >= 300: 760 raise HttpError(resp, content, uri=self.uri) 761 return self.postproc(resp, content)
762 763 @util.positional(2)
764 - def add_response_callback(self, cb):
765 """add_response_headers_callback 766 767 Args: 768 cb: Callback to be called on receiving the response headers, of signature: 769 770 def cb(resp): 771 # Where resp is an instance of httplib2.Response 772 """ 773 self.response_callbacks.append(cb)
774 775 @util.positional(1)
776 - def next_chunk(self, http=None, num_retries=0):
777 """Execute the next step of a resumable upload. 778 779 Can only be used if the method being executed supports media uploads and 780 the MediaUpload object passed in was flagged as using resumable upload. 781 782 Example: 783 784 media = MediaFileUpload('cow.png', mimetype='image/png', 785 chunksize=1000, resumable=True) 786 request = farm.animals().insert( 787 id='cow', 788 name='cow.png', 789 media_body=media) 790 791 response = None 792 while response is None: 793 status, response = request.next_chunk() 794 if status: 795 print "Upload %d%% complete." % int(status.progress() * 100) 796 797 798 Args: 799 http: httplib2.Http, an http object to be used in place of the 800 one the HttpRequest request object was constructed with. 801 num_retries: Integer, number of times to retry 500's with randomized 802 exponential backoff. If all retries fail, the raised HttpError 803 represents the last request. If zero (default), we attempt the 804 request only once. 805 806 Returns: 807 (status, body): (ResumableMediaStatus, object) 808 The body will be None until the resumable media is fully uploaded. 809 810 Raises: 811 googleapiclient.errors.HttpError if the response was not a 2xx. 812 httplib2.HttpLib2Error if a transport error has occured. 813 """ 814 if http is None: 815 http = self.http 816 817 if self.resumable.size() is None: 818 size = '*' 819 else: 820 size = str(self.resumable.size()) 821 822 if self.resumable_uri is None: 823 start_headers = copy.copy(self.headers) 824 start_headers['X-Upload-Content-Type'] = self.resumable.mimetype() 825 if size != '*': 826 start_headers['X-Upload-Content-Length'] = size 827 start_headers['content-length'] = str(self.body_size) 828 829 resp, content = _retry_request( 830 http, num_retries, 'resumable URI request', self._sleep, self._rand, 831 self.uri, method=self.method, body=self.body, headers=start_headers) 832 833 if resp.status == 200 and 'location' in resp: 834 self.resumable_uri = resp['location'] 835 else: 836 raise ResumableUploadError(resp, content) 837 elif self._in_error_state: 838 # If we are in an error state then query the server for current state of 839 # the upload by sending an empty PUT and reading the 'range' header in 840 # the response. 841 headers = { 842 'Content-Range': 'bytes */%s' % size, 843 'content-length': '0' 844 } 845 resp, content = http.request(self.resumable_uri, 'PUT', 846 headers=headers) 847 status, body = self._process_response(resp, content) 848 if body: 849 # The upload was complete. 850 return (status, body) 851 852 if self.resumable.has_stream(): 853 data = self.resumable.stream() 854 if self.resumable.chunksize() == -1: 855 data.seek(self.resumable_progress) 856 chunk_end = self.resumable.size() - self.resumable_progress - 1 857 else: 858 # Doing chunking with a stream, so wrap a slice of the stream. 859 data = _StreamSlice(data, self.resumable_progress, 860 self.resumable.chunksize()) 861 chunk_end = min( 862 self.resumable_progress + self.resumable.chunksize() - 1, 863 self.resumable.size() - 1) 864 else: 865 data = self.resumable.getbytes( 866 self.resumable_progress, self.resumable.chunksize()) 867 868 # A short read implies that we are at EOF, so finish the upload. 
869 if len(data) < self.resumable.chunksize(): 870 size = str(self.resumable_progress + len(data)) 871 872 chunk_end = self.resumable_progress + len(data) - 1 873 874 headers = { 875 'Content-Range': 'bytes %d-%d/%s' % ( 876 self.resumable_progress, chunk_end, size), 877 # Must set the content-length header here because httplib can't 878 # calculate the size when working with _StreamSlice. 879 'Content-Length': str(chunk_end - self.resumable_progress + 1) 880 } 881 882 for retry_num in range(num_retries + 1): 883 if retry_num > 0: 884 self._sleep(self._rand() * 2**retry_num) 885 logging.warning( 886 'Retry #%d for media upload: %s %s, following status: %d' 887 % (retry_num, self.method, self.uri, resp.status)) 888 889 try: 890 resp, content = http.request(self.resumable_uri, method='PUT', 891 body=data, 892 headers=headers) 893 except: 894 self._in_error_state = True 895 raise 896 if resp.status < 500: 897 break 898 899 return self._process_response(resp, content)
900
901 - def _process_response(self, resp, content):
902 """Process the response from a single chunk upload. 903 904 Args: 905 resp: httplib2.Response, the response object. 906 content: string, the content of the response. 907 908 Returns: 909 (status, body): (ResumableMediaStatus, object) 910 The body will be None until the resumable media is fully uploaded. 911 912 Raises: 913 googleapiclient.errors.HttpError if the response was not a 2xx or a 308. 914 """ 915 if resp.status in [200, 201]: 916 self._in_error_state = False 917 return None, self.postproc(resp, content) 918 elif resp.status == 308: 919 self._in_error_state = False 920 # A "308 Resume Incomplete" indicates we are not done. 921 self.resumable_progress = int(resp['range'].split('-')[1]) + 1 922 if 'location' in resp: 923 self.resumable_uri = resp['location'] 924 else: 925 self._in_error_state = True 926 raise HttpError(resp, content, uri=self.uri) 927 928 return (MediaUploadProgress(self.resumable_progress, self.resumable.size()), 929 None)
930
931 - def to_json(self):
932 """Returns a JSON representation of the HttpRequest.""" 933 d = copy.copy(self.__dict__) 934 if d['resumable'] is not None: 935 d['resumable'] = self.resumable.to_json() 936 del d['http'] 937 del d['postproc'] 938 del d['_sleep'] 939 del d['_rand'] 940 941 return json.dumps(d)
942 943 @staticmethod
944 - def from_json(s, http, postproc):
945 """Returns an HttpRequest populated with info from a JSON object.""" 946 d = json.loads(s) 947 if d['resumable'] is not None: 948 d['resumable'] = MediaUpload.new_from_json(d['resumable']) 949 return HttpRequest( 950 http, 951 postproc, 952 uri=d['uri'], 953 method=d['method'], 954 body=d['body'], 955 headers=d['headers'], 956 methodId=d['methodId'], 957 resumable=d['resumable'])
958
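
Requests produced by the discovery layer are instances of this class, so retries and serialization can be driven directly (a sketch; `service.animals().list()` reuses the hypothetical API from the docstrings above):

    request = service.animals().list()          # an HttpRequest
    response = request.execute(num_retries=3)   # retry 5xx and SSL errors

    # A request can also be serialized and re-created later; the transport
    # and postproc are not serialized and must be supplied again.
    saved = request.to_json()
    restored = HttpRequest.from_json(saved, request.http, request.postproc)
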
959 960 -class BatchHttpRequest(object):
961 """Batches multiple HttpRequest objects into a single HTTP request. 962 963 Example: 964 from googleapiclient.http import BatchHttpRequest 965 966 def list_animals(request_id, response, exception): 967 \"\"\"Do something with the animals list response.\"\"\" 968 if exception is not None: 969 # Do something with the exception. 970 pass 971 else: 972 # Do something with the response. 973 pass 974 975 def list_farmers(request_id, response, exception): 976 \"\"\"Do something with the farmers list response.\"\"\" 977 if exception is not None: 978 # Do something with the exception. 979 pass 980 else: 981 # Do something with the response. 982 pass 983 984 service = build('farm', 'v2') 985 986 batch = BatchHttpRequest() 987 988 batch.add(service.animals().list(), list_animals) 989 batch.add(service.farmers().list(), list_farmers) 990 batch.execute(http=http) 991 """ 992 993 @util.positional(1)
994 - def __init__(self, callback=None, batch_uri=None):
995 """Constructor for a BatchHttpRequest. 996 997 Args: 998 callback: callable, A callback to be called for each response, of the 999 form callback(id, response, exception). The first parameter is the 1000 request id, and the second is the deserialized response object. The 1001 third is an googleapiclient.errors.HttpError exception object if an HTTP error 1002 occurred while processing the request, or None if no error occurred. 1003 batch_uri: string, URI to send batch requests to. 1004 """ 1005 if batch_uri is None: 1006 batch_uri = 'https://www.googleapis.com/batch' 1007 self._batch_uri = batch_uri 1008 1009 # Global callback to be called for each individual response in the batch. 1010 self._callback = callback 1011 1012 # A map from id to request. 1013 self._requests = {} 1014 1015 # A map from id to callback. 1016 self._callbacks = {} 1017 1018 # List of request ids, in the order in which they were added. 1019 self._order = [] 1020 1021 # The last auto generated id. 1022 self._last_auto_id = 0 1023 1024 # Unique ID on which to base the Content-ID headers. 1025 self._base_id = None 1026 1027 # A map from request id to (httplib2.Response, content) response pairs 1028 self._responses = {} 1029 1030 # A map of id(Credentials) that have been refreshed. 1031 self._refreshed_credentials = {}
1032
1033 - def _refresh_and_apply_credentials(self, request, http):
1034 """Refresh the credentials and apply to the request. 1035 1036 Args: 1037 request: HttpRequest, the request. 1038 http: httplib2.Http, the global http object for the batch. 1039 """ 1040 # For the credentials to refresh, but only once per refresh_token 1041 # If there is no http per the request then refresh the http passed in 1042 # via execute() 1043 creds = None 1044 if request.http is not None and hasattr(request.http.request, 1045 'credentials'): 1046 creds = request.http.request.credentials 1047 elif http is not None and hasattr(http.request, 'credentials'): 1048 creds = http.request.credentials 1049 if creds is not None: 1050 if id(creds) not in self._refreshed_credentials: 1051 creds.refresh(http) 1052 self._refreshed_credentials[id(creds)] = 1 1053 1054 # Only apply the credentials if we are using the http object passed in, 1055 # otherwise apply() will get called during _serialize_request(). 1056 if request.http is None or not hasattr(request.http.request, 1057 'credentials'): 1058 creds.apply(request.headers)
1059
1060 - def _id_to_header(self, id_):
1061 """Convert an id to a Content-ID header value. 1062 1063 Args: 1064 id_: string, identifier of individual request. 1065 1066 Returns: 1067 A Content-ID header with the id_ encoded into it. A UUID is prepended to 1068 the value because Content-ID headers are supposed to be universally 1069 unique. 1070 """ 1071 if self._base_id is None: 1072 self._base_id = uuid.uuid4() 1073 1074 return '<%s+%s>' % (self._base_id, quote(id_))
1075
1076 - def _header_to_id(self, header):
1077 """Convert a Content-ID header value to an id. 1078 1079 Presumes the Content-ID header conforms to the format that _id_to_header() 1080 returns. 1081 1082 Args: 1083 header: string, Content-ID header value. 1084 1085 Returns: 1086 The extracted id value. 1087 1088 Raises: 1089 BatchError if the header is not in the expected format. 1090 """ 1091 if header[0] != '<' or header[-1] != '>': 1092 raise BatchError("Invalid value for Content-ID: %s" % header) 1093 if '+' not in header: 1094 raise BatchError("Invalid value for Content-ID: %s" % header) 1095 base, id_ = header[1:-1].rsplit('+', 1) 1096 1097 return unquote(id_)
1098
1099 - def _serialize_request(self, request):
1100 """Convert an HttpRequest object into a string. 1101 1102 Args: 1103 request: HttpRequest, the request to serialize. 1104 1105 Returns: 1106 The request as a string in application/http format. 1107 """ 1108 # Construct status line 1109 parsed = urlparse(request.uri) 1110 request_line = urlunparse( 1111 ('', '', parsed.path, parsed.params, parsed.query, '') 1112 ) 1113 status_line = request.method + ' ' + request_line + ' HTTP/1.1\n' 1114 major, minor = request.headers.get('content-type', 'application/json').split('/') 1115 msg = MIMENonMultipart(major, minor) 1116 headers = request.headers.copy() 1117 1118 if request.http is not None and hasattr(request.http.request, 1119 'credentials'): 1120 request.http.request.credentials.apply(headers) 1121 1122 # MIMENonMultipart adds its own Content-Type header. 1123 if 'content-type' in headers: 1124 del headers['content-type'] 1125 1126 for key, value in six.iteritems(headers): 1127 msg[key] = value 1128 msg['Host'] = parsed.netloc 1129 msg.set_unixfrom(None) 1130 1131 if request.body is not None: 1132 msg.set_payload(request.body) 1133 msg['content-length'] = str(len(request.body)) 1134 1135 # Serialize the mime message. 1136 fp = StringIO() 1137 # maxheaderlen=0 means don't line wrap headers. 1138 g = Generator(fp, maxheaderlen=0) 1139 g.flatten(msg, unixfrom=False) 1140 body = fp.getvalue() 1141 1142 return status_line + body
1143
1144 - def _deserialize_response(self, payload):
1145 """Convert string into httplib2 response and content. 1146 1147 Args: 1148 payload: string, headers and body as a string. 1149 1150 Returns: 1151 A pair (resp, content), such as would be returned from httplib2.request. 1152 """ 1153 # Strip off the status line 1154 status_line, payload = payload.split('\n', 1) 1155 protocol, status, reason = status_line.split(' ', 2) 1156 1157 # Parse the rest of the response 1158 parser = FeedParser() 1159 parser.feed(payload) 1160 msg = parser.close() 1161 msg['status'] = status 1162 1163 # Create httplib2.Response from the parsed headers. 1164 resp = httplib2.Response(msg) 1165 resp.reason = reason 1166 resp.version = int(protocol.split('/', 1)[1].replace('.', '')) 1167 1168 content = payload.split('\r\n\r\n', 1)[1] 1169 1170 return resp, content
1171
1172 - def _new_id(self):
1173 """Create a new id. 1174 1175 Auto incrementing number that avoids conflicts with ids already used. 1176 1177 Returns: 1178 string, a new unique id. 1179 """ 1180 self._last_auto_id += 1 1181 while str(self._last_auto_id) in self._requests: 1182 self._last_auto_id += 1 1183 return str(self._last_auto_id)
1184 1185 @util.positional(2)
1186 - def add(self, request, callback=None, request_id=None):
1187 """Add a new request. 1188 1189 Every callback added will be paired with a unique id, the request_id. That 1190 unique id will be passed back to the callback when the response comes back 1191 from the server. The default behavior is to have the library generate its 1192 own unique id. If the caller passes in a request_id then they must ensure 1193 uniqueness for each request_id, and if they are not unique an exception is 1194 raised. Callers should either supply all request_ids or never supply a 1195 request id, to avoid such an error. 1196 1197 Args: 1198 request: HttpRequest, Request to add to the batch. 1199 callback: callable, A callback to be called for this response, of the 1200 form callback(id, response, exception). The first parameter is the 1201 request id, and the second is the deserialized response object. The 1202 third is a googleapiclient.errors.HttpError exception object if an HTTP error 1203 occurred while processing the request, or None if no errors occurred. 1204 request_id: string, A unique id for the request. The id will be passed to 1205 the callback with the response. 1206 1207 Returns: 1208 None 1209 1210 Raises: 1211 BatchError if a media request is added to a batch. 1212 KeyError if the request_id is not unique. 1213 """ 1214 if request_id is None: 1215 request_id = self._new_id() 1216 if request.resumable is not None: 1217 raise BatchError("Media requests cannot be used in a batch request.") 1218 if request_id in self._requests: 1219 raise KeyError("A request with this ID already exists: %s" % request_id) 1220 self._requests[request_id] = request 1221 self._callbacks[request_id] = callback 1222 self._order.append(request_id)
1223
1224 - def _execute(self, http, order, requests):
1225 """Serialize batch request, send to server, process response. 1226 1227 Args: 1228 http: httplib2.Http, an http object to be used to make the request with. 1229 order: list, list of request ids in the order they were added to the 1230 batch. 1231 request: list, list of request objects to send. 1232 1233 Raises: 1234 httplib2.HttpLib2Error if a transport error has occured. 1235 googleapiclient.errors.BatchError if the response is the wrong format. 1236 """ 1237 message = MIMEMultipart('mixed') 1238 # Message should not write out it's own headers. 1239 setattr(message, '_write_headers', lambda self: None) 1240 1241 # Add all the individual requests. 1242 for request_id in order: 1243 request = requests[request_id] 1244 1245 msg = MIMENonMultipart('application', 'http') 1246 msg['Content-Transfer-Encoding'] = 'binary' 1247 msg['Content-ID'] = self._id_to_header(request_id) 1248 1249 body = self._serialize_request(request) 1250 msg.set_payload(body) 1251 message.attach(msg) 1252 1253 # encode the body: note that we can't use `as_string`, because 1254 # it plays games with `From ` lines. 1255 fp = StringIO() 1256 g = Generator(fp, mangle_from_=False) 1257 g.flatten(message, unixfrom=False) 1258 body = fp.getvalue() 1259 1260 headers = {} 1261 headers['content-type'] = ('multipart/mixed; ' 1262 'boundary="%s"') % message.get_boundary() 1263 1264 resp, content = http.request(self._batch_uri, method='POST', body=body, 1265 headers=headers) 1266 1267 if resp.status >= 300: 1268 raise HttpError(resp, content, uri=self._batch_uri) 1269 1270 # Prepend with a content-type header so FeedParser can handle it. 1271 header = 'content-type: %s\r\n\r\n' % resp['content-type'] 1272 # PY3's FeedParser only accepts unicode. So we should decode content 1273 # here, and encode each payload again. 1274 if six.PY3: 1275 content = content.decode('utf-8') 1276 for_parser = header + content 1277 1278 parser = FeedParser() 1279 parser.feed(for_parser) 1280 mime_response = parser.close() 1281 1282 if not mime_response.is_multipart(): 1283 raise BatchError("Response not in multipart/mixed format.", resp=resp, 1284 content=content) 1285 1286 for part in mime_response.get_payload(): 1287 request_id = self._header_to_id(part['Content-ID']) 1288 response, content = self._deserialize_response(part.get_payload()) 1289 # We encode content here to emulate normal http response. 1290 if isinstance(content, six.text_type): 1291 content = content.encode('utf-8') 1292 self._responses[request_id] = (response, content)
1293 1294 @util.positional(1)
1295 - def execute(self, http=None):
1296 """Execute all the requests as a single batched HTTP request. 1297 1298 Args: 1299 http: httplib2.Http, an http object to be used in place of the one the 1300 HttpRequest request object was constructed with. If one isn't supplied 1301 then use a http object from the requests in this batch. 1302 1303 Returns: 1304 None 1305 1306 Raises: 1307 httplib2.HttpLib2Error if a transport error has occured. 1308 googleapiclient.errors.BatchError if the response is the wrong format. 1309 """ 1310 # If we have no requests return 1311 if len(self._order) == 0: 1312 return None 1313 1314 # If http is not supplied use the first valid one given in the requests. 1315 if http is None: 1316 for request_id in self._order: 1317 request = self._requests[request_id] 1318 if request is not None: 1319 http = request.http 1320 break 1321 1322 if http is None: 1323 raise ValueError("Missing a valid http object.") 1324 1325 self._execute(http, self._order, self._requests) 1326 1327 # Loop over all the requests and check for 401s. For each 401 request the 1328 # credentials should be refreshed and then sent again in a separate batch. 1329 redo_requests = {} 1330 redo_order = [] 1331 1332 for request_id in self._order: 1333 resp, content = self._responses[request_id] 1334 if resp['status'] == '401': 1335 redo_order.append(request_id) 1336 request = self._requests[request_id] 1337 self._refresh_and_apply_credentials(request, http) 1338 redo_requests[request_id] = request 1339 1340 if redo_requests: 1341 self._execute(http, redo_order, redo_requests) 1342 1343 # Now process all callbacks that are erroring, and raise an exception for 1344 # ones that return a non-2xx response? Or add extra parameter to callback 1345 # that contains an HttpError? 1346 1347 for request_id in self._order: 1348 resp, content = self._responses[request_id] 1349 1350 request = self._requests[request_id] 1351 callback = self._callbacks[request_id] 1352 1353 response = None 1354 exception = None 1355 try: 1356 if resp.status >= 300: 1357 raise HttpError(resp, content, uri=request.uri) 1358 response = request.postproc(resp, content) 1359 except HttpError as e: 1360 exception = e 1361 1362 if callback is not None: 1363 callback(request_id, response, exception) 1364 if self._callback is not None: 1365 self._callback(request_id, response, exception)
1366
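
A sketch of the per-request callback form with explicit request ids, again using the hypothetical 'farm' service from the class docstring:

    def handle(request_id, response, exception):
      if exception is not None:
        # exception is a googleapiclient.errors.HttpError for non-2xx parts.
        print('Request %s failed: %s' % (request_id, exception))
      else:
        print('Request %s succeeded.' % request_id)

    batch = BatchHttpRequest(batch_uri='https://www.googleapis.com/batch')
    batch.add(service.animals().list(), callback=handle, request_id='animals')
    batch.add(service.farmers().list(), callback=handle, request_id='farmers')
    batch.execute()
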
1367 1368 -class HttpRequestMock(object):
1369 """Mock of HttpRequest. 1370 1371 Do not construct directly, instead use RequestMockBuilder. 1372 """ 1373
1374 - def __init__(self, resp, content, postproc):
1375 """Constructor for HttpRequestMock 1376 1377 Args: 1378 resp: httplib2.Response, the response to emulate coming from the request 1379 content: string, the response body 1380 postproc: callable, the post processing function usually supplied by 1381 the model class. See model.JsonModel.response() as an example. 1382 """ 1383 self.resp = resp 1384 self.content = content 1385 self.postproc = postproc 1386 if resp is None: 1387 self.resp = httplib2.Response({'status': 200, 'reason': 'OK'}) 1388 if 'reason' in self.resp: 1389 self.resp.reason = self.resp['reason']
1390
1391 - def execute(self, http=None):
1392 """Execute the request. 1393 1394 Same behavior as HttpRequest.execute(), but the response is 1395 mocked and not really from an HTTP request/response. 1396 """ 1397 return self.postproc(self.resp, self.content)
1398
1399 1400 -class RequestMockBuilder(object):
1401 """A simple mock of HttpRequest 1402 1403 Pass in a dictionary to the constructor that maps request methodIds to 1404 tuples of (httplib2.Response, content, opt_expected_body) that should be 1405 returned when that method is called. None may also be passed in for the 1406 httplib2.Response, in which case a 200 OK response will be generated. 1407 If an opt_expected_body (str or dict) is provided, it will be compared to 1408 the body and UnexpectedBodyError will be raised on inequality. 1409 1410 Example: 1411 response = '{"data": {"id": "tag:google.c...' 1412 requestBuilder = RequestMockBuilder( 1413 { 1414 'plus.activities.get': (None, response), 1415 } 1416 ) 1417 googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder) 1418 1419 Methods that you do not supply a response for will return a 1420 200 OK with an empty string as the response content or raise an exception 1421 if check_unexpected is set to True. The methodId is taken from the rpcName 1422 in the discovery document. 1423 1424 For more details see the project wiki. 1425 """ 1426
1427 - def __init__(self, responses, check_unexpected=False):
1428 """Constructor for RequestMockBuilder 1429 1430 The constructed object should be a callable object 1431 that can replace the class HttpResponse. 1432 1433 responses - A dictionary that maps methodIds into tuples 1434 of (httplib2.Response, content). The methodId 1435 comes from the 'rpcName' field in the discovery 1436 document. 1437 check_unexpected - A boolean setting whether or not UnexpectedMethodError 1438 should be raised on unsupplied method. 1439 """ 1440 self.responses = responses 1441 self.check_unexpected = check_unexpected
1442
1443 - def __call__(self, http, postproc, uri, method='GET', body=None, 1444 headers=None, methodId=None, resumable=None):
1445 """Implements the callable interface that discovery.build() expects 1446 of requestBuilder, which is to build an object compatible with 1447 HttpRequest.execute(). See that method for the description of the 1448 parameters and the expected response. 1449 """ 1450 if methodId in self.responses: 1451 response = self.responses[methodId] 1452 resp, content = response[:2] 1453 if len(response) > 2: 1454 # Test the body against the supplied expected_body. 1455 expected_body = response[2] 1456 if bool(expected_body) != bool(body): 1457 # Not expecting a body and provided one 1458 # or expecting a body and not provided one. 1459 raise UnexpectedBodyError(expected_body, body) 1460 if isinstance(expected_body, str): 1461 expected_body = json.loads(expected_body) 1462 body = json.loads(body) 1463 if body != expected_body: 1464 raise UnexpectedBodyError(expected_body, body) 1465 return HttpRequestMock(resp, content, postproc) 1466 elif self.check_unexpected: 1467 raise UnexpectedMethodError(methodId=methodId) 1468 else: 1469 model = JsonModel(False) 1470 return HttpRequestMock(None, '{}', model.response)
1471
1472 1473 -class HttpMock(object):
1474 """Mock of httplib2.Http""" 1475
1476 - def __init__(self, filename=None, headers=None):
1477 """ 1478 Args: 1479 filename: string, absolute filename to read response from 1480 headers: dict, header to return with response 1481 """ 1482 if headers is None: 1483 headers = {'status': '200'} 1484 if filename: 1485 f = open(filename, 'rb') 1486 self.data = f.read() 1487 f.close() 1488 else: 1489 self.data = None 1490 self.response_headers = headers 1491 self.headers = None 1492 self.uri = None 1493 self.method = None 1494 self.body = None 1495 self.headers = None
1496 1497
1498 - def request(self, uri, 1499 method='GET', 1500 body=None, 1501 headers=None, 1502 redirections=1, 1503 connection_type=None):
1504 self.uri = uri 1505 self.method = method 1506 self.body = body 1507 self.headers = headers 1508 return httplib2.Response(self.response_headers), self.data
1509
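
Together with RequestMockBuilder above, this is commonly used to build a fully offline service object in tests (a sketch; 'plus.json' stands in for a locally saved discovery document, and the method id is an assumption about the API under test):

    from googleapiclient.discovery import build

    # The mock returns the saved discovery document for the build() call.
    http = HttpMock('plus.json', {'status': '200'})
    requestBuilder = RequestMockBuilder({
        'plus.activities.get': (None, '{"kind": "plus#activity"}'),
    })
    plus = build('plus', 'v1', http=http, requestBuilder=requestBuilder)
    activity = plus.activities().get(activityId='1').execute()
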
1510 1511 -class HttpMockSequence(object):
1512 """Mock of httplib2.Http 1513 1514 Mocks a sequence of calls to request returning different responses for each 1515 call. Create an instance initialized with the desired response headers 1516 and content and then use as if an httplib2.Http instance. 1517 1518 http = HttpMockSequence([ 1519 ({'status': '401'}, ''), 1520 ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'), 1521 ({'status': '200'}, 'echo_request_headers'), 1522 ]) 1523 resp, content = http.request("http://examples.com") 1524 1525 There are special values you can pass in for content to trigger 1526 behaviours that are helpful in testing. 1527 1528 'echo_request_headers' means return the request headers in the response body 1529 'echo_request_headers_as_json' means return the request headers in 1530 the response body 1531 'echo_request_body' means return the request body in the response body 1532 'echo_request_uri' means return the request uri in the response body 1533 """ 1534
1535 - def __init__(self, iterable):
1536 """ 1537 Args: 1538 iterable: iterable, a sequence of pairs of (headers, body) 1539 """ 1540 self._iterable = iterable 1541 self.follow_redirects = True
1542
1543 - def request(self, uri, 1544 method='GET', 1545 body=None, 1546 headers=None, 1547 redirections=1, 1548 connection_type=None):
1549 resp, content = self._iterable.pop(0) 1550 if content == 'echo_request_headers': 1551 content = headers 1552 elif content == 'echo_request_headers_as_json': 1553 content = json.dumps(headers) 1554 elif content == 'echo_request_body': 1555 if hasattr(body, 'read'): 1556 content = body.read() 1557 else: 1558 content = body 1559 elif content == 'echo_request_uri': 1560 content = uri 1561 if isinstance(content, six.text_type): 1562 content = content.encode('utf-8') 1563 return httplib2.Response(resp), content
1564
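
For example, the retry helper at the top of this module can be exercised against a canned 500-then-200 sequence (a self-contained sketch):

    import random

    http = HttpMockSequence([
        ({'status': '500'}, ''),
        ({'status': '200'}, '{"ok": true}'),
    ])
    resp, content = _retry_request(
        http, 1, 'test request', lambda _: None, random.random,
        'http://example.org', 'GET')
    # The first canned response is a 500, so the helper sleeps (stubbed out
    # here) and retries; the successful second response is returned.
    assert resp.status == 200
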
1565 1566 -def set_user_agent(http, user_agent):
1567 """Set the user-agent on every request. 1568 1569 Args: 1570 http - An instance of httplib2.Http 1571 or something that acts like it. 1572 user_agent: string, the value for the user-agent header. 1573 1574 Returns: 1575 A modified instance of http that was passed in. 1576 1577 Example: 1578 1579 h = httplib2.Http() 1580 h = set_user_agent(h, "my-app-name/6.0") 1581 1582 Most of the time the user-agent will be set doing auth, this is for the rare 1583 cases where you are accessing an unauthenticated endpoint. 1584 """ 1585 request_orig = http.request 1586 1587 # The closure that will replace 'httplib2.Http.request'. 1588 def new_request(uri, method='GET', body=None, headers=None, 1589 redirections=httplib2.DEFAULT_MAX_REDIRECTS, 1590 connection_type=None): 1591 """Modify the request headers to add the user-agent.""" 1592 if headers is None: 1593 headers = {} 1594 if 'user-agent' in headers: 1595 headers['user-agent'] = user_agent + ' ' + headers['user-agent'] 1596 else: 1597 headers['user-agent'] = user_agent 1598 resp, content = request_orig(uri, method, body, headers, 1599 redirections, connection_type) 1600 return resp, content
1601 1602 http.request = new_request 1603 return http 1604
1605 1606 -def tunnel_patch(http):
1607 """Tunnel PATCH requests over POST. 1608 Args: 1609 http - An instance of httplib2.Http 1610 or something that acts like it. 1611 1612 Returns: 1613 A modified instance of http that was passed in. 1614 1615 Example: 1616 1617 h = httplib2.Http() 1618 h = tunnel_patch(h) 1619 1620 Useful if you are running on a platform that doesn't support PATCH. 1621 Apply this last if you are using OAuth 1.0, as changing the method 1622 will result in a different signature. 1623 """ 1624 request_orig = http.request 1625 1626 # The closure that will replace 'httplib2.Http.request'. 1627 def new_request(uri, method='GET', body=None, headers=None, 1628 redirections=httplib2.DEFAULT_MAX_REDIRECTS, 1629 connection_type=None): 1630 """Rewrite a PATCH request as a POST with an x-http-method-override header.""" 1631 if headers is None: 1632 headers = {} 1633 if method == 'PATCH': 1634 if 'oauth_token' in headers.get('authorization', ''): 1635 logging.warning( 1636 'OAuth 1.0 request made with Credentials after tunnel_patch.') 1637 headers['x-http-method-override'] = "PATCH" 1638 method = 'POST' 1639 resp, content = request_orig(uri, method, body, headers, 1640 redirections, connection_type) 1641 return resp, content
1642 1643 http.request = new_request 1644 return http 1645
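
The two wrappers compose, since each simply replaces http.request with a closure over the previous one (a sketch; the URI is a placeholder):

    import httplib2

    h = httplib2.Http()
    h = set_user_agent(h, 'my-app-name/6.0')
    h = tunnel_patch(h)   # per the docstring, apply this last with OAuth 1.0

    # PATCH calls through h are now sent as POST with an
    # 'x-http-method-override: PATCH' header, and every request carries
    # the custom user-agent.
    resp, content = h.request('https://www.googleapis.com/someapi/v1/resource',
                              method='PATCH', body='{}')
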