Commit 87b5a4a

Merge pull request #2679 from ploxiln/curl_no_decompress
curl_httpclient: fix disabled decompress_response
2 parents fbd1a8f + 1bd626b

3 files changed: +19 −18 lines

tornado/curl_httpclient.py (+1 −1)

@@ -383,7 +383,7 @@ def write_function(b: Union[bytes, bytearray]) -> int:
         if request.decompress_response:
             curl.setopt(pycurl.ENCODING, "gzip,deflate")
         else:
-            curl.setopt(pycurl.ENCODING, "none")
+            curl.setopt(pycurl.ENCODING, None)
         if request.proxy_host and request.proxy_port:
             curl.setopt(pycurl.PROXY, request.proxy_host)
             curl.setopt(pycurl.PROXYPORT, request.proxy_port)
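
The only code change is the value passed for pycurl.ENCODING (libcurl's CURLOPT_ACCEPT_ENCODING). As I understand that libcurl option, a non-empty string such as "none" is sent literally in an Accept-Encoding request header, whereas passing None leaves the option unset, so no Accept-Encoding header is added and libcurl does not attempt to decode the response body. The following minimal sketch is not part of the commit; it only illustrates the two settings side by side, and the localhost URL is a placeholder.

import pycurl
from io import BytesIO

def fetch(decompress: bool) -> bytes:
    buf = BytesIO()
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, "http://localhost:8888/chunk")  # placeholder URL
    curl.setopt(pycurl.WRITEDATA, buf)
    if decompress:
        # Advertise gzip/deflate and let libcurl decode the body transparently.
        curl.setopt(pycurl.ENCODING, "gzip,deflate")
    else:
        # Unset the option: no Accept-Encoding header is sent and libcurl
        # performs no automatic decoding, so the caller sees the raw bytes.
        curl.setopt(pycurl.ENCODING, None)
    curl.perform()
    curl.close()
    return buf.getvalue()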

tornado/test/httpclient_test.py (+18 −0)

@@ -3,6 +3,7 @@
 import binascii
 from contextlib import closing
 import copy
+import gzip
 import threading
 import datetime
 from io import BytesIO
@@ -396,6 +397,23 @@ def test_types(self):
         self.assertEqual(type(response.code), int)
         self.assertEqual(type(response.effective_url), str)
 
+    def test_gzip(self):
+        # All the tests in this file should be using gzip, but this test
+        # ensures that it is in fact getting compressed, and also tests
+        # the httpclient's decompress=False option.
+        # Setting Accept-Encoding manually bypasses the client's
+        # decompression so we can see the raw data.
+        response = self.fetch(
+            "/chunk", decompress_response=False, headers={"Accept-Encoding": "gzip"}
+        )
+        self.assertEqual(response.headers["Content-Encoding"], "gzip")
+        self.assertNotEqual(response.body, b"asdfqwer")
+        # Our test data gets bigger when gzipped. Oops. :)
+        # Chunked encoding bypasses the MIN_LENGTH check.
+        self.assertEqual(len(response.body), 34)
+        f = gzip.GzipFile(mode="r", fileobj=response.buffer)
+        self.assertEqual(f.read(), b"asdfqwer")
+
     def test_header_callback(self):
         first_line = []
         headers = {}
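
The test added here (moved from simple_httpclient_test.py, see below) exercises the same pattern an application would use with decompress_response=False: request gzip explicitly and decode the raw body itself. A rough usage sketch under assumed conditions (a server at a placeholder localhost URL that gzips its responses; the curl client is optional) might look like this:

import gzip
from io import BytesIO

from tornado.httpclient import AsyncHTTPClient, HTTPClient

# Optionally select the curl-based client that this commit fixes; the default
# simple_httpclient accepts the same request options.
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")

client = HTTPClient()
response = client.fetch(
    "http://localhost:8888/chunk",        # placeholder URL
    decompress_response=False,            # keep the body compressed
    headers={"Accept-Encoding": "gzip"},  # still ask the server for gzip
)
assert response.headers.get("Content-Encoding") == "gzip"
raw = response.body  # gzip-compressed bytes, as sent on the wire
text = gzip.GzipFile(fileobj=BytesIO(raw)).read()
client.close()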

tornado/test/simple_httpclient_test.py (+0 −17)

@@ -1,7 +1,6 @@
 import collections
 from contextlib import closing
 import errno
-import gzip
 import logging
 import os
 import re
@@ -226,22 +225,6 @@ def test_redirect_connection_limit(self):
         response = yield client.fetch(self.get_url("/countdown/3"), max_redirects=3)
         response.rethrow()
 
-    def test_gzip(self):
-        # All the tests in this file should be using gzip, but this test
-        # ensures that it is in fact getting compressed.
-        # Setting Accept-Encoding manually bypasses the client's
-        # decompression so we can see the raw data.
-        response = self.fetch(
-            "/chunk", use_gzip=False, headers={"Accept-Encoding": "gzip"}
-        )
-        self.assertEqual(response.headers["Content-Encoding"], "gzip")
-        self.assertNotEqual(response.body, b"asdfqwer")
-        # Our test data gets bigger when gzipped. Oops. :)
-        # Chunked encoding bypasses the MIN_LENGTH check.
-        self.assertEqual(len(response.body), 34)
-        f = gzip.GzipFile(mode="r", fileobj=response.buffer)
-        self.assertEqual(f.read(), b"asdfqwer")
-
     def test_max_redirects(self):
         response = self.fetch("/countdown/5", max_redirects=3)
         self.assertEqual(302, response.code)
