Commit 769aae4
Fix broken TLS interception & CacheResponsesPlugin because UID is no longer a UUID (#866)
* Fix broken TLS interception because uid is now no longer a UUID
* Give enough context to work id for them to be unique within a `proxy.py` instance
* Use --port=0 by default within `proxy.TestCase`
* Attempt to fix weird buildx issue
* Add makefile targets within workflow
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* Order?
* Write scm file for make
* Fetch depth
* Quote patch
* Try with sudo?
* docker/buildx#850
* Remove sudo hack
* https://github.com/docker/buildx/issues/850#issuecomment-973270625
* Add explicit deps
* Add `requirements-testing.txt` during linting phase
* Pin buildx to v0.7.1
* Pin buildx to v0.7.0
* Revert back unnecessary change to dockerignore
* Skip container within make workflow (because GHA lacks support for docker on macOS by default)
* Repurpose make into developer workflow

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 0d12873 commit 769aae4

21 files changed

+131
-54
lines changed
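Annotation (not part of the commit): the breakage being fixed is that work ids had stopped being `uuid.UUID` objects (threadless executors were passing the accepted socket's `fileno` instead), while TLS interception (`self.uid.int`) and event publishing (`self.uid.hex`) still assumed UUID-only attributes. A rough Python sketch of the failure mode, with made-up values:

```python
# Illustrative sketch only; values are made up, attribute names follow the diff.
uid = 13                  # effectively what `uid=fileno` handed to the work class
try:
    serial = uid.int      # gen_ca_signed_certificate() relied on UUID.int
except AttributeError as exc:
    print(exc)            # 'int' object has no attribute 'int'
# This commit replaces the id with a plain string that is unique within a
# proxy.py instance; see proxy/core/acceptor/threadless.py below.
```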

.github/workflows/test-library.yml

+36 -5
@@ -631,6 +631,34 @@ jobs:
         npm run build
         cd ..

+  developer:
+    runs-on: ${{ matrix.os }}-latest
+    name: Developer setup ${{ matrix.node }} @ ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu, macOS]
+        python: ['3.10']
+      fail-fast: false
+    steps:
+    - uses: actions/checkout@v2
+      with:
+        fetch-depth: 0
+    - name: Setup Python
+      uses: actions/setup-python@v2
+      with:
+        python-version: ${{ matrix.python }}
+    - name: Install Pip Dependencies
+      run: |
+        make lib-dep
+    - name: Run essentials
+      run: |
+        ./write-scm-version.sh
+        python3 check.py
+        make https-certificates
+        make sign-https-certificates
+        make ca-certificates
+        python3 -m proxy --version
+
   docker:
     # TODO: To build our docker container, we must wait for check,
     # so that we can use the same distribution available.
@@ -658,18 +686,20 @@ jobs:
     steps:
     - name: Checkout
       uses: actions/checkout@v2
+    - name: Download all the dists
+      uses: actions/download-artifact@v2
+      with:
+        name: python-package-distributions
+        path: dist/
     - name: Set up Docker Buildx
       id: buildx
       uses: docker/setup-buildx-action@v1
       with:
+        # FIXME: See https://github.com/docker/buildx/issues/850#issuecomment-996408167
+        version: v0.7.0
         buildkitd-flags: --debug
         config: .github/buildkitd.toml
         install: true
-    - name: Download all the dists
-      uses: actions/download-artifact@v2
-      with:
-        name: python-package-distributions
-        path: dist/
     - name: Enable Multiarch # This slows down arm build by 4-5x
       run: |
         docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
@@ -699,6 +729,7 @@ jobs:
     - docker
     - dashboard
     - brew
+    - developer

     runs-on: Ubuntu-latest

Makefile

+7 -7
@@ -126,11 +126,11 @@ lib-release: lib-package

 lib-doc:
 	python -m tox -e build-docs && \
-	$(OPEN) .tox/build-docs/docs_out/index.html
+	$(OPEN) .tox/build-docs/docs_out/index.html || true

 lib-coverage:
 	pytest --cov=proxy --cov=tests --cov-report=html tests/ && \
-	$(OPEN) htmlcov/index.html
+	$(OPEN) htmlcov/index.html || true

 lib-profile:
 	ulimit -n 65536 && \
@@ -177,6 +177,11 @@ dashboard-clean:
 container: lib-package
 	$(MAKE) container-build -e PROXYPY_PKG_PATH=$$(ls dist/*.whl)

+container-build:
+	docker build \
+	-t $(PROXYPY_CONTAINER_TAG) \
+	--build-arg PROXYPY_PKG_PATH=$(PROXYPY_PKG_PATH) .
+
 # Usage:
 #
 # make container-buildx \
@@ -189,10 +194,5 @@ container-buildx:
 	-t $(PROXYPY_CONTAINER_TAG) \
 	--build-arg PROXYPY_PKG_PATH=$(PROXYPY_PKG_PATH) .

-container-build:
-	docker build \
-	-t $(PROXYPY_CONTAINER_TAG) \
-	--build-arg PROXYPY_PKG_PATH=$(PROXYPY_PKG_PATH) .
-
 container-run:
 	docker run -it -p 8899:8899 --rm $(PROXYPY_CONTAINER_TAG)

README.md

+4 -4
@@ -1322,10 +1322,10 @@ import proxy

 if __name__ == '__main__':
     with proxy.Proxy([]) as p:
-        print(p.acceptors.flags.port)
+        print(p.flags.port)
 ```

-`acceptors.flags.port` will give you access to the random port allocated by the kernel.
+`flags.port` will give you access to the random port allocated by the kernel.

 ## Loading Plugins

@@ -1384,7 +1384,7 @@ Note that:

 1. `proxy.TestCase` overrides `unittest.TestCase.run()` method to setup and tear down `proxy.py`.
 2. `proxy.py` server will listen on a random available port on the system.
-   This random port is available as `self.PROXY.acceptors.flags.port` within your test cases.
+   This random port is available as `self.PROXY.flags.port` within your test cases.
 3. Only a single acceptor and worker is started by default (`--num-workers 1 --num-acceptors 1`) for faster setup and tear down.
 4. Most importantly, `proxy.TestCase` also ensures `proxy.py` server
    is up and running before proceeding with execution of tests. By default,
@@ -2073,7 +2073,7 @@ usage: -m [-h] [--enable-events] [--enable-conn-pool] [--threadless]
           [--filtered-url-regex-config FILTERED_URL_REGEX_CONFIG]
           [--cloudflare-dns-mode CLOUDFLARE_DNS_MODE]

-proxy.py v2.3.2.dev190+ge60d80d.d20211124
+proxy.py v2.4.0rc2.dev21+g20b3eb1.d20211203

 options:
   -h, --help            show this help message and exit
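Annotation (not part of the commit): the `--port=0` plus `flags.port` behaviour described in the README hunk above lends itself to a small test. A minimal sketch, assuming `proxy.TestCase` starts the server exactly as the README text describes (class and test names are made up):

```python
import proxy


class TestWithEmbeddedProxy(proxy.TestCase):
    """proxy.py is started by TestCase.run() with --port=0 --num-workers 1 --num-acceptors 1."""

    def test_kernel_assigned_port_is_available(self) -> None:
        assert self.PROXY is not None
        # The ephemeral port picked by the kernel is read back from the flags
        # namespace, no longer via acceptors.flags.port.
        self.assertNotEqual(self.PROXY.flags.port, 0)
```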

check.py

+4 -3
@@ -60,11 +60,12 @@
 with open('README.md', 'rb+') as f:
     c = f.read()
     pre_flags, post_flags = c.split(b'# Flags')
-    help_text, post_changelog = post_flags.split(b'# Changelog')
     f.seek(0)
     f.write(
-        pre_flags + b'# Flags\n\n```console\n\xe2\x9d\xaf proxy -h\n' + lib_help + b'```' +
-        b'\n\n# Changelog' + post_changelog,
+        pre_flags +
+        b'# Flags\n\n```console\n\xe2\x9d\xaf proxy -h\n' +
+        lib_help +
+        b'```\n',
     )

 # Version is also hardcoded in README.md flags section

proxy/core/acceptor/acceptor.py

+1
@@ -191,6 +191,7 @@ def _start_local(self) -> None:
         assert self.sock
         self._local_work_queue = NonBlockingQueue()
         self._local = LocalExecutor(
+            iid=self.idd,
             work_queue=self._local_work_queue,
             flags=self.flags,
             event_queue=self.event_queue,

proxy/core/acceptor/executors.py

+1
@@ -175,6 +175,7 @@ def _start_worker(self, index: int) -> None:
         pipe = multiprocessing.Pipe()
         self.work_queues.append(pipe[0])
         w = RemoteExecutor(
+            iid=index,
             work_queue=pipe[1],
             flags=self.flags,
             event_queue=self.event_queue,

proxy/core/acceptor/threadless.py

+6 -1
@@ -59,11 +59,13 @@ class Threadless(ABC, Generic[T]):

     def __init__(
         self,
+        iid: str,
         work_queue: T,
         flags: argparse.Namespace,
         event_queue: Optional[EventQueue] = None,
     ) -> None:
         super().__init__()
+        self.iid = iid
         self.work_queue = work_queue
         self.flags = flags
         self.event_queue = event_queue
@@ -84,6 +86,7 @@ def __init__(
         ] = {}
         self.wait_timeout: float = DEFAULT_WAIT_FOR_TASKS_TIMEOUT
         self.cleanup_inactive_timeout: float = DEFAULT_INACTIVE_CONN_CLEANUP_TIMEOUT
+        self._total: int = 0

     @property
     @abstractmethod
@@ -122,14 +125,15 @@ def work_on_tcp_conn(
             fileno, family=socket.AF_INET if self.flags.hostname.version == 4 else socket.AF_INET6,
             type=socket.SOCK_STREAM,
         )
+        uid = '%s-%s-%s' % (self.iid, self._total, fileno)
         self.works[fileno] = self.flags.work_klass(
             TcpClientConnection(
                 conn=conn,
                 addr=addr,
             ),
             flags=self.flags,
             event_queue=self.event_queue,
-            uid=fileno,
+            uid=uid,
         )
         self.works[fileno].publish_event(
             event_name=eventNames.WORK_STARTED,
@@ -138,6 +142,7 @@
         )
         try:
             self.works[fileno].initialize()
+            self._total += 1
         except Exception as e:
             logger.exception(
                 'Exception occurred during initialization',
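Annotation (not part of the commit): `fileno` alone is not a stable identifier, since descriptors are recycled as connections close and each executor sees its own descriptor space. The `(iid, counter, fileno)` triple used in `work_on_tcp_conn()` above keeps ids unique within a `proxy.py` instance, which is what the commit message asks for. A small sketch with made-up values:

```python
# Mirrors the '%s-%s-%s' composition above; the helper name is hypothetical.
def work_id(iid: int, total: int, fileno: int) -> str:
    return '%s-%s-%s' % (iid, total, fileno)


assert work_id(0, 0, 13) != work_id(1, 0, 13)   # same fd, different executor
assert work_id(0, 0, 13) != work_id(0, 1, 13)   # fd recycled by the same executor
```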

proxy/core/acceptor/work.py

+4 -4
@@ -15,7 +15,7 @@
 import argparse

 from abc import ABC, abstractmethod
-from uuid import uuid4, UUID
+from uuid import uuid4
 from typing import Optional, Dict, Any

 from ..event import eventNames, EventQueue
@@ -31,10 +31,10 @@ def __init__(
         work: TcpClientConnection,
         flags: argparse.Namespace,
         event_queue: Optional[EventQueue] = None,
-        uid: Optional[UUID] = None,
+        uid: Optional[str] = None,
     ) -> None:
         # Work uuid
-        self.uid: UUID = uid if uid is not None else uuid4()
+        self.uid: str = uid if uid is not None else uuid4().hex
         self.flags = flags
         # Eventing core queue
         self.event_queue = event_queue
@@ -92,7 +92,7 @@ def publish_event(
             return
         assert self.event_queue
         self.event_queue.publish(
-            self.uid.hex,
+            self.uid,
             event_name,
             event_payload,
             publisher_id,

proxy/http/plugin.py

+2 -3
@@ -11,7 +11,6 @@
 import socket
 import argparse

-from uuid import UUID
 from abc import ABC, abstractmethod
 from typing import Tuple, List, Union, Optional

@@ -46,13 +45,13 @@ class HttpProtocolHandlerPlugin(ABC):

     def __init__(
         self,
-        uid: UUID,
+        uid: str,
         flags: argparse.Namespace,
         client: TcpClientConnection,
         request: HttpParser,
         event_queue: EventQueue,
     ):
-        self.uid: UUID = uid
+        self.uid: str = uid
         self.flags: argparse.Namespace = flags
         self.client: TcpClientConnection = client
         self.request: HttpParser = request

proxy/http/proxy/plugin.py

+1 -2
@@ -11,7 +11,6 @@
 import argparse

 from abc import ABC
-from uuid import UUID
 from typing import Any, Dict, List, Optional, Tuple

 from ..parser import HttpParser
@@ -28,7 +27,7 @@ class HttpProxyBasePlugin(ABC):

     def __init__(
         self,
-        uid: UUID,
+        uid: str,
         flags: argparse.Namespace,
         client: TcpClientConnection,
         event_queue: EventQueue,

proxy/http/proxy/server.py

+7 -10
@@ -307,11 +307,8 @@ async def read_from_descriptors(self, r: Readables) -> bool:
             # parse incoming response packet
             # only for non-https requests and when
             # tls interception is enabled
-            if not self.request.is_https_tunnel:
-                # See https://github.com/abhinavsingh/proxy.py/issues/127 for why
-                # currently response parsing is disabled when TLS interception is enabled.
-                #
-                # or self.tls_interception_enabled():
+            if not self.request.is_https_tunnel \
+                    or self.tls_interception_enabled():
                 if self.response.is_complete:
                     self.handle_pipeline_response(raw)
                 else:
@@ -733,7 +730,7 @@ def gen_ca_signed_certificate(
         ca_key_path = self.flags.ca_key_file
         ca_key_password = ''
         ca_crt_path = self.flags.ca_cert_file
-        serial = self.uid.int
+        serial = self.uid

         # Sign generated CSR
         if not os.path.isfile(cert_file_path):
@@ -903,7 +900,7 @@ def emit_request_complete(self) -> None:
             return
         assert self.request.port
         self.event_queue.publish(
-            request_id=self.uid.hex,
+            request_id=self.uid,
             event_name=eventNames.REQUEST_COMPLETE,
             event_payload={
                 'url': text_(self.request.path)
@@ -937,7 +934,7 @@ def emit_response_headers_complete(self) -> None:
         if not self.flags.enable_events:
             return
         self.event_queue.publish(
-            request_id=self.uid.hex,
+            request_id=self.uid,
             event_name=eventNames.RESPONSE_HEADERS_COMPLETE,
             event_payload={
                 'headers': {}
@@ -954,7 +951,7 @@ def emit_response_chunk_received(self, chunk_size: int) -> None:
         if not self.flags.enable_events:
             return
         self.event_queue.publish(
-            request_id=self.uid.hex,
+            request_id=self.uid,
             event_name=eventNames.RESPONSE_CHUNK_RECEIVED,
             event_payload={
                 'chunk_size': chunk_size,
@@ -967,7 +964,7 @@ def emit_response_complete(self) -> None:
         if not self.flags.enable_events:
             return
         self.event_queue.publish(
-            request_id=self.uid.hex,
+            request_id=self.uid,
             event_name=eventNames.RESPONSE_COMPLETE,
             event_payload={
                 'encoded_response_size': self.response.total_size,

proxy/http/server/plugin.py

+1 -2
@@ -10,7 +10,6 @@
 """
 import argparse

-from uuid import UUID
 from abc import ABC, abstractmethod
 from typing import Any, Dict, List, Optional, Tuple

@@ -27,7 +26,7 @@ class HttpWebServerBasePlugin(ABC):

     def __init__(
         self,
-        uid: UUID,
+        uid: str,
         flags: argparse.Namespace,
         client: TcpClientConnection,
         event_queue: EventQueue,

proxy/plugin/cache/store/base.py

+2 -2
@@ -10,13 +10,13 @@
 """
 from abc import ABC, abstractmethod
 from typing import Optional
-from uuid import UUID
+
 from ....http.parser import HttpParser


 class CacheStore(ABC):

-    def __init__(self, uid: UUID) -> None:
+    def __init__(self, uid: str) -> None:
         self.uid = uid

     @abstractmethod
