
Commit

fix: fix multiple / detection in urls
Switch to regex instead of replace to handle `///`
alandtse committed Nov 25, 2023
1 parent ea4447b commit 2352e22
Showing 2 changed files with 11 additions and 8 deletions.
authcaptureproxy/auth_capture_proxy.py (7 additions, 5 deletions)
@@ -325,16 +325,16 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
             elif json_data:
                 self.data.update(json_data)
                 _LOGGER.debug("Storing json %s", json_data)
-        if URL(str(request.url)).path == self._proxy_url.with_path(
-            f"{self._proxy_url.path}/stop"
-        ).path.replace("//", "/"):
+        if URL(str(request.url)).path == re.sub(
+            r"/+", "/", self._proxy_url.with_path(f"{self._proxy_url.path}/stop").path
+        ):
             self.all_handler_active = False
             if self.active:
                 asyncio.create_task(self.stop_proxy(3))
             return web.Response(text="Proxy stopped.")
         elif (
             URL(str(request.url)).path
-            == self._proxy_url.with_path(f"{self._proxy_url.path}/resume").path.replace("//", "/")
+            == re.sub(r"/+", "/", self._proxy_url.with_path(f"{self._proxy_url.path}/resume").path)
             and self.last_resp
             and isinstance(self.last_resp, httpx.Response)
         ):
@@ -344,7 +344,9 @@ async def _process_multipart(reader: MultipartReader, writer: MultipartWriter) -
         else:
             if URL(str(request.url)).path in [
                 self._proxy_url.path,
-                self._proxy_url.with_path(f"{self._proxy_url.path}/resume").path.replace("//", "/"),
+                re.sub(
+                    r"/+", "/", self._proxy_url.with_path(f"{self._proxy_url.path}/resume").path
+                ),
             ]:
                 # either base path or resume without anything to resume
                 site = str(URL(self._host_url))
authcaptureproxy/helper.py (4 additions, 3 deletions)
@@ -7,6 +7,7 @@
 import ast
 import json
 import logging
+import re
 from asyncio import iscoroutinefunction
 from http.cookies import SimpleCookie
 from typing import Any, Callable, Dict, List, Mapping, Text, Union
@@ -169,13 +170,13 @@ def swap_url(
     if ignore_query:
         result = URL(url_string.replace(old_url_string, new_url_string))
         # clean up any // in path
-        return result.with_path(result.path.replace("//", "/")).with_query(old_query)
+        return result.with_path(re.sub(r"/+", "/", result.path)).with_query(old_query)
     new_query = {}
     for key, value in old_query.items():
         if value:
             new_query[key] = value.replace(old_url_string, new_url_string)
     result = URL(url_string.replace(old_url_string, new_url_string))
-    return result.with_path(result.path.replace("//", "/")).update_query(new_query)
+    return result.with_path(re.sub(r"/+", "/", result.path)).update_query(new_query)


 def prepend_url(base_url: URL, url: URL, encoded: bool = False) -> URL:
@@ -194,7 +195,7 @@ def prepend_url(base_url: URL, url: URL, encoded: bool = False) -> URL:
         query = url.query
         path = url.path
         return base_url.with_path(
-            f"{base_url.path}{path}".replace("//", "/"), encoded=encoded
+            re.sub(r"/+", "/", f"{base_url.path}{path}"), encoded=encoded
         ).with_query(query)
     return url

