Skip to content

Commit

Permalink
test_url_query: Add tests for double-decoding of query values
Browse files Browse the repository at this point in the history
Add test for the problem we're trying to solve in encoding and decoding
query values. (aio-libsGH-210)

Also drop FIXME for the case of `a` vs `a=` in query string, as we don't
have plans to differentiate between them for the time being.
  • Loading branch information
besfahbod committed Aug 15, 2018
1 parent f128d31 commit de773e9
Showing 1 changed file with 58 additions and 9 deletions.
67 changes: 58 additions & 9 deletions tests/test_url_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@
# ========================================

URLS_WITH_BASIC_QUERY_VALUES = [

# Empty strings, keys and values
(
URL('http://example.com'),
MultiDict(),
Expand All @@ -26,8 +24,9 @@
URL('http://example.com?a='),
MultiDict([('a', '')]),
),
] # type: List[Tuple[URL, MultiDict]]

# ASCII chars
URLS_WITH_ASCII_QUERY_VALUES = [
(
# TODO: Double check if key is expected as `a b` or `a+b`.
URL('http://example.com?a+b=c+d'),
Expand All @@ -41,8 +40,10 @@
URL('http://example.com?a=1&b=2&a=3'),
MultiDict([('a', '1'), ('b', '2'), ('a', '3')]),
),
] # type: List[Tuple[URL, MultiDict]]

# Non-ASCII BMP chars
URLS_WITH_NON_ASCII_QUERY_VALUES = [
# BMP chars
(
URL('http://example.com?ΠΊΠ»ΡŽΡ‡=Π·Π½Π°Ρ‡'),
MultiDict({'ΠΊΠ»ΡŽΡ‡': 'Π·Π½Π°Ρ‡'}),
Expand All @@ -62,7 +63,9 @@

@pytest.mark.parametrize(
'original_url, expected_query',
URLS_WITH_BASIC_QUERY_VALUES,
URLS_WITH_BASIC_QUERY_VALUES
+ URLS_WITH_ASCII_QUERY_VALUES
+ URLS_WITH_NON_ASCII_QUERY_VALUES,
)
def test_query_basic_parsing(original_url, expected_query):
assert isinstance(original_url.query, MultiDictProxy)
Expand All @@ -71,13 +74,11 @@ def test_query_basic_parsing(original_url, expected_query):

@pytest.mark.parametrize(
'original_url, expected_query',
URLS_WITH_BASIC_QUERY_VALUES,
URLS_WITH_ASCII_QUERY_VALUES + URLS_WITH_NON_ASCII_QUERY_VALUES,
)
def test_query_basic_update_query(original_url, expected_query):
new_url = original_url.update_query({})
# FIXME: `?a` becomes `?a=` right now. Maybe support `None` values?
# assert new_url == original_url
assert new_url is not None
assert new_url == original_url


def test_query_dont_unqoute_twice():
Expand Down Expand Up @@ -167,3 +168,51 @@ def test_query_from_empty_update_query(

# FIXME: Broken because of asymmetric query encoding
# assert new_url == original_url


# ========================================
# Setting and getting query values
# ========================================

def test_query_set_encoded_url_as_value():
    """Regression test for double-decoding of query values (aio-libs GH-210).

    Uses an already-percent-encoded URL string as a query value and checks
    how it survives `update_query()` / `with_query()` round-trips.  The
    active assertions pin the CURRENT (buggy) double-decoding behavior;
    the commented-out assertions under each FIXME describe the behavior
    expected once the bug is fixed.
    """
    base = URL('http://example.com')

    # A value with astral-plane (non-BMP) characters survives one
    # encode/decode round-trip intact.
    unicode_url = base.update_query({'foo': 'π•¦π•Ÿπ•š'})
    assert unicode_url.query['foo'] == 'π•¦π•Ÿπ•š'

    unicode_url_str = str(unicode_url)
    assert unicode_url_str == (
        'http://example.com/?foo=%F0%9D%95%A6%F0%9D%95%9F%F0%9D%95%9A'
    )

    # update_query(): storing that encoded URL string as a value decodes
    # it twice on the way back out.
    via_update = base.update_query({'bar': unicode_url_str})
    # FIXME: Double-decoding query value
    # assert via_update.query['bar'] == unicode_url_str
    assert via_update.query['bar'] == 'http://example.com/?foo=π•¦π•Ÿπ•š'

    # FIXME: Double-decoding query value
    # assert str(via_update) == (
    #     'http://example.com/?bar=http%3A//example.com/%3Ffoo%3D'
    #     '%25F0%259D%2595%25A6%25F0%259D%2595%259F%25F0%259D%2595%259A'
    # )
    assert str(via_update) == (
        'http://example.com/?bar=http://example.com/?foo%3D'
        '%F0%9D%95%A6%F0%9D%95%9F%F0%9D%95%9A'
    )

    # with_query(): exhibits the same double-decoding as update_query().
    via_with = base.with_query({'bar': unicode_url_str})
    # FIXME: Double-decoding query value
    # assert via_with.query['bar'] == unicode_url_str
    assert via_with.query['bar'] == 'http://example.com/?foo=π•¦π•Ÿπ•š'

    # FIXME: Double-decoding query value
    # assert str(via_with) == (
    #     'http://example.com/?bar=http%3A//example.com/%3Ffoo%3D'
    #     '%25F0%259D%2595%25A6%25F0%259D%2595%259F%25F0%259D%2595%259A'
    # )
    assert str(via_with) == (
        'http://example.com/?bar=http://example.com/?foo%3D'
        '%F0%9D%95%A6%F0%9D%95%9F%F0%9D%95%9A'
    )

0 comments on commit de773e9

Please sign in to comment.