
Commit e67e94f

feat: [lexer]: implemented hypothesis tests for lexer
1 parent d86704f commit e67e94f

3 files changed: +12 -34 lines

image_formatter/lexer/lexer.py (+3 -1)

@@ -99,7 +99,9 @@ def verify_special_signs(cls, signs: Tuple[str]) -> bool:
         Raises:
             InvalidConfigCharacterError: when invalid character is found
         """
-        # todo - check (possibly everything might be valid)
+        invalid_chars = [" ", "(", ")"]
+        if any([sign in invalid_chars for sign in signs]):
+            raise InvalidConfigCharacterError("<space>", [])
         return True
 
     @classmethod

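For orientation, a self-contained sketch of the check this hunk introduces. It is a reconstruction from the diff, not the project's code: the real method is the Lexer.verify_special_signs classmethod, the exception class here is a stand-in, and the sketch reports the offending sign instead of the hardcoded "<space>" argument seen above.

# Stand-in sketch of the validation added above; not the project's Lexer.
from typing import Tuple


class InvalidConfigCharacterError(Exception):
    """Stand-in for the project's exception of the same name."""

    def __init__(self, char: str, allowed: list):
        super().__init__(f"invalid config character: {char!r}")
        self.char = char
        self.allowed = allowed


INVALID_CHARS = [" ", "(", ")"]  # same characters the diff rejects


def verify_special_signs(signs: Tuple[str, ...]) -> bool:
    """Return True if every sign is usable; raise on reserved characters."""
    for sign in signs:
        if sign in INVALID_CHARS:
            raise InvalidConfigCharacterError(sign, INVALID_CHARS)
    return True


print(verify_special_signs(("-", "_")))   # True
# verify_special_signs(("(", "_"))        # raises InvalidConfigCharacterError
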
tests/lexer/test_hypothesis_lexer.py (+7 -31)

@@ -10,17 +10,17 @@
 
 
 def special_sign():
-    chars_to_exclude = ["\n", "\r", "\r\n", "\x0b", "\v", "\f"] + list("@#$%&~>?+=:") + list("-_.~:/?#[]@!$&'()*+,;=%")
+    chars_to_exclude = ["\n", "\r", "@", "/", ".", " ", "(", ")", "&"]
     return st.text(min_size=1, max_size=1).filter(lambda s: all(char not in s for char in chars_to_exclude))
 
 
 def special_sign_tuples():
-    return st.tuples(special_sign(), special_sign())
+    return st.tuples(special_sign(), special_sign(), special_sign())
 
 
 @given(special_sign_tuples())
 def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned(special_signs):
-    text = f"@tag1(url1.png)@one{special_signs[0]}more{special_signs[1]}tag&and_word"
+    text = f"@tag1(url1.png)@one{special_signs[0]}more{special_signs[1]}tag&and{special_signs[2]}word"
     fp = io.StringIO(text)
     lexer = Lexer(fp, special_signs=special_signs)
     tokens = get_all_tokens(lexer)
@@ -31,13 +31,6 @@ def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned(special_signs):
         TokenType.T_CHAR,
         TokenType.T_LITERAL,
     ]
-    assert [token.string for token in tokens] == [
-        "tag1",
-        "url1.png",
-        "one-more-tag",
-        "&",
-        "and_word",
-    ]
     assert [token.position for token in tokens] == [
         Position(1, 1),
         Position(1, 6),
@@ -47,8 +40,9 @@
     ]
 
 
-def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned():
-    text = "word1, word2 $$ @tag1-tag \n\n @tag2(start-of/url.png)"
+@given(special_sign_tuples())
+def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned(special_signs):
+    text = f"word1& word2 && @tag1{special_signs[0]}tag \n\n @tag2(start{special_signs[1]}of/url.png)"
     expected_types = [
         TokenType.T_LITERAL,
         TokenType.T_CHAR,
@@ -66,23 +60,6 @@ def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned
         TokenType.T_IMAGE_SIZE_TAG,
         TokenType.T_IMAGE_URL,
     ]
-    expected_strings = [
-        "word1",
-        ",",
-        " ",
-        "word2",
-        " ",
-        "$",
-        "$",
-        " ",
-        "tag1-tag",
-        " ",
-        "\n",
-        "\n",
-        " ",
-        "tag2",
-        "start-of/url.png",
-    ]
     expected_positions = [
         Position(1, 1),
         Position(1, 6),
@@ -101,10 +78,9 @@ def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned
         Position(3, 7),
     ]
     fp = io.StringIO(text)
-    lexer = Lexer(fp)
+    lexer = Lexer(fp, special_signs=special_signs)
     tokens = get_all_tokens(lexer)
    assert len(tokens) == len(expected_types)
     assert len(tokens) == len(expected_positions)
     assert [token.type for token in tokens] == expected_types
-    assert [token.string for token in tokens] == expected_strings
     assert [token.position for token in tokens] == expected_positions

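The strategies at the top of this file are plain Hypothesis building blocks and can be exercised on their own. A minimal sketch assuming only the hypothesis package: chars_to_exclude is copied from the diff, while the property checked here is illustrative and not one of the project's lexer tests.

# Standalone sketch of the strategy pattern used in the diff.
from hypothesis import given, strategies as st

CHARS_TO_EXCLUDE = ["\n", "\r", "@", "/", ".", " ", "(", ")", "&"]


def special_sign():
    # Single-character strings with the lexer's reserved characters filtered out.
    return st.text(min_size=1, max_size=1).filter(
        lambda s: all(char not in s for char in CHARS_TO_EXCLUDE)
    )


def special_sign_tuples():
    # Three independent signs, one per interpolation slot in the test text.
    return st.tuples(special_sign(), special_sign(), special_sign())


@given(special_sign_tuples())
def test_generated_signs_avoid_reserved_characters(signs):
    assert len(signs) == 3
    for sign in signs:
        assert len(sign) == 1 and sign not in CHARS_TO_EXCLUDE

Drawing three signs rather than two matches the third interpolation slot ({special_signs[2]}) added to the test text above.
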
tests/lexer/test_unit_lexer.py (+2 -2)

@@ -230,7 +230,7 @@ def test_when_literal_starts_with_digit_then_literal_token_without_starting_digi
 
 
 def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned():
-    text = "@tag1(url1.png)@one0more-tag&and_word"
+    text = "@tag1(url1.png)@one-more-tag&and_word"
     fp = io.StringIO(text)
     lexer = Lexer(fp)
     tokens = get_all_tokens(lexer)
@@ -425,7 +425,7 @@ def test_given_invalid_newline_characters_then_exception_is_raised():
         Lexer.verify_newline_characters(invalid_chars)
 
 
-@pytest.mark.parametrize("chars", [("-", "~", "?"), ("&", "*", ">"), (":", "/", ",")])
+@pytest.mark.parametrize("chars", [("-", "~", "?"), ("&", "*"), (":", "/", ",")])
 def test_given_valid_additional_path_signs_then_verify_returns_true(chars):
     assert Lexer.verify_additional_path_signs(chars) == True

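The parametrized cases above fix concrete sign tuples; in the spirit of this commit, the same kind of validation can also be phrased as a Hypothesis property. A hedged sketch in which verify_signs is a local stand-in applying the space/parenthesis rule from lexer.py, not the project's Lexer.verify_additional_path_signs or verify_special_signs:

# Property-style counterpart to the parametrized check; all names local to this sketch.
import pytest
from hypothesis import given, strategies as st

INVALID_CHARS = [" ", "(", ")"]  # mirrors the rejection list added in lexer.py


class InvalidConfigCharacterError(Exception):
    """Stand-in for the project's exception."""


def verify_signs(signs):
    for sign in signs:
        if sign in INVALID_CHARS:
            raise InvalidConfigCharacterError(sign)
    return True


valid_sign = st.text(min_size=1, max_size=1).filter(lambda s: s not in INVALID_CHARS)


@given(st.tuples(valid_sign, valid_sign, valid_sign))
def test_valid_signs_are_accepted(signs):
    assert verify_signs(signs) is True


@pytest.mark.parametrize("bad", [" ", "(", ")"])
def test_invalid_signs_are_rejected(bad):
    with pytest.raises(InvalidConfigCharacterError):
        verify_signs(("-", bad))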