2
2
from image_formatter .lexer .token import TokenType
3
3
from image_formatter .lexer .position import Position
4
4
from tests .test_helpers import get_all_tokens
5
- import sys
6
5
import io
7
- import pytest
8
6
from hypothesis import strategies as st
9
7
from hypothesis import given
10
8
9
+ """
10
+ Lexer has some configurations that depend on user input (tag, special_signs, newline_characters and
11
+ additional_path_signs). Most of them have a strict list of available symbols. Special_signs doesn't
12
+ have that. The following tests check that such freedom doesn't break the lexer's logic.
13
+ They are similar to unit tests in test_unit_lexer.py, but we decided to keep both, because one
14
+ might want to execute unit tests without hypothesis tests as they are a bit more time consuming.
15
+ """
16
+
11
17
12
18
def special_sign ():
13
19
chars_to_exclude = ["\n " , "\r " , "@" , "/" , "." , " " , "(" , ")" , "&" ]
@@ -19,7 +25,9 @@ def special_sign_tuples():
19
25
20
26
21
27
@given (special_sign_tuples ())
22
- def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned (special_signs ):
28
+ def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned (
29
+ special_signs ,
30
+ ):
23
31
text = f"@tag1(url1.png)@one{ special_signs [0 ]} more{ special_signs [1 ]} tag&and{ special_signs [2 ]} word"
24
32
fp = io .StringIO (text )
25
33
lexer = Lexer (fp , special_signs = special_signs )
@@ -41,7 +49,9 @@ def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned(speci
41
49
42
50
43
51
@given (special_sign_tuples ())
44
- def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned (special_signs ):
52
+ def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned (
53
+ special_signs ,
54
+ ):
45
55
text = f"word1& word2 && @tag1{ special_signs [0 ]} tag \n \n @tag2(start{ special_signs [1 ]} of/url.png)"
46
56
expected_types = [
47
57
TokenType .T_LITERAL ,
0 commit comments