Skip to content

Commit 72491e0

Browse files
committed
feat: [tests]: Added comment to hypothesis tests
1 parent 420e0a1 commit 72491e0

File tree

2 files changed

+17
-7
lines changed

2 files changed

+17
-7
lines changed

image_formatter/image_properties_tag_replacer/image_properties_tag_replacer.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@ def __init__(self, lex: Lexer, image_tags_properties: dict, error_handler: Error
2727
self.image_tags_properties = image_tags_properties
2828
self.error_handler = error_handler
2929

30-
@staticmethod
31-
def name() -> str:
32-
return __class__.__name__
30+
@classmethod
31+
def name(cls) -> str:
32+
return cls.__name__
3333

3434
def next_token(self):
3535
self.curr_token = self.lexer.get_token()

tests/lexer/test_hypothesis_lexer.py

+14-4
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,18 @@
22
from image_formatter.lexer.token import TokenType
33
from image_formatter.lexer.position import Position
44
from tests.test_helpers import get_all_tokens
5-
import sys
65
import io
7-
import pytest
86
from hypothesis import strategies as st
97
from hypothesis import given
108

9+
"""
10+
Lexer has some configurations that depend on user input (tag, special_signs, newline_characters and
11+
additional_path_signs). Most of them have a strict list of available symbols; special_signs doesn't
12+
have that. The following tests check that such freedom doesn't break the lexer's logic.
13+
They are similar to unit tests in test_unit_lexer.py, but we decided to keep both, because one
14+
might want to execute unit tests without hypothesis tests, as the latter are a bit more time-consuming.
15+
"""
16+
1117

1218
def special_sign():
1319
chars_to_exclude = ["\n", "\r", "@", "/", ".", " ", "(", ")", "&"]
@@ -19,7 +25,9 @@ def special_sign_tuples():
1925

2026

2127
@given(special_sign_tuples())
22-
def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned(special_signs):
28+
def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned(
29+
special_signs,
30+
):
2331
text = f"@tag1(url1.png)@one{special_signs[0]}more{special_signs[1]}tag&and{special_signs[2]}word"
2432
fp = io.StringIO(text)
2533
lexer = Lexer(fp, special_signs=special_signs)
@@ -41,7 +49,9 @@ def test_given_text_when_tags_not_separated_by_spaces_then_tokens_returned(speci
4149

4250

4351
@given(special_sign_tuples())
44-
def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned(special_signs):
52+
def test_given_complex_text_with_special_chars_then_sequence_of_tokens_is_returned(
53+
special_signs,
54+
):
4555
text = f"word1& word2 && @tag1{special_signs[0]}tag \n\n @tag2(start{special_signs[1]}of/url.png)"
4656
expected_types = [
4757
TokenType.T_LITERAL,

0 commit comments

Comments
 (0)