Skip to content

Commit c5b8ec1

Browse files
author
Mateusz Bronisław Wasilewski
committed
Merge branch 'token-to-string-converter' into 'main'
Create Token to string converter. See merge request mwasilew/2023-ZPRP!19
2 parents 26f185b + 2b09311 commit c5b8ec1

File tree

9 files changed

+130
-10
lines changed

9 files changed

+130
-10
lines changed

image_formatter/image_properties_tag_replacer/image_properties_tag_replacer.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,15 @@
22

33
from image_formatter.lexer.lexer import Lexer
44
from image_formatter.lexer.token import TokenType, Token
5+
from image_formatter.lexer.token_stream_processor import TokenStreamProcessor
56
from image_formatter.error_handler.error_handler import ErrorHandler
67
from image_formatter.error_handler.errors import UnexpectedTagException
78
from mkdocs.plugins import get_plugin_logger
89

910
log = get_plugin_logger(__name__)
1011

1112

12-
class ImagePropertiesTagReplacer:
13+
class ImagePropertiesTagReplacer(TokenStreamProcessor):
1314
"""
1415
Class ImagePropertiesTagReplacer responsible for replacing image size tags with properties after the image URL.
1516
Focuses only on the plugin's purpose - images with added size tags

image_formatter/lexer/lexer.py

+7-5
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
from image_formatter.lexer.token import Token, TokenType, IntegerToken
1+
from image_formatter.lexer.token import Token, TokenType, IntegerToken, TagToken
22
from image_formatter.lexer.position import Position
3+
from image_formatter.lexer.token_stream_processor import TokenStreamProcessor
34
from image_formatter.error_handler.errors import InvalidConfigCharacterError
45
import io
56
import sys
@@ -10,7 +11,7 @@
1011
log = get_plugin_logger(__name__)
1112

1213

13-
class Lexer:
14+
class Lexer(TokenStreamProcessor):
1415
"""
1516
Class representing Lexer.
1617
Responsible for going through the characters from source input one by one and
@@ -262,11 +263,12 @@ def build_integer(self) -> IntegerToken or None:
262263
def _is_number_in_range(self, number):
263264
return number * 10 + int(self.current_char) <= self.max_int
264265

265-
def build_tag(self) -> Token or None:
266+
def build_tag(self) -> TagToken or None:
266267
"""
267268
Tries to build an image tag token according to:
268269
```
269-
image_size_tag = '@', literal
270+
image_size_tag = tag_character, literal
271+
tag_character by default is '@'
270272
```
271273
272274
Returns:
@@ -284,7 +286,7 @@ def build_tag(self) -> Token or None:
284286
log.info(f"{Lexer.name()}: Failed to build a tag. Missing token 'T_LITERAL'.")
285287
return None
286288
log.info(f"{Lexer.name()}: Tag built successfully. Returning 'T_IMAGE_SIZE_TAG' token.")
287-
return Token(TokenType.T_IMAGE_SIZE_TAG, position, token.string)
289+
return TagToken(TokenType.T_IMAGE_SIZE_TAG, position, token.string, self.tag)
288290

289291
def get_url_ending(self, string: str) -> str or None:
290292
"""

image_formatter/lexer/token.py

+17
Original file line numberDiff line numberDiff line change
@@ -55,3 +55,20 @@ def __init__(self, type: TokenType, position: Position, integer: int):
5555
"""
5656
super(IntegerToken, self).__init__(type, position, str(integer))
5757
self.integer = integer
58+
59+
60+
class TagToken(Token):
    """
    Token produced for image size tags.

    Keeps the character that introduced the tag (e.g. '@') so the
    original source text can be rebuilt from the token stream.
    """

    def __init__(self, type: TokenType, position: Position, string: str = "", tag_character: str = ""):
        """
        Args:
            type: type of the token
            position: position of the first character of the token
            string: final version of token's text
            tag_character: characteristic character for the tag
        """
        super().__init__(type, position, string)
        self.tag_character = tag_character
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
from image_formatter.lexer.token import Token
2+
3+
4+
class TokenStreamProcessor:
    """
    Abstract base for components that produce a stream of tokens.

    Concrete processors override ``get_token`` with their own
    token-producing logic.
    """

    def get_token(self) -> Token:
        """
        Produce a token; must be overridden by subclasses.

        Raises:
            NotImplementedError: always, on the base class itself.
        """
        raise NotImplementedError()

image_formatter/token_to_string_converter/__init__.py

Whitespace-only changes.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
from image_formatter.lexer.token_stream_processor import TokenStreamProcessor
2+
from image_formatter.lexer.token import TagToken
3+
4+
5+
class TokenToStringConverter:
    """
    Reassembles the textual representation of a token stream.

    Drains the wrapped ``TokenStreamProcessor`` and concatenates each
    token's string; tag tokens are re-prefixed with the character that
    introduced them so the output matches the original source text.
    """

    def __init__(self, token_stream_processor: TokenStreamProcessor):
        """
        Args:
            token_stream_processor: source of tokens (e.g. a Lexer or
                an ImagePropertiesTagReplacer wrapping one).
        """
        self.token_stream_processor = token_stream_processor

    def to_text(self) -> str:
        """
        Returns:
            The full text reconstructed from all available tokens.
        """
        parts = []
        for token in self.get_all_tokens():
            # isinstance (not `type(token) == TagToken`) so that any
            # TagToken subclass is also rendered with its tag character.
            if isinstance(token, TagToken):
                parts.append(token.tag_character)
            parts.append(token.string)
        # join once instead of repeated `+=` (quadratic on long streams)
        return "".join(parts)

    def get_all_tokens(self) -> list:
        """
        Drain the processor and return every produced token.

        NOTE(review): this iterates over ``get_token()``, so processor
        implementations appear to be generators despite the base-class
        ``-> Token`` annotation — confirm.
        """
        return list(self.token_stream_processor.get_token())

tests/image_properties_tag_replacer/test_integration_image_properties_tag_replacer_lexer.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from image_formatter.lexer.lexer import Lexer
2-
from image_formatter.lexer.token import Token, TokenType
2+
from image_formatter.lexer.token import Token, TokenType, TagToken
33
from image_formatter.lexer.position import Position
44
from image_formatter.image_properties_tag_replacer.image_properties_tag_replacer import ImagePropertiesTagReplacer
55
import io
@@ -30,12 +30,12 @@ def test_given_no_image_links_then_nothing_is_replaced():
3030
Token(TokenType.T_CHAR, Position(1, 14), "$"),
3131
Token(TokenType.T_CHAR, Position(1, 15), "$"),
3232
Token(TokenType.T_WHITE_CHAR, Position(1, 16), " "),
33-
Token(TokenType.T_WHITE_CHAR, Position(1, 17), "tag1-tag"),
33+
TagToken(TokenType.T_IMAGE_SIZE_TAG, Position(1, 17), "tag1-tag"),
3434
Token(TokenType.T_WHITE_CHAR, Position(1, 26), " "),
3535
Token(TokenType.T_WHITE_CHAR, Position(1, 27), "\n"),
3636
Token(TokenType.T_WHITE_CHAR, Position(2, 1), "\n"),
3737
Token(TokenType.T_WHITE_CHAR, Position(3, 1), " "),
38-
Token(TokenType.T_WHITE_CHAR, Position(3, 2), "tag2"),
38+
TagToken(TokenType.T_IMAGE_SIZE_TAG, Position(3, 2), "tag2"),
3939
Token(TokenType.T_WHITE_CHAR, Position(3, 7), " "),
4040
Token(TokenType.T_CHAR, Position(3, 8), "x"),
4141
Token(TokenType.T_WHITE_CHAR, Position(3, 9), " "),
@@ -138,7 +138,7 @@ def test_given_sequence_of_tokens_with_one_valid_image_tag_then_one_image_tag_is
138138
Token(TokenType.T_WHITE_CHAR, Position(1, 26), " "),
139139
Token(TokenType.T_LITERAL, Position(1, 27), "word"),
140140
Token(TokenType.T_WHITE_CHAR, Position(1, 31), " "),
141-
Token(TokenType.T_IMAGE_SIZE_TAG, Position(1, 32), "big"),
141+
TagToken(TokenType.T_IMAGE_SIZE_TAG, Position(1, 32), "big"),
142142
Token(TokenType.T_WHITE_CHAR, Position(1, 36), " "),
143143
Token(TokenType.T_CHAR, Position(1, 37), "*"),
144144
Token(TokenType.T_IMAGE_URL, Position(1, 38), "(next/longer.url.jpg)"),

tests/token_to_string_converter/__init__.py

Whitespace-only changes.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
import io
2+
3+
from image_formatter.lexer.lexer import Lexer
4+
from image_formatter.image_properties_tag_replacer.image_properties_tag_replacer import ImagePropertiesTagReplacer
5+
from image_formatter.token_to_string_converter.token_to_string_converter import TokenToStringConverter
6+
7+
# TODO(review): inline this shared global into the tests or expose it as a
# pytest fixture.
# Maps tag names (the literal following the tag character, e.g. '@small')
# to the style properties substituted after the image URL.
image_tags_properties = {
    "small": {"height": "100px", "width": "100px"},
    "small2": {"height": "110px", "width": "110px"},
}
12+
13+
14+
def test_file1_literals():
    """Round-trip test1.txt: the pipeline is expected to emit the file's text unchanged."""
    path = "./resources/test_files/test1.txt"
    with open(path, "r") as source:
        expected = source.read()
    with open(path, "r") as source:
        replacer = ImagePropertiesTagReplacer(Lexer(source), image_tags_properties)  # noqa
        result = TokenToStringConverter(replacer).to_text()
    assert expected == result
24+
25+
26+
def test_file2_mock():
    """In-memory input: '@small2' before a URL is replaced with its style properties."""
    text = """
1hello1 &&@small2
@small2(some/url.com)+word

"""
    expected = """
1hello1 &&@small2
(some/url.com){: style="height:110px;width:110px"}+word

"""
    stream = io.StringIO(text)
    replacer = ImagePropertiesTagReplacer(Lexer(stream), image_tags_properties)  # noqa
    result = TokenToStringConverter(replacer).to_text()
    assert expected == result
43+
44+
45+
def test_file3_classic_macos_newline():
    """Round-trip a file with classic macOS ('\\r') newlines through the pipeline."""
    path = "./resources/test_files/test3_classic_macos_newline.txt"
    with open(path, "r") as source:
        expected = source.read()
    with open(path, "r") as source:
        replacer = ImagePropertiesTagReplacer(Lexer(source), image_tags_properties)  # noqa
        result = TokenToStringConverter(replacer).to_text()
    assert expected == result
55+
56+
57+
def test_file4_unix_and_macos_newline():
    """Round-trip a file mixing Unix and macOS newlines through the pipeline."""
    path = "./resources/test_files/test4_unix_and_macos_newline.txt"
    with open(path, "r") as source:
        expected = source.read()
    with open(path, "r") as source:
        replacer = ImagePropertiesTagReplacer(Lexer(source), image_tags_properties)  # noqa
        result = TokenToStringConverter(replacer).to_text()
    assert expected == result

0 commit comments

Comments
 (0)