
Commit b126ba5

Fix tokenization of order keywords (fixes #532).
1 parent: 0c4902f

File tree

3 files changed: +13 -8 lines changed

  CHANGELOG
  sqlparse/keywords.py
  tests/test_tokenize.py

CHANGELOG

+2 -2
@@ -13,11 +13,11 @@ Bug Fixes
   Note: In some cases you might want to add `strip_whitespace=True` where you
   previously used just `strip_comments=True`. `strip_comments` did some of the
   work that `strip_whitespace` should do.
-
 * Fix error when splitting statements that contain multiple CASE clauses
   within a BEGIN block (issue784).
-
 * Fix whitespace removal with nested expressions (issue782).
+* Fix parsing and formatting of ORDER clauses containing NULLS FIRST or
+  NULLS LAST (issue532).


 Release 0.5.0 (Apr 13, 2024)
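Aside: the new entry mentions formatting as well as parsing. A minimal sketch
of the formatting side, assuming the standard keyword_case option is the
relevant one (it applies to Keyword subtypes such as Keyword.Order):

    import sqlparse

    # Assumption: with DESC NULLS LAST tokenized as Keyword.Order,
    # keyword_case='upper' now uppercases the whole ordering clause.
    sql = 'select * from foo order by bar desc nulls last'
    print(sqlparse.format(sql, keyword_case='upper'))
    # Expected: SELECT * FROM foo ORDER BY bar DESC NULLS LAST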

sqlparse/keywords.py

+3 -3
@@ -71,7 +71,9 @@
      r'|(CROSS\s+|NATURAL\s+)?)?JOIN\b', tokens.Keyword),
     (r'END(\s+IF|\s+LOOP|\s+WHILE)?\b', tokens.Keyword),
     (r'NOT\s+NULL\b', tokens.Keyword),
-    (r'NULLS\s+(FIRST|LAST)\b', tokens.Keyword),
+    (r'(ASC|DESC)(\s+NULLS\s+(FIRST|LAST))?\b', tokens.Keyword.Order),
+    (r'(ASC|DESC)\b', tokens.Keyword.Order),
+    (r'NULLS\s+(FIRST|LAST)\b', tokens.Keyword.Order),
     (r'UNION\s+ALL\b', tokens.Keyword),
     (r'CREATE(\s+OR\s+REPLACE)?\b', tokens.Keyword.DDL),
     (r'DOUBLE\s+PRECISION\b', tokens.Name.Builtin),
@@ -114,7 +116,6 @@
     'ANY': tokens.Keyword,
     'ARRAYLEN': tokens.Keyword,
     'ARE': tokens.Keyword,
-    'ASC': tokens.Keyword.Order,
     'ASENSITIVE': tokens.Keyword,
     'ASSERTION': tokens.Keyword,
     'ASSIGNMENT': tokens.Keyword,

@@ -227,7 +228,6 @@
     'DELIMITER': tokens.Keyword,
     'DELIMITERS': tokens.Keyword,
     'DEREF': tokens.Keyword,
-    'DESC': tokens.Keyword.Order,
     'DESCRIBE': tokens.Keyword,
     'DESCRIPTOR': tokens.Keyword,
     'DESTROY': tokens.Keyword,
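The ASC and DESC dict entries can be dropped because the new regex rules
earlier in SQL_REGEX now match them first. A quick check of the resulting
tokenization, mirroring the assertion in the updated test below:

    import sqlparse
    from sqlparse import tokens as T

    # A full ordering clause now comes back as a single
    # Keyword.Order token instead of splitting at NULLS.
    stmt = sqlparse.parse('ASC NULLS FIRST')[0]
    assert len(stmt.tokens) == 1
    assert stmt.tokens[0].ttype is T.Keyword.Order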

tests/test_tokenize.py

+8 -3
@@ -169,11 +169,16 @@ def test_parse_endifloop(s):
     assert p.tokens[0].ttype is T.Keyword


-@pytest.mark.parametrize('s', ['NULLS FIRST', 'NULLS LAST'])
-def test_parse_nulls(s):  # issue487
+@pytest.mark.parametrize('s', [
+    'ASC', 'DESC',
+    'NULLS FIRST', 'NULLS LAST',
+    'ASC NULLS FIRST', 'ASC NULLS LAST',
+    'DESC NULLS FIRST', 'DESC NULLS LAST',
+])
+def test_parse_order(s):  # issue487
     p = sqlparse.parse(s)[0]
     assert len(p.tokens) == 1
-    assert p.tokens[0].ttype is T.Keyword
+    assert p.tokens[0].ttype is T.Keyword.Order


 @pytest.mark.parametrize('s', [
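To run just these cases locally, one option is pytest's usual -k name
filter, shown here via pytest.main so the sketch stays in Python:

    import pytest

    # Select only the updated test by name.
    pytest.main(['tests/test_tokenize.py', '-k', 'test_parse_order'])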
