diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7ad829c94d50f3..3632cf392039f5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -47,6 +47,8 @@ repos:
         exclude: Lib/test/tokenizedata/coding20731.py
       - id: trailing-whitespace
         types_or: [c, inc, python, rst]
+      - id: trailing-whitespace
+        files: '\.(gram)$'

   - repo: https://github.com/python-jsonschema/check-jsonschema
     rev: 0.33.0
diff --git a/Grammar/python.gram b/Grammar/python.gram
index f3ef990923eec3..528075b8bc0d71 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -96,12 +96,12 @@ func_type[mod_ty]: '(' a=[type_expressions] ')' '->' b=expression NEWLINE* ENDMA

 statements[asdl_stmt_seq*]: a=statement+ { _PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_seq_flatten(p, a)) }

-statement[asdl_stmt_seq*]:
-    | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) }
+statement[asdl_stmt_seq*]:
+    | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) }
     | a[asdl_stmt_seq*]=simple_stmts { a }

 single_compound_stmt[asdl_stmt_seq*]:
-    | a=compound_stmt {
+    | a=compound_stmt {
         _PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a)) }

 statement_newline[asdl_stmt_seq*]:
@@ -449,9 +449,9 @@ except_block[excepthandler_ty]:
         _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }
     | 'except' e=expressions ':' b=block {
         CHECK_VERSION(
-            excepthandler_ty,
-            14,
-            "except expressions without parentheses are",
+            excepthandler_ty,
+            14,
+            "except expressions without parentheses are",
             _PyAST_ExceptHandler(e, NULL, b, EXTRA)) }
     | 'except' ':' b=block { _PyAST_ExceptHandler(NULL, NULL, b, EXTRA) }
     | invalid_except_stmt
@@ -463,9 +463,9 @@ except_star_block[excepthandler_ty]:
         _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }
     | 'except' '*' e=expressions ':' b=block {
         CHECK_VERSION(
-            excepthandler_ty,
-            14,
-            "except expressions without parentheses are",
+            excepthandler_ty,
+            14,
+            "except expressions without parentheses are",
             _PyAST_ExceptHandler(e, NULL, b, EXTRA)) }
     | invalid_except_star_stmt
 finally_block[asdl_stmt_seq*]:
@@ -977,11 +977,11 @@ tstring_middle[expr_ty]:
     | tstring_replacement_field
     | t=TSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }
 tstring[expr_ty] (memo):
-    | a=TSTRING_START b=tstring_middle* c=TSTRING_END {
+    | a=TSTRING_START b=tstring_middle* c=TSTRING_END {
         CHECK_VERSION(
-            expr_ty,
-            14,
-            "t-strings are",
+            expr_ty,
+            14,
+            "t-strings are",
             _PyPegen_template_str(p, a, (asdl_expr_seq*)b, c)) }

 string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) }
@@ -1383,11 +1383,11 @@ invalid_import:
         RAISE_SYNTAX_ERROR_STARTING_FROM(token, "Expected one or more names after 'import'") }
 invalid_dotted_as_name:
     | dotted_name 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {
-        RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,
+        RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,
             "cannot use %s as import target", _PyPegen_get_expr_name(a)) }
 invalid_import_from_as_name:
     | NAME 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {
-        RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,
+        RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,
             "cannot use %s as import target", _PyPegen_get_expr_name(a)) }
 invalid_import_from_targets: