@@ -4,7 +4,7 @@ use std::fmt;
 
 use rustc_hash::{FxHashMap, FxHashSet};
 use span::{Edition, SpanAnchor, SpanData, SpanMap};
-use stdx::{format_to, itertools::Itertools, never, non_empty_vec::NonEmptyVec};
+use stdx::{format_to, never, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
     format_smolstr, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement,
@@ -14,6 +14,7 @@ use syntax::{
 use tt::{
     buffer::{Cursor, TokenBuffer},
     iter::TtIter,
+    token_to_literal,
 };
 
 use crate::to_parser_input::to_parser_input;
@@ -400,56 +401,6 @@ where
     }
 }
 
-pub fn token_to_literal<S>(text: SmolStr, span: S) -> tt::Literal<S>
-where
-    S: Copy,
-{
-    use rustc_lexer::LiteralKind;
-
-    let token = rustc_lexer::tokenize(&text).next_tuple();
-    let Some((rustc_lexer::Token {
-        kind: rustc_lexer::TokenKind::Literal { kind, suffix_start },
-        ..
-    },)) = token
-    else {
-        return tt::Literal { span, text, kind: tt::LitKind::Err(()), suffix: None };
-    };
-
-    let (kind, start_offset, end_offset) = match kind {
-        LiteralKind::Int { .. } => (tt::LitKind::Integer, 0, 0),
-        LiteralKind::Float { .. } => (tt::LitKind::Float, 0, 0),
-        LiteralKind::Char { terminated } => (tt::LitKind::Char, 1, terminated as usize),
-        LiteralKind::Byte { terminated } => (tt::LitKind::Byte, 2, terminated as usize),
-        LiteralKind::Str { terminated } => (tt::LitKind::Str, 1, terminated as usize),
-        LiteralKind::ByteStr { terminated } => (tt::LitKind::ByteStr, 2, terminated as usize),
-        LiteralKind::CStr { terminated } => (tt::LitKind::CStr, 2, terminated as usize),
-        LiteralKind::RawStr { n_hashes } => (
-            tt::LitKind::StrRaw(n_hashes.unwrap_or_default()),
-            2 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-        LiteralKind::RawByteStr { n_hashes } => (
-            tt::LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
-            3 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-        LiteralKind::RawCStr { n_hashes } => (
-            tt::LitKind::CStrRaw(n_hashes.unwrap_or_default()),
-            3 + n_hashes.unwrap_or_default() as usize,
-            1 + n_hashes.unwrap_or_default() as usize,
-        ),
-    };
-
-    let (lit, suffix) = text.split_at(suffix_start as usize);
-    let lit = &lit[start_offset..lit.len() - end_offset];
-    let suffix = match suffix {
-        "" | "_" => None,
-        suffix => Some(Box::new(suffix.into())),
-    };
-
-    tt::Literal { span, text: lit.into(), kind, suffix }
-}
-
 fn is_single_token_op(kind: SyntaxKind) -> bool {
     matches!(
         kind,
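
For reference, a minimal sketch of a call site after this change, assuming `token_to_literal` now lives in the `tt` crate with the same signature as the removed local copy (`text: SmolStr, span: S` with `S: Copy`); the `lex_literal` helper name is hypothetical:

```rust
use syntax::SmolStr;
use tt::token_to_literal;

// Hypothetical helper showing the new import path: the literal lexing and
// delimiter/suffix stripping formerly done by the local `token_to_literal`
// are now performed by the `tt` crate's function.
fn lex_literal<S: Copy>(text: &str, span: S) -> tt::Literal<S> {
    token_to_literal(SmolStr::new(text), span)
}
```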