Commit 4231fbc

Condense StringReader's API to a single function
1 parent 08deb86 commit 4231fbc

4 files changed: +25 -33 lines changed

compiler/rustc_parse/src/lexer/mod.rs (+16 -29)

@@ -1,22 +1,19 @@
 use rustc_ast::ast::AttrStyle;
 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
-use rustc_ast::tokenstream::IsJoint;
-use rustc_data_structures::sync::Lrc;
-use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError};
-use rustc_lexer::Base;
-use rustc_lexer::{unescape, RawStrError};
+use rustc_ast::tokenstream::{IsJoint, TokenStream};
+use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
+use rustc_lexer::unescape::{self, Mode};
+use rustc_lexer::{Base, DocStyle, RawStrError};
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{sym, Symbol};
 use rustc_span::{BytePos, Pos, Span};
 
-use std::char;
 use tracing::debug;
 
 mod tokentrees;
 mod unescape_error_reporting;
 mod unicode_chars;
 
-use rustc_lexer::{unescape::Mode, DocStyle};
 use unescape_error_reporting::{emit_unescape_error, push_escaped_char};
 
 #[derive(Clone, Debug)]
@@ -28,7 +25,17 @@ pub struct UnmatchedBrace {
     pub candidate_span: Option<Span>,
 }
 
-crate struct StringReader<'a> {
+crate fn parse_token_trees<'a>(
+    sess: &'a ParseSess,
+    src: &'a str,
+    start_pos: BytePos,
+    override_span: Option<Span>,
+) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span }
+        .into_token_trees()
+}
+
+struct StringReader<'a> {
     sess: &'a ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
@@ -37,31 +44,11 @@ crate struct StringReader<'a> {
     /// Stop reading src at this index.
     end_src_index: usize,
     /// Source text to tokenize.
-    src: Lrc<String>,
+    src: &'a str,
     override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
-    crate fn new(
-        sess: &'a ParseSess,
-        source_file: Lrc<rustc_span::SourceFile>,
-        override_span: Option<Span>,
-    ) -> Self {
-        let src = source_file.src.clone().unwrap_or_else(|| {
-            sess.span_diagnostic
-                .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
-        });
-
-        StringReader {
-            sess,
-            start_pos: source_file.start_pos,
-            pos: source_file.start_pos,
-            end_src_index: src.len(),
-            src,
-            override_span,
-        }
-    }
-
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
         self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
     }
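The new entry point is deliberately thin: it takes over what StringReader::new used to do, except for the missing-source check, which moves to the caller in lib.rs below. For reference, the same function from the hunk above, with explanatory comments added here (the comments are not part of the commit):

    crate fn parse_token_trees<'a>(
        sess: &'a ParseSess,
        src: &'a str,                 // borrowed source text (was an owned Lrc<String> field)
        start_pos: BytePos,           // was read from source_file.start_pos inside new()
        override_span: Option<Span>,
    ) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
        // Build the now module-private StringReader and immediately consume it,
        // so no caller outside the lexer module ever holds a StringReader.
        StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span }
            .into_token_trees()
    }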

compiler/rustc_parse/src/lexer/tokentrees.rs (+1 -1)

@@ -12,7 +12,7 @@ use rustc_errors::PResult;
 use rustc_span::Span;
 
 impl<'a> StringReader<'a> {
-    crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
             token: Token::dummy(),

compiler/rustc_parse/src/lexer/unicode_chars.rs (+1 -1)

@@ -332,7 +332,7 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
     ('"', "Quotation Mark", None),
 ];
 
-crate fn check_for_substitution<'a>(
+pub(super) fn check_for_substitution<'a>(
     reader: &StringReader<'a>,
     pos: BytePos,
     ch: char,

compiler/rustc_parse/src/lib.rs (+7 -2)

@@ -200,8 +200,13 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new(sess, source_file, override_span);
-    let (token_trees, unmatched_braces) = srdr.into_token_trees();
+    let src = source_file.src.as_ref().unwrap_or_else(|| {
+        sess.span_diagnostic
+            .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
+    });
+
+    let (token_trees, unmatched_braces) =
+        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
 
     match token_trees {
         Ok(stream) => Ok((stream, unmatched_braces)),
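Seen from the caller's side, construction and consumption of the lexer collapse into a single call, and the missing-source check from the old StringReader::new is the one piece of its logic that now lives in maybe_file_to_stream. A minimal before/after sketch of this call site, assembled from the hunks in this commit rather than copied verbatim from the surrounding source:

    // Before this commit: build a StringReader from the SourceFile, then consume it.
    let srdr = lexer::StringReader::new(sess, source_file, override_span);
    let (token_trees, unmatched_braces) = srdr.into_token_trees();

    // After this commit: one function; the caller passes the borrowed source text and
    // start position explicitly, and StringReader is no longer visible outside the lexer.
    // `src` here is the value unwrapped from source_file.src in the hunk above.
    let (token_trees, unmatched_braces) =
        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);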
