Replace Rc with Lrc for shared data
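
For context, a minimal sketch of the idea behind Lrc, assuming the usual conditional-alias pattern in rustc_data_structures::sync (the `parallel_queries` cfg name and the exact module layout are assumptions, not taken from this diff): Lrc resolves to std::rc::Rc in a single-threaded build and to std::sync::Arc when the compiler is built with parallel queries, so call sites such as Lrc::new(..) and .clone() stay the same either way.

    // Illustrative sketch only; the real alias lives in rustc_data_structures::sync,
    // and the cfg flag name here is an assumption.
    #[cfg(not(parallel_queries))]
    pub use std::rc::Rc as Lrc;    // single-threaded build: plain reference counting
    #[cfg(parallel_queries)]
    pub use std::sync::Arc as Lrc; // parallel build: atomic reference counting

    fn main() {
        // Call sites look the same regardless of which type backs Lrc.
        let source_text: Lrc<String> = Lrc::new(String::from("fn main() {}"));
        let alias = source_text.clone(); // bumps the reference count, no deep copy
        assert_eq!(*alias, *source_text);
    }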
@@ -20,7 +20,7 @@ use std_unicode::property::Pattern_White_Space;
 use std::borrow::Cow;
 use std::char;
 use std::mem::replace;
-use std::rc::Rc;
+use rustc_data_structures::sync::Lrc;
 
 pub mod comments;
 mod tokentrees;
@@ -48,7 +48,7 @@ pub struct StringReader<'a> {
     pub col: CharPos,
     /// The current character (which has been read from self.pos)
     pub ch: Option<char>,
-    pub filemap: Rc<syntax_pos::FileMap>,
+    pub filemap: Lrc<syntax_pos::FileMap>,
     /// If Some, stop reading the source at this position (inclusive).
     pub terminator: Option<BytePos>,
     /// Whether to record new-lines and multibyte chars in filemap.
@@ -61,7 +61,7 @@ pub struct StringReader<'a> {
     pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
-    source_text: Rc<String>,
+    source_text: Lrc<String>,
     /// Stack of open delimiters and their spans. Used for error message.
     token: token::Token,
     span: Span,
@@ -152,13 +152,13 @@ impl<'a> StringReader<'a> {
 
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
-    pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    pub fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, filemap);
         sr.bump();
         sr
     }
 
-    fn new_raw_internal(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>) -> Self {
         if filemap.src.is_none() {
             sess.span_diagnostic.bug(&format!("Cannot lex filemap without source: {}",
                                               filemap.name));
@@ -187,7 +187,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+    pub fn new(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>) -> Self {
         let mut sr = StringReader::new_raw(sess, filemap);
         if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
@@ -1747,9 +1747,7 @@ mod tests {
     use std::collections::HashSet;
     use std::io;
     use std::path::PathBuf;
-    use std::rc::Rc;
-
-    fn mk_sess(cm: Rc<CodeMap>) -> ParseSess {
+    fn mk_sess(cm: Lrc<CodeMap>) -> ParseSess {
         let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
                                                           Some(cm.clone()),
                                                           false,
@@ -1776,7 +1774,7 @@ mod tests {
 
     #[test]
     fn t1() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         let mut string_reader = setup(&cm,
                                       &sh,
@@ -1820,7 +1818,7 @@ mod tests {
 
     #[test]
     fn doublecolonparsing() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         check_tokenization(setup(&cm, &sh, "a b".to_string()),
                            vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
@@ -1828,7 +1826,7 @@ mod tests {
 
     #[test]
     fn dcparsing_2() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         check_tokenization(setup(&cm, &sh, "a::b".to_string()),
                            vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
@@ -1836,7 +1834,7 @@ mod tests {
 
     #[test]
     fn dcparsing_3() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
                            vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
@@ -1844,7 +1842,7 @@ mod tests {
 
     #[test]
     fn dcparsing_4() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
                            vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
@@ -1852,7 +1850,7 @@ mod tests {
 
     #[test]
     fn character_a() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
                    token::Literal(token::Char(Symbol::intern("a")), None));
@@ -1860,7 +1858,7 @@ mod tests {
 
     #[test]
     fn character_space() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
                    token::Literal(token::Char(Symbol::intern(" ")), None));
@@ -1868,7 +1866,7 @@ mod tests {
 
     #[test]
     fn character_escaped() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
                    token::Literal(token::Char(Symbol::intern("\\n")), None));
@@ -1876,7 +1874,7 @@ mod tests {
 
     #[test]
     fn lifetime_name() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
        let sh = mk_sess(cm.clone());
         assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
                    token::Lifetime(Ident::from_str("'abc")));
@@ -1884,7 +1882,7 @@ mod tests {
 
     #[test]
     fn raw_string() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
                        .next_token()
@@ -1894,7 +1892,7 @@ mod tests {
 
     #[test]
     fn literal_suffixes() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         macro_rules! test {
             ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
@@ -1938,7 +1936,7 @@ mod tests {
 
     #[test]
     fn nested_block_comments() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
         match lexer.next_token().tok {
@@ -1951,7 +1949,7 @@ mod tests {
 
     #[test]
     fn crlf_comments() {
-        let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
+        let cm = Lrc::new(CodeMap::new(FilePathMapping::empty()));
         let sh = mk_sess(cm.clone());
         let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
         let comment = lexer.next_token();