style: rename crates to kebab case
crates/proc-macro-api/src/lib.rs (new file, 183 lines)
@@ -0,0 +1,183 @@
//! Client-side Proc-Macro crate
//!
//! We separate proc-macro expanding logic to an extern program to allow
//! different implementations (e.g. wasm or dylib loading). And this crate
//! is used to provide basic infrastructure for communication between two
//! processes: Client (RA itself), Server (the external program)

pub mod msg;
mod process;
mod version;

use paths::AbsPathBuf;
use std::{
    ffi::OsStr,
    fmt, io,
    sync::{Arc, Mutex},
};

use serde::{Deserialize, Serialize};
use tt::Subtree;

use crate::{
    msg::{ExpandMacro, FlatTree, PanicMessage},
    process::ProcMacroProcessSrv,
};

pub use version::{read_dylib_info, RustCInfo};

#[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
pub enum ProcMacroKind {
    CustomDerive,
    FuncLike,
    Attr,
}

/// A handle to an external process which loads dylibs with macros (.so or .dll)
/// and runs actual macro expansion functions.
#[derive(Debug)]
pub struct ProcMacroServer {
    /// Currently, the proc macro process expands all procedural macros sequentially.
    ///
    /// That means that concurrent salsa requests may block each other when expanding proc macros,
    /// which is unfortunate, but simple and good enough for the time being.
    ///
    /// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here.
    process: Arc<Mutex<ProcMacroProcessSrv>>,
}

pub struct MacroDylib {
    path: AbsPathBuf,
}

impl MacroDylib {
    // FIXME: this is buggy due to TOCTOU, we should check the version in the
    // macro process instead.
    pub fn new(path: AbsPathBuf) -> io::Result<MacroDylib> {
        let _p = profile::span("MacroDylib::new");

        let info = version::read_dylib_info(&path)?;
        if info.version.0 < 1 || info.version.1 < 47 {
            let msg = format!("proc-macro {} built by {:#?} is not supported by Rust Analyzer, please update your rust version.", path.display(), info);
            return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
        }

        Ok(MacroDylib { path })
    }
}

/// A handle to a specific macro (a `#[proc_macro]` annotated function).
///
/// It exists within the context of a specific [`ProcMacroProcess`] -- currently
/// we share a single expander process for all macros.
#[derive(Debug, Clone)]
pub struct ProcMacro {
    process: Arc<Mutex<ProcMacroProcessSrv>>,
    dylib_path: AbsPathBuf,
    name: String,
    kind: ProcMacroKind,
}

impl Eq for ProcMacro {}
impl PartialEq for ProcMacro {
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name
            && self.kind == other.kind
            && self.dylib_path == other.dylib_path
            && Arc::ptr_eq(&self.process, &other.process)
    }
}

pub struct ServerError {
    pub message: String,
    pub io: Option<io::Error>,
}

impl fmt::Display for ServerError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.message.fmt(f)?;
        if let Some(io) = &self.io {
            f.write_str(": ")?;
            io.fmt(f)?;
        }
        Ok(())
    }
}

pub struct MacroPanic {
    pub message: String,
}

impl ProcMacroServer {
    /// Spawns an external process as the proc macro server and returns a client connected to it.
    pub fn spawn(
        process_path: AbsPathBuf,
        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
    ) -> io::Result<ProcMacroServer> {
        let process = ProcMacroProcessSrv::run(process_path, args)?;
        Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) })
    }

    pub fn load_dylib(
        &self,
        dylib: MacroDylib,
    ) -> Result<Result<Vec<ProcMacro>, String>, ServerError> {
        let _p = profile::span("ProcMacroClient::by_dylib_path");
        let macros =
            self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;

        let res = macros.map(|macros| {
            macros
                .into_iter()
                .map(|(name, kind)| ProcMacro {
                    process: self.process.clone(),
                    name,
                    kind,
                    dylib_path: dylib.path.clone(),
                })
                .collect()
        });

        Ok(res)
    }
}

impl ProcMacro {
    pub fn name(&self) -> &str {
        &self.name
    }

    pub fn kind(&self) -> ProcMacroKind {
        self.kind
    }

    pub fn expand(
        &self,
        subtree: &Subtree,
        attr: Option<&Subtree>,
        env: Vec<(String, String)>,
    ) -> Result<Result<Subtree, PanicMessage>, ServerError> {
        let current_dir = env
            .iter()
            .find(|(name, _)| name == "CARGO_MANIFEST_DIR")
            .map(|(_, value)| value.clone());

        let task = ExpandMacro {
            macro_body: FlatTree::new(subtree),
            macro_name: self.name.to_string(),
            attributes: attr.map(FlatTree::new),
            lib: self.dylib_path.to_path_buf().into(),
            env,
            current_dir,
        };

        let request = msg::Request::ExpandMacro(task);
        let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
        match response {
            msg::Response::ExpandMacro(it) => Ok(it.map(FlatTree::to_subtree)),
            msg::Response::ListMacros { .. } => {
                Err(ServerError { message: "unexpected response".to_string(), io: None })
            }
        }
    }
}
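
For orientation (not part of this commit), here is a minimal sketch of how a client could drive the API above; the server path, the empty argument list, and the dylib path are placeholders, and an empty `tt::Subtree` stands in for real macro input.

```rust
use paths::AbsPathBuf;
use proc_macro_api::{MacroDylib, ProcMacroServer};
use std::io;

fn list_and_expand(server_path: AbsPathBuf, dylib_path: AbsPathBuf) -> io::Result<()> {
    // Spawn the external expander process and connect to it over stdio.
    let server = ProcMacroServer::spawn(server_path, std::iter::empty::<&str>())?;

    // Validate the dylib's rustc version, then ask the server which macros it exports.
    let dylib = MacroDylib::new(dylib_path)?;
    let macros = server
        .load_dylib(dylib)
        .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

    // Expand the first macro on an empty token tree, with no attribute input and no env vars.
    if let Some(m) = macros.first() {
        println!("expanding {} ({:?})", m.name(), m.kind());
        let input = tt::Subtree::default();
        match m.expand(&input, None, Vec::new()) {
            Ok(Ok(subtree)) => println!("expanded to {} token trees", subtree.token_trees.len()),
            Ok(Err(panic)) => println!("macro panicked: {}", panic.0),
            Err(server_error) => println!("server error: {}", server_error),
        }
    }
    Ok(())
}
```
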
crates/proc-macro-api/src/msg.rs (new file, 154 lines)
@@ -0,0 +1,154 @@
//! Defines messages for cross-process message passing based on the `ndjson` wire protocol
pub(crate) mod flat;

use std::{
    io::{self, BufRead, Write},
    path::PathBuf,
};

use serde::{de::DeserializeOwned, Deserialize, Serialize};

use crate::ProcMacroKind;

pub use crate::msg::flat::FlatTree;

#[derive(Debug, Serialize, Deserialize)]
pub enum Request {
    ListMacros { dylib_path: PathBuf },
    ExpandMacro(ExpandMacro),
}

#[derive(Debug, Serialize, Deserialize)]
pub enum Response {
    ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
    ExpandMacro(Result<FlatTree, PanicMessage>),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct PanicMessage(pub String);

#[derive(Debug, Serialize, Deserialize)]
pub struct ExpandMacro {
    /// Argument of macro call.
    ///
    /// In custom derive this will be a struct or enum; in an attribute-like macro, the underlying
    /// item; in a function-like macro, the macro body.
    pub macro_body: FlatTree,

    /// Name of macro to expand.
    ///
    /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.).
    /// In attribute-like and function-like macros it is the name of the macro itself (`show_streams`).
    pub macro_name: String,

    /// Possible attributes for the attribute-like macros.
    pub attributes: Option<FlatTree>,

    pub lib: PathBuf,

    /// Environment variables to set during macro expansion.
    pub env: Vec<(String, String)>,

    pub current_dir: Option<String>,
}

pub trait Message: Serialize + DeserializeOwned {
    fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result<Option<Self>> {
        Ok(match read_json(inp, buf)? {
            None => None,
            Some(text) => {
                let mut deserializer = serde_json::Deserializer::from_str(text);
                // Note that some proc-macros generate very deep syntax trees,
                // so we have to disable serde's recursion limit here.
                deserializer.disable_recursion_limit();
                Some(Self::deserialize(&mut deserializer)?)
            }
        })
    }
    fn write(self, out: &mut impl Write) -> io::Result<()> {
        let text = serde_json::to_string(&self)?;
        write_json(out, &text)
    }
}

impl Message for Request {}
impl Message for Response {}

fn read_json<'a>(inp: &mut impl BufRead, buf: &'a mut String) -> io::Result<Option<&'a String>> {
    loop {
        buf.clear();

        inp.read_line(buf)?;
        buf.pop(); // Remove trailing '\n'

        if buf.is_empty() {
            return Ok(None);
        }

        // Some ill-behaved macros try to use stdout for debugging;
        // we ignore that output here.
        if !buf.starts_with('{') {
            tracing::error!("proc-macro tried to print : {}", buf);
            continue;
        }

        return Ok(Some(buf));
    }
}

fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
    tracing::debug!("> {}", msg);
    out.write_all(msg.as_bytes())?;
    out.write_all(b"\n")?;
    out.flush()?;
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use tt::*;

    fn fixture_token_tree() -> Subtree {
        let mut subtree = Subtree::default();
        subtree
            .token_trees
            .push(TokenTree::Leaf(Ident { text: "struct".into(), id: TokenId(0) }.into()));
        subtree
            .token_trees
            .push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into()));
        subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
            text: "Foo".into(),
            id: TokenId::unspecified(),
        })));
        subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
            char: '@',
            id: TokenId::unspecified(),
            spacing: Spacing::Joint,
        })));
        subtree.token_trees.push(TokenTree::Subtree(Subtree {
            delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
            token_trees: vec![],
        }));
        subtree
    }

    #[test]
    fn test_proc_macro_rpc_works() {
        let tt = fixture_token_tree();
        let task = ExpandMacro {
            macro_body: FlatTree::new(&tt),
            macro_name: Default::default(),
            attributes: None,
            lib: std::env::current_dir().unwrap(),
            env: Default::default(),
            current_dir: Default::default(),
        };

        let json = serde_json::to_string(&task).unwrap();
        // println!("{}", json);
        let back: ExpandMacro = serde_json::from_str(&json).unwrap();

        assert_eq!(tt, back.macro_body.to_subtree());
    }
}
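
As a rough illustration of the ndjson framing used here (one JSON object per line, flushed after each message), the following sketch, which is not part of the commit, round-trips a `Request` through an in-memory buffer; the dylib path is a made-up placeholder.

```rust
use proc_macro_api::msg::{Message, Request};
use std::{io, path::PathBuf};

fn roundtrip_demo() -> io::Result<()> {
    let req = Request::ListMacros { dylib_path: PathBuf::from("/tmp/libderive.so") };

    // `write` serializes the request as a single JSON line and appends '\n',
    // e.g. {"ListMacros":{"dylib_path":"/tmp/libderive.so"}}
    let mut wire: Vec<u8> = Vec::new();
    req.write(&mut wire)?;
    print!("{}", String::from_utf8_lossy(&wire));

    // `read` consumes one line and deserializes it; `Ok(None)` means the peer closed the pipe.
    let mut buf = String::new();
    let back = Request::read(&mut wire.as_slice(), &mut buf)?;
    assert!(back.is_some());
    Ok(())
}
```
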
crates/proc-macro-api/src/msg/flat.rs (new file, 328 lines)
@@ -0,0 +1,328 @@
//! Serialization-friendly representation of `tt::Subtree`.
//!
//! It is possible to serialize `Subtree` as is, as a tree, but using
//! arbitrarily-nested trees in JSON is problematic, as they can cause the JSON
//! parser to overflow the stack.
//!
//! Additionally, such an implementation would be pretty verbose, and we do care
//! about performance here a bit.
//!
//! So what this module does is dump a `tt::Subtree` into a bunch of flat
//! arrays of numbers. See the test in the parent module to get an example
//! output.
//!
//! ```json
//! {
//!   // Array of subtrees, each subtree is represented by 4 numbers:
//!   // id of delimiter, delimiter kind, index of first child in `token_tree`,
//!   // index of last child in `token_tree`
//!   "subtree":[4294967295,0,0,5,2,2,5,5],
//!   // 2 ints per literal: [token id, index into `text`]
//!   "literal":[4294967295,1],
//!   // 3 ints per punct: [token id, char, spacing]
//!   "punct":[4294967295,64,1],
//!   // 2 ints per ident: [token id, index into `text`]
//!   "ident": [0,0,1,1],
//!   // children of all subtrees, concatenated. Each child is represented as `index << 2 | tag`
//!   // where tag denotes one of subtree, literal, punct or ident.
//!   "token_tree":[3,7,1,4],
//!   // Strings shared by idents and literals
//!   "text": ["struct","Foo"]
//! }
//! ```
//!
//! We probably should replace most of the code here with bincode someday, but,
//! as we don't have bincode in Cargo.toml yet, let's stick with serde_json for
//! the time being.

use std::{
    collections::{HashMap, VecDeque},
    convert::TryInto,
};

use serde::{Deserialize, Serialize};
use tt::TokenId;

#[derive(Serialize, Deserialize, Debug)]
pub struct FlatTree {
    subtree: Vec<u32>,
    literal: Vec<u32>,
    punct: Vec<u32>,
    ident: Vec<u32>,
    token_tree: Vec<u32>,
    text: Vec<String>,
}

struct SubtreeRepr {
    id: tt::TokenId,
    kind: Option<tt::DelimiterKind>,
    tt: [u32; 2],
}

struct LiteralRepr {
    id: tt::TokenId,
    text: u32,
}

struct PunctRepr {
    id: tt::TokenId,
    char: char,
    spacing: tt::Spacing,
}

struct IdentRepr {
    id: tt::TokenId,
    text: u32,
}

impl FlatTree {
    pub fn new(subtree: &tt::Subtree) -> FlatTree {
        let mut w = Writer {
            string_table: HashMap::new(),
            work: VecDeque::new(),

            subtree: Vec::new(),
            literal: Vec::new(),
            punct: Vec::new(),
            ident: Vec::new(),
            token_tree: Vec::new(),
            text: Vec::new(),
        };
        w.write(subtree);

        return FlatTree {
            subtree: write_vec(w.subtree, SubtreeRepr::write),
            literal: write_vec(w.literal, LiteralRepr::write),
            punct: write_vec(w.punct, PunctRepr::write),
            ident: write_vec(w.ident, IdentRepr::write),
            token_tree: w.token_tree,
            text: w.text,
        };

        fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> {
            xs.into_iter().flat_map(f).collect()
        }
    }

    pub fn to_subtree(self) -> tt::Subtree {
        return Reader {
            subtree: read_vec(self.subtree, SubtreeRepr::read),
            literal: read_vec(self.literal, LiteralRepr::read),
            punct: read_vec(self.punct, PunctRepr::read),
            ident: read_vec(self.ident, IdentRepr::read),
            token_tree: self.token_tree,
            text: self.text,
        }
        .read();

        fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> {
            let mut chunks = xs.chunks_exact(N);
            let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect();
            assert!(chunks.remainder().is_empty());
            res
        }
    }
}

impl SubtreeRepr {
    fn write(self) -> [u32; 4] {
        let kind = match self.kind {
            None => 0,
            Some(tt::DelimiterKind::Parenthesis) => 1,
            Some(tt::DelimiterKind::Brace) => 2,
            Some(tt::DelimiterKind::Bracket) => 3,
        };
        [self.id.0, kind, self.tt[0], self.tt[1]]
    }
    fn read([id, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
        let kind = match kind {
            0 => None,
            1 => Some(tt::DelimiterKind::Parenthesis),
            2 => Some(tt::DelimiterKind::Brace),
            3 => Some(tt::DelimiterKind::Bracket),
            other => panic!("bad kind {}", other),
        };
        SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] }
    }
}

impl LiteralRepr {
    fn write(self) -> [u32; 2] {
        [self.id.0, self.text]
    }
    fn read([id, text]: [u32; 2]) -> LiteralRepr {
        LiteralRepr { id: TokenId(id), text }
    }
}

impl PunctRepr {
    fn write(self) -> [u32; 3] {
        let spacing = match self.spacing {
            tt::Spacing::Alone => 0,
            tt::Spacing::Joint => 1,
        };
        [self.id.0, self.char as u32, spacing]
    }
    fn read([id, char, spacing]: [u32; 3]) -> PunctRepr {
        let spacing = match spacing {
            0 => tt::Spacing::Alone,
            1 => tt::Spacing::Joint,
            other => panic!("bad spacing {}", other),
        };
        PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing }
    }
}

impl IdentRepr {
    fn write(self) -> [u32; 2] {
        [self.id.0, self.text]
    }
    fn read(data: [u32; 2]) -> IdentRepr {
        IdentRepr { id: TokenId(data[0]), text: data[1] }
    }
}

struct Writer<'a> {
    work: VecDeque<(usize, &'a tt::Subtree)>,
    string_table: HashMap<&'a str, u32>,

    subtree: Vec<SubtreeRepr>,
    literal: Vec<LiteralRepr>,
    punct: Vec<PunctRepr>,
    ident: Vec<IdentRepr>,
    token_tree: Vec<u32>,
    text: Vec<String>,
}

impl<'a> Writer<'a> {
    fn write(&mut self, root: &'a tt::Subtree) {
        self.enqueue(root);
        while let Some((idx, subtree)) = self.work.pop_front() {
            self.subtree(idx, subtree);
        }
    }

    fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree) {
        let mut first_tt = self.token_tree.len();
        let n_tt = subtree.token_trees.len();
        self.token_tree.resize(first_tt + n_tt, !0);

        self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];

        for child in &subtree.token_trees {
            let idx_tag = match child {
                tt::TokenTree::Subtree(it) => {
                    let idx = self.enqueue(it);
                    idx << 2 | 0b00
                }
                tt::TokenTree::Leaf(leaf) => match leaf {
                    tt::Leaf::Literal(lit) => {
                        let idx = self.literal.len() as u32;
                        let text = self.intern(&lit.text);
                        self.literal.push(LiteralRepr { id: lit.id, text });
                        idx << 2 | 0b01
                    }
                    tt::Leaf::Punct(punct) => {
                        let idx = self.punct.len() as u32;
                        self.punct.push(PunctRepr {
                            char: punct.char,
                            spacing: punct.spacing,
                            id: punct.id,
                        });
                        idx << 2 | 0b10
                    }
                    tt::Leaf::Ident(ident) => {
                        let idx = self.ident.len() as u32;
                        let text = self.intern(&ident.text);
                        self.ident.push(IdentRepr { id: ident.id, text });
                        idx << 2 | 0b11
                    }
                },
            };
            self.token_tree[first_tt] = idx_tag;
            first_tt += 1;
        }
    }

    fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
        let idx = self.subtree.len();
        let delimiter_id = subtree.delimiter.map_or(TokenId::unspecified(), |it| it.id);
        let delimiter_kind = subtree.delimiter.map(|it| it.kind);
        self.subtree.push(SubtreeRepr { id: delimiter_id, kind: delimiter_kind, tt: [!0, !0] });
        self.work.push_back((idx, subtree));
        idx as u32
    }

    pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
        let table = &mut self.text;
        *self.string_table.entry(text).or_insert_with(|| {
            let idx = table.len();
            table.push(text.to_string());
            idx as u32
        })
    }
}

struct Reader {
    subtree: Vec<SubtreeRepr>,
    literal: Vec<LiteralRepr>,
    punct: Vec<PunctRepr>,
    ident: Vec<IdentRepr>,
    token_tree: Vec<u32>,
    text: Vec<String>,
}

impl Reader {
    pub(crate) fn read(self) -> tt::Subtree {
        let mut res: Vec<Option<tt::Subtree>> = vec![None; self.subtree.len()];
        for i in (0..self.subtree.len()).rev() {
            let repr = &self.subtree[i];
            let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
            let s = tt::Subtree {
                delimiter: repr.kind.map(|kind| tt::Delimiter { id: repr.id, kind }),
                token_trees: token_trees
                    .iter()
                    .copied()
                    .map(|idx_tag| {
                        let tag = idx_tag & 0b11;
                        let idx = (idx_tag >> 2) as usize;
                        match tag {
                            // XXX: we iterate subtrees in reverse to guarantee
                            // that this unwrap doesn't fire.
                            0b00 => res[idx].take().unwrap().into(),
                            0b01 => {
                                let repr = &self.literal[idx];
                                tt::Leaf::Literal(tt::Literal {
                                    text: self.text[repr.text as usize].as_str().into(),
                                    id: repr.id,
                                })
                                .into()
                            }
                            0b10 => {
                                let repr = &self.punct[idx];
                                tt::Leaf::Punct(tt::Punct {
                                    char: repr.char,
                                    spacing: repr.spacing,
                                    id: repr.id,
                                })
                                .into()
                            }
                            0b11 => {
                                let repr = &self.ident[idx];
                                tt::Leaf::Ident(tt::Ident {
                                    text: self.text[repr.text as usize].as_str().into(),
                                    id: repr.id,
                                })
                                .into()
                            }
                            other => panic!("bad tag: {}", other),
                        }
                    })
                    .collect(),
            };
            res[i] = Some(s);
        }

        res[0].take().unwrap()
    }
}
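
To make the child encoding concrete, here is a small worked example (not from the commit itself) that unpacks the `index << 2 | tag` entries from the module-level doc sample:

```rust
// Each `token_tree` entry packs an array index in the high bits and a 2-bit tag
// in the low bits: 0b00 = subtree, 0b01 = literal, 0b10 = punct, 0b11 = ident.
fn decode(idx_tag: u32) -> (usize, &'static str) {
    let idx = (idx_tag >> 2) as usize;
    let kind = match idx_tag & 0b11 {
        0b00 => "subtree",
        0b01 => "literal",
        0b10 => "punct",
        _ => "ident",
    };
    (idx, kind)
}

fn main() {
    // "token_tree":[3,7,1,4] from the doc comment above decodes to:
    // 3 -> ident #0, 7 -> ident #1, 1 -> literal #0, 4 -> subtree #1.
    for &child in &[3u32, 7, 1, 4] {
        let (idx, kind) = decode(child);
        println!("{} -> {} #{}", child, kind, idx);
    }
}
```
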
crates/proc-macro-api/src/process.rs (new file, 106 lines)
@@ -0,0 +1,106 @@
//! Handles the process lifetime and message passing for the proc-macro client

use std::{
    ffi::{OsStr, OsString},
    io::{self, BufRead, BufReader, Write},
    process::{Child, ChildStdin, ChildStdout, Command, Stdio},
};

use paths::{AbsPath, AbsPathBuf};
use stdx::JodChild;

use crate::{
    msg::{Message, Request, Response},
    ProcMacroKind, ServerError,
};

#[derive(Debug)]
pub(crate) struct ProcMacroProcessSrv {
    _process: Process,
    stdin: ChildStdin,
    stdout: BufReader<ChildStdout>,
}

impl ProcMacroProcessSrv {
    pub(crate) fn run(
        process_path: AbsPathBuf,
        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
    ) -> io::Result<ProcMacroProcessSrv> {
        let mut process = Process::run(process_path, args)?;
        let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");

        let srv = ProcMacroProcessSrv { _process: process, stdin, stdout };

        Ok(srv)
    }

    pub(crate) fn find_proc_macros(
        &mut self,
        dylib_path: &AbsPath,
    ) -> Result<Result<Vec<(String, ProcMacroKind)>, String>, ServerError> {
        let request = Request::ListMacros { dylib_path: dylib_path.to_path_buf().into() };

        let response = self.send_task(request)?;

        match response {
            Response::ListMacros(it) => Ok(it),
            Response::ExpandMacro { .. } => {
                Err(ServerError { message: "unexpected response".to_string(), io: None })
            }
        }
    }

    pub(crate) fn send_task(&mut self, req: Request) -> Result<Response, ServerError> {
        let mut buf = String::new();
        send_request(&mut self.stdin, &mut self.stdout, req, &mut buf)
    }
}

#[derive(Debug)]
struct Process {
    child: JodChild,
}

impl Process {
    fn run(
        path: AbsPathBuf,
        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
    ) -> io::Result<Process> {
        let args: Vec<OsString> = args.into_iter().map(|s| s.as_ref().into()).collect();
        let child = JodChild(mk_child(&path, &args)?);
        Ok(Process { child })
    }

    fn stdio(&mut self) -> Option<(ChildStdin, BufReader<ChildStdout>)> {
        let stdin = self.child.stdin.take()?;
        let stdout = self.child.stdout.take()?;
        let read = BufReader::new(stdout);

        Some((stdin, read))
    }
}

fn mk_child(
    path: &AbsPath,
    args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> io::Result<Child> {
    Command::new(path.as_os_str())
        .args(args)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::inherit())
        .spawn()
}

fn send_request(
    mut writer: &mut impl Write,
    mut reader: &mut impl BufRead,
    req: Request,
    buf: &mut String,
) -> Result<Response, ServerError> {
    req.write(&mut writer)
        .map_err(|err| ServerError { message: "failed to write request".into(), io: Some(err) })?;
    let res = Response::read(&mut reader, buf)
        .map_err(|err| ServerError { message: "failed to read response".into(), io: Some(err) })?;
    res.ok_or_else(|| ServerError { message: "server exited".into(), io: None })
}
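
For context, the external server process is the other half of this protocol and is not included in this commit; a skeletal sketch of its main loop could look like the following, assuming the same `msg` types are available on the server side and leaving the actual dylib loading and expansion unimplemented.

```rust
use proc_macro_api::msg::{Message, Request, Response};
use std::io;

// Read one request per line from stdin, write one response per line to stdout,
// until the client closes the pipe (`read` returns `Ok(None)`).
fn server_main() -> io::Result<()> {
    let stdin = io::stdin();
    let mut stdin = stdin.lock();
    let mut stdout = io::stdout();
    let mut buf = String::new();

    while let Some(req) = Request::read(&mut stdin, &mut buf)? {
        let resp = match req {
            Request::ListMacros { dylib_path: _ } => {
                // A real server would load the dylib and enumerate its macros here.
                Response::ListMacros(Ok(Vec::new()))
            }
            Request::ExpandMacro(task) => {
                // A real server would run the expander; this sketch just echoes the input back.
                Response::ExpandMacro(Ok(task.macro_body))
            }
        };
        resp.write(&mut stdout)?;
    }
    Ok(())
}
```
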
crates/proc-macro-api/src/version.rs (new file, 149 lines)
@@ -0,0 +1,149 @@
//! Reading proc-macro rustc version information from binary data

use std::{
    fs::File,
    io::{self, Read},
};

use memmap2::Mmap;
use object::read::{File as BinaryFile, Object, ObjectSection};
use paths::AbsPath;
use snap::read::FrameDecoder as SnapDecoder;

#[derive(Debug)]
pub struct RustCInfo {
    pub version: (usize, usize, usize),
    pub channel: String,
    pub commit: Option<String>,
    pub date: Option<String>,
}

/// Read rustc dylib information
pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result<RustCInfo> {
    macro_rules! err {
        ($e:literal) => {
            io::Error::new(io::ErrorKind::InvalidData, $e)
        };
    }

    let ver_str = read_version(dylib_path)?;
    let mut items = ver_str.split_whitespace();
    let tag = items.next().ok_or_else(|| err!("version format error"))?;
    if tag != "rustc" {
        return Err(err!("version format error (No rustc tag)"));
    }

    let version_part = items.next().ok_or_else(|| err!("no version string"))?;
    let mut version_parts = version_part.split('-');
    let version = version_parts.next().ok_or_else(|| err!("no version"))?;
    let channel = version_parts.next().unwrap_or_default().to_string();

    let commit = match items.next() {
        Some(commit) => {
            match commit.len() {
                0 => None,
                _ => Some(commit[1..].to_string() /* remove ( */),
            }
        }
        None => None,
    };
    let date = match items.next() {
        Some(date) => {
            match date.len() {
                0 => None,
                _ => Some(date[0..date.len() - 2].to_string() /* remove ) */),
            }
        }
        None => None,
    };

    let version_numbers = version
        .split('.')
        .map(|it| it.parse::<usize>())
        .collect::<Result<Vec<_>, _>>()
        .map_err(|_| err!("version number error"))?;

    if version_numbers.len() != 3 {
        return Err(err!("version number format error"));
    }
    let version = (version_numbers[0], version_numbers[1], version_numbers[2]);

    Ok(RustCInfo { version, channel, commit, date })
}

/// This is used inside read_version() to locate the ".rustc" section
/// from a proc macro crate's binary file.
fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'a [u8]> {
    BinaryFile::parse(dylib_binary)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?
        .section_by_name(section_name)
        .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "section read error"))?
        .data()
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}

/// Check the version of rustc that was used to compile a proc macro crate's
/// binary file.
///
/// A proc macro crate binary's ".rustc" section has the following byte layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] are the first 8 bytes
/// * ff060000 734e6150 follows; these are the snappy format magic bytes, which
///   means the bytes from here (including this sequence) are compressed in the
///   snappy compression format. The version info is inside, so decompress this.
/// The bytes you get after decompressing the snappy format portion have the
/// following layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] are the first 8 bytes (again)
/// * [crate root bytes] the next 4 bytes store the crate root position,
///   according to a comment in rustc's source code
/// * [length byte] the next byte tells us how many bytes we should read next
///   for the version string's utf8 bytes
/// * [version string bytes encoded in utf8] <- GET THIS BOI
/// * [some more bytes that we don't really care about, but they are still there] :-)
/// Check this issue for more about the byte layout:
/// <https://github.com/rust-analyzer/rust-analyzer/issues/6174>
fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
    let dylib_file = File::open(dylib_path)?;
    let dylib_mmaped = unsafe { Mmap::map(&dylib_file) }?;

    let dot_rustc = read_section(&dylib_mmaped, ".rustc")?;

    // check if magic is valid
    if &dot_rustc[0..4] != b"rust" {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            format!("unknown metadata magic, expected `rust`, found `{:?}`", &dot_rustc[0..4]),
        ));
    }
    let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
    // Last supported version is:
    // https://github.com/rust-lang/rust/commit/0696e79f2740ad89309269b460579e548a5cd632
    match version {
        5 | 6 => {}
        _ => {
            return Err(io::Error::new(
                io::ErrorKind::InvalidData,
                format!("unsupported metadata version {}", version),
            ));
        }
    }

    let snappy_portion = &dot_rustc[8..];

    let mut snappy_decoder = SnapDecoder::new(snappy_portion);

    // the bytes before the version string, so this basically is:
    // 8 bytes for [b'r',b'u',b's',b't',0,0,0,5]
    // 4 bytes for [crate root bytes]
    // 1 byte for length of version string
    // so 13 bytes in total, and we should check the 13th byte
    // to know the length
    let mut bytes_before_version = [0u8; 13];
    snappy_decoder.read_exact(&mut bytes_before_version)?;
    let length = bytes_before_version[12];

    let mut version_string_utf8 = vec![0u8; length as usize];
    snappy_decoder.read_exact(&mut version_string_utf8)?;
    let version_string = String::from_utf8(version_string_utf8);

    version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
}