rustc: Move ext to librustsyntax

This commit is contained in:
Brian Anderson
2012-03-29 13:48:05 -07:00
parent 14e9f58b50
commit 106c9faa59
14 changed files with 45 additions and 46 deletions

View File

@@ -68,23 +68,7 @@ mod middle {
}
}
mod syntax {
mod ext {
mod base;
mod expand;
mod qquote;
mod build;
mod fmt;
mod env;
mod simplext;
mod concat_idents;
mod ident_to_str;
mod log_syntax;
mod auto_serialize;
}
}
mod syntax;
mod front {
mod config;

View File

@@ -26,4 +26,5 @@ export util;
import rustsyntax::attr;
export attr;
import rustsyntax::ext;
export ext;

View File

@@ -1,855 +0,0 @@
/*
The compiler code necessary to implement the #[auto_serialize]
extension. The idea here is that type-defining items may be tagged
with #[auto_serialize], which will cause us to generate a little
companion module with the same name as the item.
For example, a type like:
type node_id = uint;
would generate two functions like:
fn serialize_node_id<S: serializer>(s: S, v: node_id) {
s.emit_uint(v);
}
fn deserialize_node_id<D: deserializer>(d: D) -> node_id {
d.read_uint()
}
Other interesting scenarios are when the item has type parameters or
references other non-built-in types. A type definition like:
type spanned<T> = {node: T, span: span};
would yield functions like:
fn serialize_spanned<S: serializer,T>(s: S, v: spanned<T>, t: fn(T)) {
s.emit_rec(2u) {||
s.emit_rec_field("node", 0u) {||
t(v.node);
};
s.emit_rec_field("span", 1u) {||
serialize_span(s, v.span);
};
}
}
fn deserialize_spanned<D: deserializer>(d: D, t: fn() -> T) -> spanned<T> {
d.read_rec(2u) {||
{node: d.read_rec_field("node", 0u, t),
span: d.read_rec_field("span", 1u) {||deserialize_span(d)}}
}
}
In general, the code to serialize an instance `v` of a non-built-in
type a::b::c<T0,...,Tn> looks like:
a::b::serialize_c(s, {|v| c_T0}, ..., {|v| c_Tn}, v)
where `c_Ti` is the code to serialize an instance `v` of the type
`Ti`.
Similarly, the code to deserialize an instance of a non-built-in type
`a::b::c<T0,...,Tn>` using the deserializer `d` looks like:
a::b::deserialize_c(d, {|| c_T0}, ..., {|| c_Tn})
where `c_Ti` is the code to deserialize an instance of `Ti` using the
deserializer `d`.
TODO--Hygiene. Search for "__" strings. We also assume "std" is the
standard library.
Misc notes:
-----------
I use move mode arguments for ast nodes that will get inserted as is
into the tree. This is intended to prevent us from inserting the same
node twice.
*/
import base::*;
import codemap::span;
import std::map;
import std::map::hashmap;
import syntax::attr;
export expand;
type ser_tps_map = map::hashmap<str, fn@(@ast::expr) -> [@ast::stmt]>;
type deser_tps_map = map::hashmap<str, fn@() -> @ast::expr>;
fn expand(cx: ext_ctxt,
span: span,
_mitem: ast::meta_item,
in_items: [@ast::item]) -> [@ast::item] {
fn not_auto_serialize(a: ast::attribute) -> bool {
attr::get_attr_name(a) != "auto_serialize"
}
fn filter_attrs(item: @ast::item) -> @ast::item {
@{attrs: vec::filter(item.attrs, not_auto_serialize)
with *item}
}
vec::flat_map(in_items) {|in_item|
alt in_item.node {
ast::item_ty(ty, tps) {
[filter_attrs(in_item)] + ty_fns(cx, in_item.ident, ty, tps)
}
ast::item_enum(variants, tps) {
[filter_attrs(in_item)] + enum_fns(cx, in_item.ident,
in_item.span, variants, tps)
}
_ {
cx.span_err(span, "#[auto_serialize] can only be \
applied to type and enum \
definitions");
[in_item]
}
}
}
}
impl helpers for ext_ctxt {
fn helper_path(base_path: @ast::path,
helper_name: str) -> @ast::path {
let head = vec::init(base_path.node.idents);
let tail = vec::last(base_path.node.idents);
self.path(base_path.span, head + [helper_name + "_" + tail])
}
fn path(span: span, strs: [str]) -> @ast::path {
@{node: {global: false, idents: strs, types: []},
span: span}
}
fn path_tps(span: span, strs: [str], tps: [@ast::ty]) -> @ast::path {
@{node: {global: false, idents: strs, types: tps},
span: span}
}
fn ty_path(span: span, strs: [str], tps: [@ast::ty]) -> @ast::ty {
@{id: self.next_id(),
node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()),
span: span}
}
fn ty_fn(span: span,
-input_tys: [@ast::ty],
-output: @ast::ty) -> @ast::ty {
let args = vec::map(input_tys) {|ty|
{mode: ast::expl(ast::by_ref),
ty: ty,
ident: "",
id: self.next_id()}
};
@{id: self.next_id(),
node: ast::ty_fn(ast::proto_any, {inputs: args,
output: output,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []}),
span: span}
}
fn ty_nil(span: span) -> @ast::ty {
@{id: self.next_id(), node: ast::ty_nil, span: span}
}
fn expr(span: span, node: ast::expr_) -> @ast::expr {
@{id: self.next_id(), node: node, span: span}
}
fn var_ref(span: span, name: str) -> @ast::expr {
self.expr(span, ast::expr_path(self.path(span, [name])))
}
fn blk(span: span, stmts: [@ast::stmt]) -> ast::blk {
{node: {view_items: [],
stmts: stmts,
expr: none,
id: self.next_id(),
rules: ast::default_blk},
span: span}
}
fn expr_blk(expr: @ast::expr) -> ast::blk {
{node: {view_items: [],
stmts: [],
expr: some(expr),
id: self.next_id(),
rules: ast::default_blk},
span: expr.span}
}
fn binder_pat(span: span, nm: str) -> @ast::pat {
let path = @{node: {global: false,
idents: [nm],
types: []},
span: span};
@{id: self.next_id(),
node: ast::pat_ident(path, none),
span: span}
}
fn stmt(expr: @ast::expr) -> @ast::stmt {
@{node: ast::stmt_semi(expr, self.next_id()),
span: expr.span}
}
fn alt_stmt(arms: [ast::arm], span: span, -v: @ast::expr) -> @ast::stmt {
self.stmt(
self.expr(
span,
ast::expr_alt(v, arms, ast::alt_exhaustive)))
}
fn lit_str(span: span, s: str) -> @ast::expr {
self.expr(
span,
ast::expr_lit(
@{node: ast::lit_str(s),
span: span}))
}
fn lit_uint(span: span, i: uint) -> @ast::expr {
self.expr(
span,
ast::expr_lit(
@{node: ast::lit_uint(i as u64, ast::ty_u),
span: span}))
}
fn lambda(blk: ast::blk) -> @ast::expr {
let ext_cx = self;
let blk_e = self.expr(blk.span, ast::expr_block(blk));
#ast{ {|| $(blk_e) } }
}
fn clone_folder() -> fold::ast_fold {
fold::make_fold({
new_id: {|_id| self.next_id()}
with *fold::default_ast_fold()
})
}
fn clone(v: @ast::expr) -> @ast::expr {
let fld = self.clone_folder();
fld.fold_expr(v)
}
fn clone_ty(v: @ast::ty) -> @ast::ty {
let fld = self.clone_folder();
fld.fold_ty(v)
}
fn clone_ty_param(v: ast::ty_param) -> ast::ty_param {
let fld = self.clone_folder();
fold::fold_ty_param(v, fld)
}
fn at(span: span, expr: @ast::expr) -> @ast::expr {
fn repl_sp(old_span: span, repl_span: span, with_span: span) -> span {
if old_span == repl_span {
with_span
} else {
old_span
}
}
let fld = fold::make_fold({
new_span: repl_sp(_, ast_util::dummy_sp(), span)
with *fold::default_ast_fold()
});
fld.fold_expr(expr)
}
}
fn ser_path(cx: ext_ctxt, tps: ser_tps_map, path: @ast::path,
-s: @ast::expr, -v: @ast::expr)
-> [@ast::stmt] {
let ext_cx = cx; // required for #ast{}
// We want to take a path like a::b::c<...> and generate a call
// like a::b::c::serialize(s, ...), as described above.
let callee =
cx.expr(
path.span,
ast::expr_path(
cx.helper_path(path, "serialize")));
let ty_args = vec::map(path.node.types) {|ty|
let sv_stmts = ser_ty(cx, tps, ty, cx.clone(s), #ast{ __v });
let sv = cx.expr(path.span,
ast::expr_block(cx.blk(path.span, sv_stmts)));
cx.at(ty.span, #ast{ {|__v| $(sv)} })
};
[cx.stmt(
cx.expr(
path.span,
ast::expr_call(callee, [s, v] + ty_args, false)))]
}
fn ser_variant(cx: ext_ctxt,
tps: ser_tps_map,
tys: [@ast::ty],
span: span,
-s: @ast::expr,
pfn: fn([@ast::pat]) -> ast::pat_,
bodyfn: fn(-@ast::expr, ast::blk) -> @ast::expr,
argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr)
-> ast::arm {
let vnames = vec::from_fn(vec::len(tys)) {|i| #fmt["__v%u", i]};
let pats = vec::from_fn(vec::len(tys)) {|i|
cx.binder_pat(tys[i].span, vnames[i])
};
let pat: @ast::pat = @{id: cx.next_id(), node: pfn(pats), span: span};
let stmts = vec::from_fn(vec::len(tys)) {|i|
let v = cx.var_ref(span, vnames[i]);
let arg_blk =
cx.blk(
span,
ser_ty(cx, tps, tys[i], cx.clone(s), v));
cx.stmt(argfn(cx.clone(s), i, arg_blk))
};
let body_blk = cx.blk(span, stmts);
let body = cx.blk(span, [cx.stmt(bodyfn(s, body_blk))]);
{pats: [pat], guard: none, body: body}
}
fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
-s: @ast::expr, -v: @ast::expr) -> @ast::expr {
cx.lambda(cx.blk(ty.span, ser_ty(cx, tps, ty, s, v)))
}
fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
ty: @ast::ty, -s: @ast::expr, -v: @ast::expr)
-> [@ast::stmt] {
let ext_cx = cx; // required for #ast{}
alt ty.node {
ast::ty_nil {
[#ast[stmt]{$(s).emit_nil()}]
}
ast::ty_bot {
cx.span_err(
ty.span, #fmt["Cannot serialize bottom type"]);
[]
}
ast::ty_box(mt) {
let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
[#ast(stmt){$(s).emit_box($(l));}]
}
ast::ty_uniq(mt) {
let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
[#ast(stmt){$(s).emit_uniq($(l));}]
}
ast::ty_ptr(_) | ast::ty_rptr(_, _) {
cx.span_err(
ty.span, #fmt["Cannot serialize pointer types"]);
[]
}
ast::ty_rec(flds) {
let fld_stmts = vec::from_fn(vec::len(flds)) {|fidx|
let fld = flds[fidx];
let vf = cx.expr(fld.span,
ast::expr_field(cx.clone(v),
fld.node.ident,
[]));
let s = cx.clone(s);
let f = cx.lit_str(fld.span, fld.node.ident);
let i = cx.lit_uint(fld.span, fidx);
let l = ser_lambda(cx, tps, fld.node.mt.ty, cx.clone(s), vf);
#ast(stmt){$(s).emit_rec_field($(f), $(i), $(l));}
};
let fld_lambda = cx.lambda(cx.blk(ty.span, fld_stmts));
[#ast(stmt){$(s).emit_rec($(fld_lambda));}]
}
ast::ty_fn(_, _) {
cx.span_err(
ty.span, #fmt["Cannot serialize function types"]);
[]
}
ast::ty_tup(tys) {
// Generate code like
//
// alt v {
// (v1, v2, v3) {
// .. serialize v1, v2, v3 ..
// }
// };
let arms = [
ser_variant(
cx, tps, tys, ty.span, s,
// Generate pattern (v1, v2, v3)
{|pats| ast::pat_tup(pats)},
// Generate body s.emit_tup(3, {|| blk })
{|-s, blk|
let sz = cx.lit_uint(ty.span, vec::len(tys));
let body = cx.lambda(blk);
#ast{ $(s).emit_tup($(sz), $(body)) }
},
// Generate s.emit_tup_elt(i, {|| blk })
{|-s, i, blk|
let idx = cx.lit_uint(ty.span, i);
let body = cx.lambda(blk);
#ast{ $(s).emit_tup_elt($(idx), $(body)) }
})
];
[cx.alt_stmt(arms, ty.span, v)]
}
ast::ty_path(path, _) {
if vec::len(path.node.idents) == 1u &&
vec::is_empty(path.node.types) {
let ident = path.node.idents[0];
alt tps.find(ident) {
some(f) { f(v) }
none { ser_path(cx, tps, path, s, v) }
}
} else {
ser_path(cx, tps, path, s, v)
}
}
ast::ty_constr(ty, _) {
ser_ty(cx, tps, ty, s, v)
}
ast::ty_mac(_) {
cx.span_err(
ty.span, #fmt["Cannot serialize macro types"]);
[]
}
ast::ty_infer {
cx.span_err(
ty.span, #fmt["Cannot serialize inferred types"]);
[]
}
ast::ty_vec(mt) {
let ser_e =
cx.expr(
ty.span,
ast::expr_block(
cx.blk(
ty.span,
ser_ty(
cx, tps, mt.ty,
cx.clone(s),
cx.at(ty.span, #ast{ __e })))));
[#ast(stmt){
std::serialization::emit_from_vec($(s), $(v), {|__e| $(ser_e) })
}]
}
}
}
fn mk_ser_fn(cx: ext_ctxt, span: span, name: str, tps: [ast::ty_param],
f: fn(ext_ctxt, ser_tps_map,
-@ast::expr, -@ast::expr) -> [@ast::stmt])
-> @ast::item {
let ext_cx = cx; // required for #ast
let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
let v_ty = cx.ty_path(span, [name], tp_types);
let tp_inputs =
vec::map(tps, {|tp|
{mode: ast::expl(ast::by_ref),
ty: cx.ty_fn(span,
[cx.ty_path(span, [tp.ident], [])],
cx.ty_nil(span)),
ident: "__s" + tp.ident,
id: cx.next_id()}});
#debug["tp_inputs = %?", tp_inputs];
let ser_inputs: [ast::arg] =
[{mode: ast::expl(ast::by_ref),
ty: cx.ty_path(span, ["__S"], []),
ident: "__s",
id: cx.next_id()},
{mode: ast::expl(ast::by_ref),
ty: v_ty,
ident: "__v",
id: cx.next_id()}]
+ tp_inputs;
let tps_map = map::str_hash();
vec::iter2(tps, tp_inputs) {|tp, arg|
let arg_ident = arg.ident;
tps_map.insert(
tp.ident,
fn@(v: @ast::expr) -> [@ast::stmt] {
let f = cx.var_ref(span, arg_ident);
#debug["serializing type arg %s", arg_ident];
[#ast(stmt){$(f)($(v));}]
});
}
let ser_bnds = @[
ast::bound_iface(cx.ty_path(span,
["std", "serialization", "serializer"],
[]))];
let ser_tps: [ast::ty_param] =
[{ident: "__S",
id: cx.next_id(),
bounds: ser_bnds}] +
vec::map(tps) {|tp| cx.clone_ty_param(tp) };
let ser_output: @ast::ty = @{id: cx.next_id(),
node: ast::ty_nil,
span: span};
let ser_blk = cx.blk(span,
f(cx, tps_map, #ast{ __s }, #ast{ __v }));
@{ident: "serialize_" + name,
attrs: [],
id: cx.next_id(),
node: ast::item_fn({inputs: ser_inputs,
output: ser_output,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []},
ser_tps,
ser_blk),
span: span}
}
// ______________________________________________________________________
fn deser_path(cx: ext_ctxt, tps: deser_tps_map, path: @ast::path,
-d: @ast::expr) -> @ast::expr {
// We want to take a path like a::b::c<...> and generate a call
// like a::b::c::deserialize(d, ...), as described above.
let callee =
cx.expr(
path.span,
ast::expr_path(
cx.helper_path(path, "deserialize")));
let ty_args = vec::map(path.node.types) {|ty|
let dv_expr = deser_ty(cx, tps, ty, cx.clone(d));
cx.lambda(cx.expr_blk(dv_expr))
};
cx.expr(path.span, ast::expr_call(callee, [d] + ty_args, false))
}
fn deser_lambda(cx: ext_ctxt, tps: deser_tps_map, ty: @ast::ty,
-d: @ast::expr) -> @ast::expr {
cx.lambda(cx.expr_blk(deser_ty(cx, tps, ty, d)))
}
fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
ty: @ast::ty, -d: @ast::expr) -> @ast::expr {
let ext_cx = cx; // required for #ast{}
alt ty.node {
ast::ty_nil {
#ast{ $(d).read_nil() }
}
ast::ty_bot {
#ast{ fail }
}
ast::ty_box(mt) {
let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
#ast{ @$(d).read_box($(l)) }
}
ast::ty_uniq(mt) {
let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
#ast{ ~$(d).read_uniq($(l)) }
}
ast::ty_ptr(_) | ast::ty_rptr(_, _) {
#ast{ fail }
}
ast::ty_rec(flds) {
let fields = vec::from_fn(vec::len(flds)) {|fidx|
let fld = flds[fidx];
let d = cx.clone(d);
let f = cx.lit_str(fld.span, fld.node.ident);
let i = cx.lit_uint(fld.span, fidx);
let l = deser_lambda(cx, tps, fld.node.mt.ty, cx.clone(d));
{node: {mutbl: fld.node.mt.mutbl,
ident: fld.node.ident,
expr: #ast{ $(d).read_rec_field($(f), $(i), $(l))} },
span: fld.span}
};
let fld_expr = cx.expr(ty.span, ast::expr_rec(fields, none));
let fld_lambda = cx.lambda(cx.expr_blk(fld_expr));
#ast{ $(d).read_rec($(fld_lambda)) }
}
ast::ty_fn(_, _) {
#ast{ fail }
}
ast::ty_tup(tys) {
// Generate code like
//
// d.read_tup(3u) {||
// (d.read_tup_elt(0u, {||...}),
// d.read_tup_elt(1u, {||...}),
// d.read_tup_elt(2u, {||...}))
// }
let arg_exprs = vec::from_fn(vec::len(tys)) {|i|
let idx = cx.lit_uint(ty.span, i);
let body = deser_lambda(cx, tps, tys[i], cx.clone(d));
#ast{ $(d).read_tup_elt($(idx), $(body)) }
};
let body =
cx.lambda(cx.expr_blk(
cx.expr(ty.span, ast::expr_tup(arg_exprs))));
let sz = cx.lit_uint(ty.span, vec::len(tys));
#ast{ $(d).read_tup($(sz), $(body)) }
}
ast::ty_path(path, _) {
if vec::len(path.node.idents) == 1u &&
vec::is_empty(path.node.types) {
let ident = path.node.idents[0];
alt tps.find(ident) {
some(f) { f() }
none { deser_path(cx, tps, path, d) }
}
} else {
deser_path(cx, tps, path, d)
}
}
ast::ty_constr(ty, constrs) {
deser_ty(cx, tps, ty, d)
}
ast::ty_mac(_) {
#ast{ fail }
}
ast::ty_infer {
#ast{ fail }
}
ast::ty_vec(mt) {
let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
#ast{ std::serialization::read_to_vec($(d), $(l)) }
}
}
}
fn mk_deser_fn(cx: ext_ctxt, span: span, name: str, tps: [ast::ty_param],
f: fn(ext_ctxt, deser_tps_map, -@ast::expr) -> @ast::expr)
-> @ast::item {
let ext_cx = cx; // required for #ast
let tp_types = vec::map(tps, {|tp| cx.ty_path(span, [tp.ident], [])});
let v_ty = cx.ty_path(span, [name], tp_types);
let tp_inputs =
vec::map(tps, {|tp|
{mode: ast::expl(ast::by_ref),
ty: cx.ty_fn(span,
[],
cx.ty_path(span, [tp.ident], [])),
ident: "__d" + tp.ident,
id: cx.next_id()}});
#debug["tp_inputs = %?", tp_inputs];
let deser_inputs: [ast::arg] =
[{mode: ast::expl(ast::by_ref),
ty: cx.ty_path(span, ["__D"], []),
ident: "__d",
id: cx.next_id()}]
+ tp_inputs;
let tps_map = map::str_hash();
vec::iter2(tps, tp_inputs) {|tp, arg|
let arg_ident = arg.ident;
tps_map.insert(
tp.ident,
fn@() -> @ast::expr {
let f = cx.var_ref(span, arg_ident);
#ast{ $(f)() }
});
}
let deser_bnds = @[
ast::bound_iface(cx.ty_path(span,
["std", "serialization", "deserializer"],
[]))];
let deser_tps: [ast::ty_param] =
[{ident: "__D",
id: cx.next_id(),
bounds: deser_bnds}] + vec::map(tps) {|tp|
let cloned = cx.clone_ty_param(tp);
{bounds: @(*cloned.bounds + [ast::bound_copy]) with cloned}
};
let deser_blk = cx.expr_blk(f(cx, tps_map, #ast(expr){__d}));
@{ident: "deserialize_" + name,
attrs: [],
id: cx.next_id(),
node: ast::item_fn({inputs: deser_inputs,
output: v_ty,
purity: ast::impure_fn,
cf: ast::return_val,
constraints: []},
deser_tps,
deser_blk),
span: span}
}
fn ty_fns(cx: ext_ctxt, name: str, ty: @ast::ty, tps: [ast::ty_param])
-> [@ast::item] {
let span = ty.span;
[
mk_ser_fn(cx, span, name, tps, ser_ty(_, _, ty, _, _)),
mk_deser_fn(cx, span, name, tps, deser_ty(_, _, ty, _))
]
}
fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: str,
e_span: span, variants: [ast::variant],
-s: @ast::expr, -v: @ast::expr) -> [@ast::stmt] {
let ext_cx = cx;
let arms = vec::from_fn(vec::len(variants)) {|vidx|
let variant = variants[vidx];
let v_span = variant.span;
let v_name = variant.node.name;
let variant_tys = vec::map(variant.node.args) {|a| a.ty };
ser_variant(
cx, tps, variant_tys, v_span, cx.clone(s),
// Generate pattern var(v1, v2, v3)
{|pats|
if vec::is_empty(pats) {
ast::pat_ident(cx.path(v_span, [v_name]), none)
} else {
ast::pat_enum(cx.path(v_span, [v_name]), pats)
}
},
// Generate body s.emit_enum_variant("foo", 0u,
// 3u, {|| blk })
{|-s, blk|
let v_name = cx.lit_str(v_span, v_name);
let v_id = cx.lit_uint(v_span, vidx);
let sz = cx.lit_uint(v_span, vec::len(variant_tys));
let body = cx.lambda(blk);
#ast[expr]{
$(s).emit_enum_variant($(v_name), $(v_id),
$(sz), $(body))
}
},
// Generate s.emit_enum_variant_arg(i, {|| blk })
{|-s, i, blk|
let idx = cx.lit_uint(v_span, i);
let body = cx.lambda(blk);
#ast[expr]{
$(s).emit_enum_variant_arg($(idx), $(body))
}
})
};
let lam = cx.lambda(cx.blk(e_span, [cx.alt_stmt(arms, e_span, v)]));
let e_name = cx.lit_str(e_span, e_name);
[#ast(stmt){ $(s).emit_enum($(e_name), $(lam)) }]
}
fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: str,
e_span: span, variants: [ast::variant],
-d: @ast::expr) -> @ast::expr {
let ext_cx = cx;
let arms: [ast::arm] = vec::from_fn(vec::len(variants)) {|vidx|
let variant = variants[vidx];
let v_span = variant.span;
let v_name = variant.node.name;
let tys = vec::map(variant.node.args) {|a| a.ty };
let arg_exprs = vec::from_fn(vec::len(tys)) {|i|
let idx = cx.lit_uint(v_span, i);
let body = deser_lambda(cx, tps, tys[i], cx.clone(d));
#ast{ $(d).read_enum_variant_arg($(idx), $(body)) }
};
let body = {
if vec::is_empty(tys) {
// for a nullary variant v, do "v"
cx.var_ref(v_span, v_name)
} else {
// for an n-ary variant v, do "v(a_1, ..., a_n)"
cx.expr(v_span, ast::expr_call(
cx.var_ref(v_span, v_name), arg_exprs, false))
}
};
{pats: [@{id: cx.next_id(),
node: ast::pat_lit(cx.lit_uint(v_span, vidx)),
span: v_span}],
guard: none,
body: cx.expr_blk(body)}
};
// Generate code like:
let e_name = cx.lit_str(e_span, e_name);
let alt_expr = cx.expr(e_span,
ast::expr_alt(#ast{__i}, arms, ast::alt_check));
let var_lambda = #ast{ {|__i| $(alt_expr)} };
let read_var = #ast{ $(cx.clone(d)).read_enum_variant($(var_lambda)) };
let read_lambda = cx.lambda(cx.expr_blk(read_var));
#ast{ $(d).read_enum($(e_name), $(read_lambda)) }
}
fn enum_fns(cx: ext_ctxt, e_name: str, e_span: span,
variants: [ast::variant], tps: [ast::ty_param])
-> [@ast::item] {
[
mk_ser_fn(cx, e_span, e_name, tps,
ser_enum(_, _, e_name, e_span, variants, _, _)),
mk_deser_fn(cx, e_span, e_name, tps,
deser_enum(_, _, e_name, e_span, variants, _))
]
}

View File

@@ -1,178 +0,0 @@
import std::map::hashmap;
import syntax::parse::parser;
import driver::diagnostic::span_handler;
import codemap::{codemap, span, expn_info, expanded_from};
import std::map::str_hash;
type syntax_expander_ =
fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> @ast::expr;
type syntax_expander = {
expander: syntax_expander_,
span: option<span>};
type macro_def = {ident: str, ext: syntax_extension};
type macro_definer =
fn@(ext_ctxt, span, ast::mac_arg, ast::mac_body) -> macro_def;
type item_decorator =
fn@(ext_ctxt, span, ast::meta_item, [@ast::item]) -> [@ast::item];
enum syntax_extension {
normal(syntax_expander),
macro_defining(macro_definer),
item_decorator(item_decorator),
}
// A temporary hard-coded map of methods for expanding syntax extension
// AST nodes into full ASTs
fn syntax_expander_table() -> hashmap<str, syntax_extension> {
fn builtin(f: syntax_expander_) -> syntax_extension
{normal({expander: f, span: none})}
let syntax_expanders = str_hash::<syntax_extension>();
syntax_expanders.insert("fmt", builtin(ext::fmt::expand_syntax_ext));
syntax_expanders.insert("auto_serialize",
item_decorator(ext::auto_serialize::expand));
syntax_expanders.insert("env", builtin(ext::env::expand_syntax_ext));
syntax_expanders.insert("macro",
macro_defining(ext::simplext::add_new_extension));
syntax_expanders.insert("concat_idents",
builtin(ext::concat_idents::expand_syntax_ext));
syntax_expanders.insert("ident_to_str",
builtin(ext::ident_to_str::expand_syntax_ext));
syntax_expanders.insert("log_syntax",
builtin(ext::log_syntax::expand_syntax_ext));
syntax_expanders.insert("ast",
builtin(ext::qquote::expand_ast));
ret syntax_expanders;
}
iface ext_ctxt {
fn codemap() -> codemap;
fn parse_sess() -> parser::parse_sess;
fn cfg() -> ast::crate_cfg;
fn print_backtrace();
fn backtrace() -> expn_info;
fn bt_push(ei: codemap::expn_info_);
fn bt_pop();
fn span_fatal(sp: span, msg: str) -> !;
fn span_err(sp: span, msg: str);
fn span_unimpl(sp: span, msg: str) -> !;
fn span_bug(sp: span, msg: str) -> !;
fn bug(msg: str) -> !;
fn next_id() -> ast::node_id;
}
fn mk_ctxt(parse_sess: parser::parse_sess,
cfg: ast::crate_cfg) -> ext_ctxt {
type ctxt_repr = {parse_sess: parser::parse_sess,
cfg: ast::crate_cfg,
mut backtrace: expn_info};
impl of ext_ctxt for ctxt_repr {
fn codemap() -> codemap { self.parse_sess.cm }
fn parse_sess() -> parser::parse_sess { self.parse_sess }
fn cfg() -> ast::crate_cfg { self.cfg }
fn print_backtrace() { }
fn backtrace() -> expn_info { self.backtrace }
fn bt_push(ei: codemap::expn_info_) {
alt ei {
expanded_from({call_site: cs, callie: callie}) {
self.backtrace =
some(@expanded_from({
call_site: {lo: cs.lo, hi: cs.hi,
expn_info: self.backtrace},
callie: callie}));
}
}
}
fn bt_pop() {
alt self.backtrace {
some(@expanded_from({call_site: {expn_info: prev, _}, _})) {
self.backtrace = prev
}
_ { self.bug("tried to pop without a push"); }
}
}
fn span_fatal(sp: span, msg: str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_fatal(sp, msg);
}
fn span_err(sp: span, msg: str) {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_err(sp, msg);
}
fn span_unimpl(sp: span, msg: str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
}
fn span_bug(sp: span, msg: str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.span_bug(sp, msg);
}
fn bug(msg: str) -> ! {
self.print_backtrace();
self.parse_sess.span_diagnostic.handler().bug(msg);
}
fn next_id() -> ast::node_id {
ret parser::next_node_id(self.parse_sess);
}
}
let imp : ctxt_repr = {
parse_sess: parse_sess,
cfg: cfg,
mut backtrace: none
};
ret imp as ext_ctxt
}
fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: str) -> str {
alt expr.node {
ast::expr_lit(l) {
alt l.node {
ast::lit_str(s) { ret s; }
_ { cx.span_fatal(l.span, error); }
}
}
_ { cx.span_fatal(expr.span, error); }
}
}
fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: str) -> ast::ident {
alt expr.node {
ast::expr_path(p) {
if vec::len(p.node.types) > 0u || vec::len(p.node.idents) != 1u {
cx.span_fatal(expr.span, error);
} else { ret p.node.idents[0]; }
}
_ { cx.span_fatal(expr.span, error); }
}
}
fn make_new_lit(cx: ext_ctxt, sp: codemap::span, lit: ast::lit_) ->
@ast::expr {
let sp_lit = @{node: lit, span: sp};
ret @{id: cx.next_id(), node: ast::expr_lit(sp_lit), span: sp};
}
fn get_mac_arg(cx: ext_ctxt, sp: span, arg: ast::mac_arg) -> @ast::expr {
alt (arg) {
some(expr) {expr}
none {cx.span_fatal(sp, "missing macro args")}
}
}
fn get_mac_body(cx: ext_ctxt, sp: span, args: ast::mac_body)
-> ast::mac_body_
{
alt (args) {
some(body) {body}
none {cx.span_fatal(sp, "missing macro body")}
}
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View File

@@ -1,81 +0,0 @@
import codemap::span;
import syntax::ext::base::ext_ctxt;
// NOTE: Moved from fmt.rs which had this fixme:
// FIXME: Cleanup the naming of these functions
fn mk_lit(cx: ext_ctxt, sp: span, lit: ast::lit_) -> @ast::expr {
let sp_lit = @{node: lit, span: sp};
ret @{id: cx.next_id(), node: ast::expr_lit(sp_lit), span: sp};
}
fn mk_str(cx: ext_ctxt, sp: span, s: str) -> @ast::expr {
let lit = ast::lit_str(s);
ret mk_lit(cx, sp, lit);
}
fn mk_int(cx: ext_ctxt, sp: span, i: int) -> @ast::expr {
let lit = ast::lit_int(i as i64, ast::ty_i);
ret mk_lit(cx, sp, lit);
}
fn mk_uint(cx: ext_ctxt, sp: span, u: uint) -> @ast::expr {
let lit = ast::lit_uint(u as u64, ast::ty_u);
ret mk_lit(cx, sp, lit);
}
fn mk_binary(cx: ext_ctxt, sp: span, op: ast::binop,
lhs: @ast::expr, rhs: @ast::expr)
-> @ast::expr {
let binexpr = ast::expr_binary(op, lhs, rhs);
ret @{id: cx.next_id(), node: binexpr, span: sp};
}
fn mk_unary(cx: ext_ctxt, sp: span, op: ast::unop, e: @ast::expr)
-> @ast::expr {
let expr = ast::expr_unary(op, e);
ret @{id: cx.next_id(), node: expr, span: sp};
}
fn mk_path(cx: ext_ctxt, sp: span, idents: [ast::ident]) ->
@ast::expr {
let path = {global: false, idents: idents, types: []};
let sp_path = @{node: path, span: sp};
let pathexpr = ast::expr_path(sp_path);
ret @{id: cx.next_id(), node: pathexpr, span: sp};
}
fn mk_access_(cx: ext_ctxt, sp: span, p: @ast::expr, m: ast::ident)
-> @ast::expr {
let expr = ast::expr_field(p, m, []);
ret @{id: cx.next_id(), node: expr, span: sp};
}
fn mk_access(cx: ext_ctxt, sp: span, p: [ast::ident], m: ast::ident)
-> @ast::expr {
let pathexpr = mk_path(cx, sp, p);
ret mk_access_(cx, sp, pathexpr, m);
}
fn mk_call_(cx: ext_ctxt, sp: span, fn_expr: @ast::expr,
args: [@ast::expr]) -> @ast::expr {
let callexpr = ast::expr_call(fn_expr, args, false);
ret @{id: cx.next_id(), node: callexpr, span: sp};
}
fn mk_call(cx: ext_ctxt, sp: span, fn_path: [ast::ident],
args: [@ast::expr]) -> @ast::expr {
let pathexpr = mk_path(cx, sp, fn_path);
ret mk_call_(cx, sp, pathexpr, args);
}
// e = expr, t = type
fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]) ->
@ast::expr {
let vecexpr = ast::expr_vec(exprs, ast::m_imm);
ret @{id: cx.next_id(), node: vecexpr, span: sp};
}
fn mk_rec_e(cx: ext_ctxt, sp: span,
fields: [{ident: ast::ident, ex: @ast::expr}]) ->
@ast::expr {
let mut astfields: [ast::field] = [];
for field: {ident: ast::ident, ex: @ast::expr} in fields {
let ident = field.ident;
let val = field.ex;
let astfield =
{node: {mutbl: ast::m_imm, ident: ident, expr: val}, span: sp};
astfields += [astfield];
}
let recexpr = ast::expr_rec(astfields, option::none::<@ast::expr>);
ret @{id: cx.next_id(), node: recexpr, span: sp};
}

View File

@@ -1,24 +0,0 @@
import base::*;
import syntax::ast;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let arg = get_mac_arg(cx,sp,arg);
let args: [@ast::expr] =
alt arg.node {
ast::expr_vec(elts, _) { elts }
_ {
cx.span_fatal(sp, "#concat_idents requires a vector argument .")
}
};
let mut res: ast::ident = "";
for e: @ast::expr in args {
res += expr_to_ident(cx, e, "expected an ident");
}
ret @{id: cx.next_id(),
node: ast::expr_path(@{node: {global: false, idents: [res],
types: []},
span: sp}),
span: sp};
}

View File

@@ -1,44 +0,0 @@
/*
* The compiler code necessary to support the #env extension. Eventually this
* should all get sucked into either the compiler syntax extension plugin
* interface.
*/
import base::*;
export expand_syntax_ext;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
let arg = get_mac_arg(cx,sp,arg);
let args: [@ast::expr] =
alt arg.node {
ast::expr_vec(elts, _) { elts }
_ {
cx.span_fatal(sp, "#env requires arguments of the form `[...]`.")
}
};
if vec::len::<@ast::expr>(args) != 1u {
cx.span_fatal(sp, "malformed #env call");
}
// FIXME: if this was more thorough it would manufacture an
// option<str> rather than just an maybe-empty string.
let var = expr_to_str(cx, args[0], "#env requires a string");
alt os::getenv(var) {
option::none { ret make_new_str(cx, sp, ""); }
option::some(s) { ret make_new_str(cx, sp, s); }
}
}
fn make_new_str(cx: ext_ctxt, sp: codemap::span, s: str) -> @ast::expr {
ret make_new_lit(cx, sp, ast::lit_str(s));
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View File

@@ -1,152 +0,0 @@
import driver::session;
import driver::session::session;
import std::map::hashmap;
import syntax::attr;
import syntax::ast::{crate, expr_, expr_mac, mac_invoc};
import syntax::fold::*;
import syntax::ext::base::*;
import syntax::ext::qquote::{qq_helper};
import syntax::parse::parser;
import syntax::parse::parser::parse_expr_from_source_str;
import codemap::{span, expanded_from};
fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
e: expr_, s: span, fld: ast_fold,
orig: fn@(expr_, span, ast_fold) -> (expr_, span))
-> (expr_, span)
{
ret alt e {
expr_mac(mac) {
alt mac.node {
mac_invoc(pth, args, body) {
assert (vec::len(pth.node.idents) > 0u);
let extname = pth.node.idents[0];
alt exts.find(extname) {
none {
cx.span_fatal(pth.span,
#fmt["macro undefined: '%s'", extname])
}
some(item_decorator(_)) {
cx.span_fatal(
pth.span,
#fmt["%s can only be used as a decorator", extname]);
}
some(normal({expander: exp, span: exp_sp})) {
let expanded = exp(cx, pth.span, args, body);
let info = {call_site: s,
callie: {name: extname, span: exp_sp}};
cx.bt_push(expanded_from(info));
//keep going, outside-in
let fully_expanded = fld.fold_expr(expanded).node;
cx.bt_pop();
(fully_expanded, s)
}
some(macro_defining(ext)) {
let named_extension = ext(cx, pth.span, args, body);
exts.insert(named_extension.ident, named_extension.ext);
(ast::expr_rec([], none), s)
}
}
}
_ { cx.span_bug(mac.span, "naked syntactic bit") }
}
}
_ { orig(e, s, fld) }
};
}
fn expand_mod_items(exts: hashmap<str, syntax_extension>, cx: ext_ctxt,
module: ast::_mod, fld: ast_fold,
orig: fn@(ast::_mod, ast_fold) -> ast::_mod)
-> ast::_mod
{
// Fold the contents first:
let module = orig(module, fld);
// For each item, look through the attributes. If any of them are
// decorated with "item decorators", then use that function to transform
// the item into a new set of items.
let new_items = vec::flat_map(module.items) {|item|
vec::foldr(item.attrs, [item]) {|attr, items|
let mname = alt attr.node.value.node {
ast::meta_word(n) { n }
ast::meta_name_value(n, _) { n }
ast::meta_list(n, _) { n }
};
alt exts.find(mname) {
none { items }
some(normal(_)) | some(macro_defining(_)) {
cx.span_err(
attr.span,
#fmt["%s cannot be used as a decorator", mname]);
items
}
some(item_decorator(dec_fn)) {
dec_fn(cx, attr.span, attr.node.value, items)
}
}
}
};
ret {items: new_items with module};
}
fn new_span(cx: ext_ctxt, sp: span) -> span {
/* this discards information in the case of macro-defining macros */
ret {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
}
// FIXME: this is a terrible kludge to inject some macros into the default
// compilation environment. When the macro-definition system is substantially
// more mature, these should move from here, into a compiled part of libcore
// at very least.
// Source text defining the built-in logging macros (#error, #warn,
// #info, #debug); parsed and expanded in expand_crate purely for the
// side effect of registering these macros (see FIXME above).
fn core_macros() -> str {
    ret
"{
    #macro([#error[f, ...], log(core::error, #fmt[f, ...])]);
    #macro([#warn[f, ...], log(core::warn, #fmt[f, ...])]);
    #macro([#info[f, ...], log(core::info, #fmt[f, ...])]);
    #macro([#debug[f, ...], log(core::debug, #fmt[f, ...])]);
}";
}
// Top-level macro-expansion pass: builds an AST fold whose expr, mod
// and span hooks perform syntax extension, then runs it over the crate.
fn expand_crate(parse_sess: parser::parse_sess,
                cfg: ast::crate_cfg, c: @crate) -> @crate {
    let exts = syntax_expander_table();
    let afp = default_ast_fold();
    let cx: ext_ctxt = mk_ctxt(parse_sess, cfg);
    let f_pre =
        {fold_expr: bind expand_expr(exts, cx, _, _, _, afp.fold_expr),
         fold_mod: bind expand_mod_items(exts, cx, _, _, afp.fold_mod),
         new_span: bind new_span(cx, _)
         with *afp};
    let f = make_fold(f_pre);
    // Parse the core-macro stub block as an expression...
    let cm = parse_expr_from_source_str("<core-macros>",
                                        @core_macros(),
                                        cfg,
                                        parse_sess);

    // This is run for its side-effects on the expander env,
    // as it registers all the core macros as expanders.
    f.fold_expr(cm);

    let res = @f.fold_crate(*c);
    ret res;
}
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:

View File

@@ -1,293 +0,0 @@
/*
* The compiler code necessary to support the #fmt extension. Eventually this
* should all get sucked into either the standard library extfmt module or the
* compiler syntax extension plugin interface.
*/
import extfmt::ct::*;
import base::*;
import codemap::span;
import syntax::ext::build::*;
export expand_syntax_ext;
// Entry point for the #fmt extension.  Expects its argument to be a
// vector literal whose first element is a string-literal format string;
// parses the format string into pieces and hands off to pieces_to_expr.
fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                     _body: ast::mac_body) -> @ast::expr {
    let arg = get_mac_arg(cx,sp,arg);
    let args: [@ast::expr] =
        alt arg.node {
          ast::expr_vec(elts, _) { elts }
          _ {
            cx.span_fatal(sp, "#fmt requires arguments of the form `[...]`.")
          }
        };
    if vec::len::<@ast::expr>(args) == 0u {
        cx.span_fatal(sp, "#fmt requires a format string");
    }
    let fmt =
        expr_to_str(cx, args[0],
                    "first argument to #fmt must be a string literal.");
    let fmtspan = args[0].span;
    #debug("Format string:");
    log(debug, fmt);
    // Parse errors in the format string are reported at the span of the
    // format-string argument, not the whole invocation.
    fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: str) -> ! {
        cx.span_fatal(sp, msg);
    }
    let parse_fmt_err = bind parse_fmt_err_(cx, fmtspan, _);
    let pieces = parse_fmt_string(fmt, parse_fmt_err);
    ret pieces_to_expr(cx, sp, pieces, args);
}
// FIXME: A lot of these functions for producing expressions can probably
// be factored out in common with other code that builds expressions.
// FIXME: Cleanup the naming of these functions
// NOTE: Moved many of the common ones to build.rs --kevina
// Translate the parsed format-string pieces plus the user's argument
// expressions into a single expression: a chain of `+` concatenations of
// string literals and calls to extfmt::rt::conv_* conversion functions.
fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
   -> @ast::expr {
    // All runtime support lives under extfmt::rt.
    fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> [ast::ident] {
        ret ["extfmt", "rt", ident];
    }
    fn make_rt_path_expr(cx: ext_ctxt, sp: span, ident: str) -> @ast::expr {
        let path = make_path_vec(cx, ident);
        ret mk_path(cx, sp, path);
    }
    // Produces an AST expression that represents a RT::conv record,
    // which tells the RT::conv* functions how to perform the conversion
    fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
        // Build a vector-literal of the rt flag constants for this conv.
        fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr {
            let mut flagexprs: [@ast::expr] = [];
            for f: flag in flags {
                let mut fstr;
                alt f {
                  flag_left_justify { fstr = "flag_left_justify"; }
                  flag_left_zero_pad { fstr = "flag_left_zero_pad"; }
                  flag_space_for_sign { fstr = "flag_space_for_sign"; }
                  flag_sign_always { fstr = "flag_sign_always"; }
                  flag_alternate { fstr = "flag_alternate"; }
                }
                flagexprs += [make_rt_path_expr(cx, sp, fstr)];
            }
            ret mk_vec_e(cx, sp, flagexprs);
        }
        // Width/precision: either implied or a literal count.
        fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr {
            alt cnt {
              count_implied {
                ret make_rt_path_expr(cx, sp, "count_implied");
              }
              count_is(c) {
                let count_lit = mk_int(cx, sp, c);
                let count_is_path = make_path_vec(cx, "count_is");
                let count_is_args = [count_lit];
                ret mk_call(cx, sp, count_is_path, count_is_args);
              }
              _ { cx.span_unimpl(sp, "unimplemented #fmt conversion"); }
            }
        }
        // Only hex/bits/octal need a distinguished rt type; everything
        // else formats via ty_default.
        fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr {
            let mut rt_type;
            alt t {
              ty_hex(c) {
                alt c {
                  case_upper { rt_type = "ty_hex_upper"; }
                  case_lower { rt_type = "ty_hex_lower"; }
                }
              }
              ty_bits { rt_type = "ty_bits"; }
              ty_octal { rt_type = "ty_octal"; }
              _ { rt_type = "ty_default"; }
            }
            ret make_rt_path_expr(cx, sp, rt_type);
        }
        // Assemble the {flags, width, precision, ty} record literal.
        fn make_conv_rec(cx: ext_ctxt, sp: span, flags_expr: @ast::expr,
                         width_expr: @ast::expr, precision_expr: @ast::expr,
                         ty_expr: @ast::expr) -> @ast::expr {
            ret mk_rec_e(cx, sp,
                         [{ident: "flags", ex: flags_expr},
                          {ident: "width", ex: width_expr},
                          {ident: "precision", ex: precision_expr},
                          {ident: "ty", ex: ty_expr}]);
        }
        let rt_conv_flags = make_flags(cx, sp, cnv.flags);
        let rt_conv_width = make_count(cx, sp, cnv.width);
        let rt_conv_precision = make_count(cx, sp, cnv.precision);
        let rt_conv_ty = make_ty(cx, sp, cnv.ty);
        ret make_conv_rec(cx, sp, rt_conv_flags, rt_conv_width,
                          rt_conv_precision, rt_conv_ty);
    }
    // Build `extfmt::rt::conv_<type>(<conv record>, arg)`.
    fn make_conv_call(cx: ext_ctxt, sp: span, conv_type: str, cnv: conv,
                      arg: @ast::expr) -> @ast::expr {
        let fname = "conv_" + conv_type;
        let path = make_path_vec(cx, fname);
        let cnv_expr = make_rt_conv_expr(cx, sp, cnv);
        let args = [cnv_expr, arg];
        ret mk_call(cx, arg.span, path, args);
    }
    // Validate a conversion against what the runtime supports, then emit
    // the appropriate conv_* call for its type.
    fn make_new_conv(cx: ext_ctxt, sp: span, cnv: conv, arg: @ast::expr) ->
       @ast::expr {
        // FIXME: Extract all this validation into extfmt::ct
        fn is_signed_type(cnv: conv) -> bool {
            alt cnv.ty {
              ty_int(s) {
                alt s { signed { ret true; } unsigned { ret false; } }
              }
              ty_float { ret true; }
              _ { ret false; }
            }
        }
        let unsupported = "conversion not supported in #fmt string";
        alt cnv.param {
          option::none { }
          _ { cx.span_unimpl(sp, unsupported); }
        }
        // Sign-related flags only make sense on signed conversions.
        for f: flag in cnv.flags {
            alt f {
              flag_left_justify { }
              flag_sign_always {
                if !is_signed_type(cnv) {
                    cx.span_fatal(sp,
                                  "+ flag only valid in " +
                                      "signed #fmt conversion");
                }
              }
              flag_space_for_sign {
                if !is_signed_type(cnv) {
                    cx.span_fatal(sp,
                                  "space flag only valid in " +
                                      "signed #fmt conversions");
                }
              }
              flag_left_zero_pad { }
              _ { cx.span_unimpl(sp, unsupported); }
            }
        }
        alt cnv.width {
          count_implied { }
          count_is(_) { }
          _ { cx.span_unimpl(sp, unsupported); }
        }
        alt cnv.precision {
          count_implied { }
          count_is(_) { }
          _ { cx.span_unimpl(sp, unsupported); }
        }
        alt cnv.ty {
          ty_str { ret make_conv_call(cx, arg.span, "str", cnv, arg); }
          ty_int(sign) {
            alt sign {
              signed { ret make_conv_call(cx, arg.span, "int", cnv, arg); }
              unsigned {
                ret make_conv_call(cx, arg.span, "uint", cnv, arg);
              }
            }
          }
          ty_bool { ret make_conv_call(cx, arg.span, "bool", cnv, arg); }
          ty_char { ret make_conv_call(cx, arg.span, "char", cnv, arg); }
          // hex/bits/octal all route through the uint converter; the conv
          // record's ty field selects the radix at runtime.
          ty_hex(_) { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
          ty_bits { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
          ty_octal { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
          ty_float { ret make_conv_call(cx, arg.span, "float", cnv, arg); }
          ty_poly { ret make_conv_call(cx, arg.span, "poly", cnv, arg); }
          _ { cx.span_unimpl(sp, unsupported); }
        }
    }
    // Debug-log a description of one conversion spec.
    fn log_conv(c: conv) {
        alt c.param {
          some(p) { log(debug, "param: " + int::to_str(p, 10u)); }
          _ { #debug("param: none"); }
        }
        for f: flag in c.flags {
            alt f {
              flag_left_justify { #debug("flag: left justify"); }
              flag_left_zero_pad { #debug("flag: left zero pad"); }
              flag_space_for_sign { #debug("flag: left space pad"); }
              flag_sign_always { #debug("flag: sign always"); }
              flag_alternate { #debug("flag: alternate"); }
            }
        }
        alt c.width {
          count_is(i) { log(debug,
                                "width: count is " + int::to_str(i, 10u)); }
          count_is_param(i) {
            log(debug,
                    "width: count is param " + int::to_str(i, 10u));
          }
          count_is_next_param { #debug("width: count is next param"); }
          count_implied { #debug("width: count is implied"); }
        }
        alt c.precision {
          count_is(i) { log(debug,
                                "prec: count is " + int::to_str(i, 10u)); }
          count_is_param(i) {
            log(debug,
                    "prec: count is param " + int::to_str(i, 10u));
          }
          count_is_next_param { #debug("prec: count is next param"); }
          count_implied { #debug("prec: count is implied"); }
        }
        alt c.ty {
          ty_bool { #debug("type: bool"); }
          ty_str { #debug("type: str"); }
          ty_char { #debug("type: char"); }
          ty_int(s) {
            alt s {
              signed { #debug("type: signed"); }
              unsigned { #debug("type: unsigned"); }
            }
          }
          ty_bits { #debug("type: bits"); }
          ty_hex(cs) {
            alt cs {
              case_upper { #debug("type: uhex"); }
              case_lower { #debug("type: lhex"); }
            }
          }
          ty_octal { #debug("type: octal"); }
          ty_float { #debug("type: float"); }
          ty_poly { #debug("type: poly"); }
        }
    }
    let fmt_sp = args[0].span;
    // n counts conversions consumed; args[0] is the format string itself,
    // so conversion i consumes args[i] after the increment below.
    let mut n = 0u;
    let mut tmp_expr = mk_str(cx, sp, "");
    let nargs = vec::len::<@ast::expr>(args);
    for pc: piece in pieces {
        alt pc {
          piece_string(s) {
            let s_expr = mk_str(cx, fmt_sp, s);
            tmp_expr = mk_binary(cx, fmt_sp, ast::add, tmp_expr, s_expr);
          }
          piece_conv(conv) {
            n += 1u;
            if n >= nargs {
                cx.span_fatal(sp,
                              "not enough arguments to #fmt " +
                                  "for the given format string");
            }
            #debug("Building conversion:");
            log_conv(conv);
            let arg_expr = args[n];
            let c_expr = make_new_conv(cx, fmt_sp, conv, arg_expr);
            tmp_expr = mk_binary(cx, fmt_sp, ast::add, tmp_expr, c_expr);
          }
        }
    }
    let expected_nargs = n + 1u; // n conversions + the fmt string
    if expected_nargs < nargs {
        cx.span_fatal
            (sp, #fmt["too many arguments to #fmt. found %u, expected %u",
                           nargs, expected_nargs]);
    }
    ret tmp_expr;
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//

View File

@@ -1,22 +0,0 @@
import base::*;
import syntax::ast;
// Entry point for #ident_to_str: takes exactly one identifier argument
// and expands to a string literal containing that identifier's text.
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                     _body: ast::mac_body) -> @ast::expr {
    let arg = get_mac_arg(cx,sp,arg);
    let args: [@ast::expr] =
        alt arg.node {
          ast::expr_vec(elts, _) { elts }
          _ {
            cx.span_fatal(sp, "#ident_to_str requires a vector argument .")
          }
        };
    if vec::len::<@ast::expr>(args) != 1u {
        cx.span_fatal(sp, "malformed #ident_to_str call");
    }

    ret make_new_lit(cx, sp,
                     ast::lit_str(expr_to_ident(cx, args[0u],
                                                "expected an ident")));
}

View File

@@ -1,13 +0,0 @@
import base::*;
import syntax::ast;
import io::writer_util;
// Entry point for #log_syntax: pretty-prints its argument expression to
// stdout at compile time and expands to a trivial (empty-record) expr.
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
                     _body: ast::mac_body) -> @ast::expr {
    let arg = get_mac_arg(cx,sp,arg);
    cx.print_backtrace();
    io::stdout().write_line(print::pprust::expr_to_str(arg));

    //trivial expression
    ret @{id: cx.next_id(), node: ast::expr_rec([], option::none), span: sp};
}

View File

@@ -1,346 +0,0 @@
import syntax::ast::{crate, expr_, mac_invoc,
mac_aq, mac_var};
import syntax::fold::*;
import syntax::visit::*;
import syntax::ext::base::*;
import syntax::ext::build::*;
import syntax::parse::parser;
import syntax::parse::parser::{parser, parse_from_source_str};
import syntax::print::*;
import io::*;
import codemap::span;
// Context threaded through the anti-quote gathering visitor: `lo` is the
// start offset of the quoted snippet, `gather` collects each antiquote's
// byte range (relative to `lo`), its expression, and the name of the
// constructor fn ("from_expr"/"from_ty") used to rebuild it.
type aq_ctxt = @{lo: uint,
                 mut gather: [{lo: uint, hi: uint,
                               e: @ast::expr,
                               constr: str}]};
// A captured antiquote value: either an expression or a type fragment.
enum fragment {
    from_expr(@ast::expr),
    from_ty(@ast::ty)
}
// Common interface over every AST node kind that #ast can quote:
// exposes the node's span, visitation, antiquote-mac extraction, the
// parse function to re-parse its source text, and the fold fn name used
// when splicing replacements back in.
iface qq_helper {
    fn span() -> span;
    fn visit(aq_ctxt, vt<aq_ctxt>);
    fn extract_mac() -> option<ast::mac_>;
    fn mk_parse_fn(ext_ctxt,span) -> @ast::expr;
    fn get_fold_fn() -> str;
}
// qq_helper for whole crates.  Crates cannot contain a bare mac node,
// so extract_mac is unreachable (fail).
impl of qq_helper for @ast::crate {
    fn span() -> span {self.span}
    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_crate(*self, cx, v);}
    fn extract_mac() -> option<ast::mac_> {fail}
    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_crate"])
    }
    fn get_fold_fn() -> str {"fold_crate"}
}
// qq_helper for expressions; expr_mac nodes can carry antiquotes.
impl of qq_helper for @ast::expr {
    fn span() -> span {self.span}
    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);}
    fn extract_mac() -> option<ast::mac_> {
        alt (self.node) {
          ast::expr_mac({node: mac, _}) {some(mac)}
          _ {none}
        }
    }
    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
        mk_path(cx, sp, ["syntax", "parse", "parser", "parse_expr"])
    }
    fn get_fold_fn() -> str {"fold_expr"}
}
// qq_helper for types; ty_mac nodes can carry antiquotes.
impl of qq_helper for @ast::ty {
    fn span() -> span {self.span}
    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);}
    fn extract_mac() -> option<ast::mac_> {
        alt (self.node) {
          ast::ty_mac({node: mac, _}) {some(mac)}
          _ {none}
        }
    }
    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_ty"])
    }
    fn get_fold_fn() -> str {"fold_ty"}
}
// qq_helper for items; items have no mac form, so extract_mac fails.
impl of qq_helper for @ast::item {
    fn span() -> span {self.span}
    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_item(self, cx, v);}
    fn extract_mac() -> option<ast::mac_> {fail}
    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_item"])
    }
    fn get_fold_fn() -> str {"fold_item"}
}
// qq_helper for statements; statements have no mac form, so
// extract_mac fails.
impl of qq_helper for @ast::stmt {
    fn span() -> span {self.span}
    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_stmt(self, cx, v);}
    fn extract_mac() -> option<ast::mac_> {fail}
    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
        mk_path(cx, sp, ["syntax", "ext", "qquote", "parse_stmt"])
    }
    fn get_fold_fn() -> str {"fold_stmt"}
}
// qq_helper for patterns; patterns have no mac form, so extract_mac
// fails.
impl of qq_helper for @ast::pat {
    fn span() -> span {self.span}
    fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
    fn extract_mac() -> option<ast::mac_> {fail}
    fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
        mk_path(cx, sp, ["syntax", "parse", "parser", "parse_pat"])
    }
    fn get_fold_fn() -> str {"fold_pat"}
}
// Walk a quoted AST node and collect every antiquote ($(...)) into an
// aq_ctxt, sorted by start offset.  `lo` is the snippet's start offset;
// gathered positions are stored relative to it.
fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt
{
    let v = @{visit_expr: visit_aq_expr,
              visit_ty: visit_aq_ty
              with *default_visitor()};
    let cx = @{lo:lo, mut gather: []};
    node.visit(cx, mk_vt(v));
    // FIXME: Maybe this is an overkill (merge_sort), it might be better
    // to just keep the gather array in sorted order ...
    cx.gather = std::sort::merge_sort({|a,b| a.lo < b.lo}, copy cx.gather);
    ret cx;
}
// If `node` is an antiquote mac, record its span (relative to cx.lo),
// its expression and its reconstruction constructor name; otherwise
// recurse into the node's children.
fn visit_aq<T:qq_helper>(node: T, constr: str, &&cx: aq_ctxt, v: vt<aq_ctxt>)
{
    alt (node.extract_mac()) {
      some(mac_aq(sp, e)) {
        cx.gather += [{lo: sp.lo - cx.lo, hi: sp.hi - cx.lo,
                       e: e, constr: constr}];
      }
      _ {node.visit(cx, v);}
    }
}
// FIXME: these are only here because I (kevina) couldn't figure out how to
// get bind to work in gather_anti_quotes
// Monomorphic wrappers over visit_aq for the visitor table (see FIXME
// above: bind could not be used directly in gather_anti_quotes).
fn visit_aq_expr(node: @ast::expr, &&cx: aq_ctxt, v: vt<aq_ctxt>) {
    visit_aq(node,"from_expr",cx,v);
}

fn visit_aq_ty(node: @ast::ty, &&cx: aq_ctxt, v: vt<aq_ctxt>) {
    visit_aq(node,"from_ty",cx,v);
}
// True if `c` is whitespace per the lexer's definition.
fn is_space(c: char) -> bool {
    syntax::parse::lexer::is_whitespace(c)
}
// Entry point for the #ast quasi-quote extension.  The optional
// argument selects which AST node kind to parse the quoted body as
// (crate/expr/ty/item/stmt/pat); default is "expr".
fn expand_ast(ecx: ext_ctxt, _sp: span,
              arg: ast::mac_arg, body: ast::mac_body)
    -> @ast::expr
{
    let mut what = "expr";
    option::may(arg) {|arg|
        let args: [@ast::expr] =
            alt arg.node {
              ast::expr_vec(elts, _) { elts }
              _ {
                ecx.span_fatal
                    (_sp, "#ast requires arguments of the form `[...]`.")
              }
            };
        if vec::len::<@ast::expr>(args) != 1u {
            ecx.span_fatal(_sp, "#ast requires exactly one arg");
        }
        // The selector must be a single bare identifier.
        alt (args[0].node) {
          ast::expr_path(@{node: {idents: id, _},_}) if vec::len(id) == 1u
              {what = id[0]}
          _ {ecx.span_fatal(args[0].span, "expected an identifier");}
        }
    }
    let body = get_mac_body(ecx,_sp,body);

    ret alt what {
      "crate" {finish(ecx, body, parse_crate)}
      "expr" {finish(ecx, body, parser::parse_expr)}
      "ty" {finish(ecx, body, parse_ty)}
      "item" {finish(ecx, body, parse_item)}
      "stmt" {finish(ecx, body, parse_stmt)}
      "pat" {finish(ecx, body, parser::parse_pat)}
      _ {ecx.span_fatal(_sp, "unsupported ast type")}
    };
}
// Thin adapters fixing the extra parser arguments so each parse fn
// matches the `fn(parser) -> T` shape that finish() expects.
fn parse_crate(p: parser) -> @ast::crate {
    parser::parse_crate_mod(p, [])
}

fn parse_ty(p: parser) -> @ast::ty {
    parser::parse_ty(p, false)
}

fn parse_stmt(p: parser) -> @ast::stmt {
    parser::parse_stmt(p, [])
}

fn parse_item(p: parser) -> @ast::item {
    alt (parser::parse_item(p, [])) {
      some(item) {item}
      none {fail; /* FIXME: Error message, somehow */}
    }
}
// Core of #ast expansion: re-parse the quoted snippet's source text,
// gather its antiquotes, rewrite each `$(..)` in the text to a `$N `
// placeholder, and emit a call that re-parses the rewritten text at
// run time and splices the antiquoted fragments back in via replace().
fn finish<T: qq_helper>
    (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: parser) -> T)
    -> @ast::expr
{
    let cm = ecx.codemap();
    let str = @codemap::span_to_snippet(body.span, cm);
    #debug["qquote--str==%?", str];
    let fname = codemap::mk_substr_filename(cm, body.span);
    let node = parse_from_source_str
        (f, fname, codemap::fss_internal(body.span), str,
         ecx.cfg(), ecx.parse_sess());
    let loc = codemap::lookup_char_pos(cm, body.span.lo);

    let sp = node.span();
    let qcx = gather_anti_quotes(sp.lo, node);
    let cx = qcx;

    // gather_anti_quotes sorted the spans; sanity-check the invariants.
    uint::range(1u, vec::len(cx.gather)) {|i|
        assert cx.gather[i-1u].lo < cx.gather[i].lo;
        // ^^ check that the vector is sorted
        assert cx.gather[i-1u].hi <= cx.gather[i].lo;
        // ^^ check that the spans are non-overlapping
    }

    // Rewrite the snippet text: each antiquote span becomes "$N " padded
    // with blanks (preserving whitespace) so offsets stay aligned.
    let mut str2 = "";
    enum state {active, skip(uint), blank};
    let mut state = active;
    let mut i = 0u, j = 0u;
    let g_len = vec::len(cx.gather);
    str::chars_iter(*str) {|ch|
        if (j < g_len && i == cx.gather[j].lo) {
            assert ch == '$';
            let repl = #fmt("$%u ", j);
            state = skip(str::char_len(repl));
            str2 += repl;
        }
        alt state {
          active {str::push_char(str2, ch);}
          skip(1u) {state = blank;}
          skip(sk) {state = skip (sk-1u);}
          blank if is_space(ch) {str::push_char(str2, ch);}
          blank {str::push_char(str2, ' ');}
        }
        i += 1u;
        if (j < g_len && i == cx.gather[j].hi) {
            assert ch == ')';
            state = active;
            j += 1u;
        }
    }

    let cx = ecx;
    // Calls evaluated at the expansion site to recover cfg/parse_sess.
    let cfg_call = {||
        mk_call_(cx, sp, mk_access(cx, sp, ["ext_cx"], "cfg"), [])
    };
    let parse_sess_call = {||
        mk_call_(cx, sp, mk_access(cx, sp, ["ext_cx"], "parse_sess"), [])
    };
    // The runtime re-parse of the placeholder-rewritten snippet.
    let pcall = mk_call(cx,sp,
                       ["syntax", "parse", "parser",
                        "parse_from_source_str"],
                       [node.mk_parse_fn(cx,sp),
                        mk_str(cx,sp, fname),
                        mk_call(cx,sp,
                                ["syntax","ext","qquote", "mk_file_substr"],
                                [mk_str(cx,sp, loc.file.name),
                                 mk_uint(cx,sp, loc.line),
                                 mk_uint(cx,sp, loc.col)]),
                        mk_unary(cx,sp, ast::box(ast::m_imm),
                                 mk_str(cx,sp, str2)),
                        cfg_call(),
                        parse_sess_call()]
                      );
    let mut rcall = pcall;
    // Only wrap in replace() when there are antiquotes to splice in.
    if (g_len > 0u) {
        rcall = mk_call(cx,sp,
                        ["syntax", "ext", "qquote", "replace"],
                        [pcall,
                         mk_vec_e(cx,sp, vec::map(copy qcx.gather) {|g|
                             mk_call(cx,sp,
                                     ["syntax", "ext", "qquote", g.constr],
                                     [g.e])}),
                         mk_path(cx,sp,
                                 ["syntax", "ext", "qquote",
                                  node.get_fold_fn()])]);
    }
    ret rcall;
}
// Runtime splice: fold `node`, substituting each mac_var(i) placeholder
// with repls[i]; `ff` applies the kind-appropriate fold entry point.
fn replace<T>(node: T, repls: [fragment], ff: fn (ast_fold, T) -> T)
    -> T
{
    let aft = default_ast_fold();
    let f_pre = {fold_expr: bind replace_expr(repls, _, _, _,
                                              aft.fold_expr),
                 fold_ty: bind replace_ty(repls, _, _, _,
                                          aft.fold_ty)
                 with *aft};
    ret ff(make_fold(f_pre), node);
}
// Per-kind fold entry points, referenced by name from the generated
// replace() call (see get_fold_fn).
fn fold_crate(f: ast_fold, &&n: @ast::crate) -> @ast::crate {
    @f.fold_crate(*n)
}
fn fold_expr(f: ast_fold, &&n: @ast::expr) -> @ast::expr {f.fold_expr(n)}
fn fold_ty(f: ast_fold, &&n: @ast::ty) -> @ast::ty {f.fold_ty(n)}
fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item {f.fold_item(n)}
fn fold_stmt(f: ast_fold, &&n: @ast::stmt) -> @ast::stmt {f.fold_stmt(n)}
fn fold_pat(f: ast_fold, &&n: @ast::pat) -> @ast::pat {f.fold_pat(n)}
// Substitute an expression placeholder mac_var(i) with repls[i]; fails
// if the fragment at that index is not a from_expr.
fn replace_expr(repls: [fragment],
                e: ast::expr_, s: span, fld: ast_fold,
                orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
    -> (ast::expr_, span)
{
    alt e {
      ast::expr_mac({node: mac_var(i), _}) {
        alt (repls[i]) {
          from_expr(r) {(r.node, r.span)}
          _ {fail /* fixme error message */}}}
      _ {orig(e,s,fld)}
    }
}
// Substitute a type placeholder mac_var(i) with repls[i]; fails if the
// fragment at that index is not a from_ty.
fn replace_ty(repls: [fragment],
              e: ast::ty_, s: span, fld: ast_fold,
              orig: fn@(ast::ty_, span, ast_fold)->(ast::ty_, span))
    -> (ast::ty_, span)
{
    alt e {
      ast::ty_mac({node: mac_var(i), _}) {
        alt (repls[i]) {
          from_ty(r) {(r.node, r.span)}
          _ {fail /* fixme error message */}}}
      _ {orig(e,s,fld)}
    }
}
// Debug helper: pretty-print an expression to stdout, newline-terminated.
fn print_expr(expr: @ast::expr) {
    let stdout = io::stdout();
    let pp = pprust::rust_printer(stdout);
    pprust::print_expr(pp, expr);
    pp::eof(pp.s);
    stdout.write_str("\n");
}
// Build the external file-substring descriptor used by the generated
// runtime re-parse call (see finish()).
fn mk_file_substr(fname: str, line: uint, col: uint) -> codemap::file_substr {
    codemap::fss_external({filename: fname, line: line, col: col})
}
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:

View File

@@ -1,778 +0,0 @@
use std;
import codemap::span;
import std::map::{hashmap, str_hash};
import base::*;
import fold::*;
import ast_util::respan;
import ast::{ident, path, ty, blk_, expr, path_, expr_path,
expr_vec, expr_mac, mac_invoc, node_id};
export add_new_extension;
// A path counts as a plain identifier only if it has exactly one
// segment and no type parameters.
fn path_to_ident(pth: @path) -> option<ident> {
    if vec::len(pth.node.idents) == 1u && vec::len(pth.node.types) == 0u {
        ret some(pth.node.idents[0u]);
    }
    ret none;
}
//a vec of binders might be a little big.

// One macro clause: the pattern's compiled binders plus the body to
// transcribe when the pattern matches.
type clause = {params: binders, body: @expr};

/* logically, an arb_depth should contain only one kind of matchable */
// A value at arbitrary nesting depth: a single leaf, or a sequence of
// sub-values produced by matching under `...`.
enum arb_depth<T> { leaf(T), seq(@[arb_depth<T>], span), }

// The kinds of AST fragments a macro pattern variable can bind.
enum matchable {
    match_expr(@expr),
    match_path(@path),
    match_ident(ast::spanned<ident>),
    match_ty(@ty),
    match_block(ast::blk),
    match_exact, /* don't bind anything, just verify the AST traversal */
}
/* for when given an incompatible bit of AST */
// Report a fatal type mismatch between a bound matchable and the AST
// position it is being transcribed into, at the matchable's own span.
fn match_error(cx: ext_ctxt, m: matchable, expected: str) -> ! {
    alt m {
      match_expr(x) {
        cx.span_fatal(x.span,
                      "this argument is an expr, expected " + expected);
      }
      match_path(x) {
        cx.span_fatal(x.span,
                      "this argument is a path, expected " + expected);
      }
      match_ident(x) {
        cx.span_fatal(x.span,
                      "this argument is an ident, expected " + expected);
      }
      match_ty(x) {
        cx.span_fatal(x.span,
                      "this argument is a type, expected " + expected);
      }
      match_block(x) {
        cx.span_fatal(x.span,
                      "this argument is a block, expected " + expected);
      }
      match_exact { cx.bug("what is a match_exact doing in a bindings?"); }
    }
}
// We can't make all the matchables in a match_result the same type because
// idents can be paths, which can be exprs.
// If we want better match failure error messages (like in Fortifying Syntax),
// we'll want to return something indicating amount of progress and location
// of failure instead of `none`.
// none = match failure; a selector extracts one binding from an actual
// macro argument.
type match_result = option<arb_depth<matchable>>;
type selector = fn@(matchable) -> match_result;
// Split a vector of exprs around an ellipsis: the elements before the
// repeated one (`pre`), the element immediately preceding the `...`
// (`rep`), and the elements after it (`post`).  At most one ellipsis
// is permitted; with none, everything lands in `pre`.
fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
   {pre: [@expr], rep: option<@expr>, post: [@expr]} {
    let mut idx: uint = 0u;
    let mut res = none;
    for elt: @expr in elts {
        alt elt.node {
          expr_mac(m) {
            alt m.node {
              ast::mac_ellipsis {
                if res != none {
                    cx.span_fatal(m.span, "only one ellipsis allowed");
                }
                // The repeated element is the one just before the `...`.
                res =
                    some({pre: vec::slice(elts, 0u, idx - 1u),
                          rep: some(elts[idx - 1u]),
                          post: vec::slice(elts, idx + 1u, vec::len(elts))});
              }
              _ { }
            }
          }
          _ { }
        }
        idx += 1u;
    }

    ret alt res {
          some(val) { val }
          none { {pre: elts, rep: none, post: []} }
        }
}
// Map `f` over `v`, but yield none as soon as any element maps to none
// (all-or-nothing traversal).
fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) ->
   option<[U]> {
    let mut res = [];
    for elem: T in v {
        alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
    }
    ret some(res);
}
// Apply a selector through an arb_depth structure: at a leaf, run the
// selector; at a seq, recurse into every element, failing the whole
// match if any element fails.
fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
    alt ad {
      leaf(x) { ret f(x); }
      seq(ads, span) {
        alt option_flatten_map(bind a_d_map(_, f), *ads) {
          none { ret none; }
          some(ts) { ret some(seq(@ts, span)); }
        }
      }
    }
}
// Compose two selectors: run s1, then map s2 through whatever depth
// structure s1 produced; none propagates.
fn compose_sels(s1: selector, s2: selector) -> selector {
    fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
        ret alt s1(m) {
              none { none }
              some(matches) { a_d_map(matches, s2) }
            }
    }
    ret bind scomp(s1, s2, _);
}
// Compiled form of a macro pattern: named binders that extract values,
// plus anonymous matchers that merely verify literal structure.
type binders =
    {real_binders: hashmap<ident, selector>,
     mut literal_ast_matchers: [selector]};
// The result of a successful match: pattern variable -> bound value(s).
type bindings = hashmap<ident, arb_depth<matchable>>;

fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { }
/* these three functions are the big moving parts */
/* create the selectors needed to bind and verify the pattern */
// Compile a macro pattern expression into binders by recursively
// walking it (p_t_s_rec), starting from the identity selector.
fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders {
    let res: binders =
        {real_binders: str_hash::<selector>(),
         mut literal_ast_matchers: []};
    //this oughta return binders instead, but macro args are a sequence of
    //expressions, rather than a single expression
    fn trivial_selector(m: matchable) -> match_result { ret some(leaf(m)); }
    p_t_s_rec(cx, match_expr(e), trivial_selector, res);
    ret res;
}
/* use the selectors on the actual arguments to the macro to extract
bindings. Most of the work is done in p_t_s, which generates the
selectors. */
// Run the compiled selectors against an actual macro argument.  Returns
// the resulting bindings, or none if any structural matcher or named
// binder fails.
fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
    let res = str_hash::<arb_depth<matchable>>();
    //need to do this first, to check vec lengths.
    for sel: selector in b.literal_ast_matchers {
        alt sel(match_expr(e)) { none { ret none; } _ { } }
    }
    let mut never_mind: bool = false;
    b.real_binders.items {|key, val|
        alt val(match_expr(e)) {
          none { never_mind = true; }
          some(mtc) { res.insert(key, mtc); }
        }
    };
    //HACK: `ret` doesn't work in `for each`
    if never_mind { ret none; }
    ret some(res);
}
/* use the bindings on the body to generate the expanded code */
// Expand a macro body: fold it, substituting bound pattern variables
// wherever they occur (idents, paths, exprs, types, blocks) and
// expanding `...` repetitions.  idx_path tracks the current position
// inside nested repetitions.
fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
    let idx_path: @mut [uint] = @mut [];
    // Expanded code gets fresh node ids and backtrace-carrying spans.
    fn new_id(_old: node_id, cx: ext_ctxt) -> node_id { ret cx.next_id(); }
    fn new_span(cx: ext_ctxt, sp: span) -> span {
        /* this discards information in the case of macro-defining macros */
        ret {lo: sp.lo, hi: sp.hi, expn_info: cx.backtrace()};
    }
    let afp = default_ast_fold();
    let f_pre =
        {fold_ident: bind transcribe_ident(cx, b, idx_path, _, _),
         fold_path: bind transcribe_path(cx, b, idx_path, _, _, _),
         fold_expr:
             bind transcribe_expr(cx, b, idx_path, _, _, _, afp.fold_expr),
         fold_ty: bind transcribe_type(cx, b, idx_path, _, _, _, afp.fold_ty),
         fold_block:
             bind transcribe_block(cx, b, idx_path, _, _, _, afp.fold_block),
         map_exprs: bind transcribe_exprs(cx, b, idx_path, _, _),
         new_id: bind new_id(_, cx)
         with *afp};
    let f = make_fold(f_pre);
    let result = f.fold_expr(body);
    ret result;
}
/* helper: descend into a matcher */
/* helper: descend into a matcher */
// Index into nested seq layers using the current repetition indices;
// stops early at a leaf (leaves are duplicated across repetitions).
fn follow(m: arb_depth<matchable>, idx_path: @mut [uint]) ->
   arb_depth<matchable> {
    let mut res: arb_depth<matchable> = m;
    for idx: uint in *idx_path {
        alt res {
          leaf(_) { ret res;/* end of the line */ }
          seq(new_ms, _) { res = new_ms[idx] }
        }
    }
    ret res;
}
// Like follow(), but for transcription: it is a fatal error if the
// binding is still a sequence after descending (i.e. the variable was
// matched under `...` but used outside one).
fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
                    idx_path: @mut [uint]) -> option<matchable> {
    alt mmaybe {
      none { ret none }
      some(m) {
        ret alt follow(m, idx_path) {
              seq(_, sp) {
                cx.span_fatal(sp,
                              "syntax matched under ... but not " +
                                  "used that way.")
              }
              leaf(m) { ret some(m) }
            }
      }
    }
}
/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
/* helper for transcribe_exprs: what vars from `b` occur in `e`? */
// Folds `e` (discarding the result) just to collect which identifiers
// in it are bound pattern variables, then iterates them via `it`.
fn free_vars(b: bindings, e: @expr, it: fn(ident)) {
    let idents: hashmap<ident, ()> = str_hash::<()>();
    fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings,
                  idents: hashmap<ident, ()>) -> ident {
        if b.contains_key(i) { idents.insert(i, ()); }
        ret i;
    }
    // using fold is a hack: we want visit, but it doesn't hit idents ) :
    // solve this with macros
    let f_pre =
        {fold_ident: bind mark_ident(_, _, b, idents)
         with *default_ast_fold()};
    let f = make_fold(f_pre);
    f.fold_expr(e); // ignore result
    idents.keys {|x| it(x); };
}
/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */
// The `pre` and `post` parts are transcribed once; the repeated element
// is transcribed once per repetition, with idx_path extended by the
// repetition index so bound sequence variables advance in lockstep.
fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
                    recur: fn@(&&@expr) -> @expr, exprs: [@expr]) -> [@expr] {
    alt elts_to_ell(cx, exprs) {
      {pre: pre, rep: repeat_me_maybe, post: post} {
        let mut res = vec::map(pre, recur);
        alt repeat_me_maybe {
          none { }
          some(repeat_me) {
            let mut repeat: option<{rep_count: uint, name: ident}> = none;
            /* we need to walk over all the free vars in lockstep, except for
            the leaves, which are just duplicated */
            free_vars(b, repeat_me) {|fv|
                let cur_pos = follow(b.get(fv), idx_path);
                alt cur_pos {
                  leaf(_) { }
                  seq(ms, _) {
                    // Every sequence-bound var must repeat the same
                    // number of times; diagnose disagreement.
                    alt repeat {
                      none {
                        repeat = some({rep_count: vec::len(*ms), name: fv});
                      }
                      some({rep_count: old_len, name: old_name}) {
                        let len = vec::len(*ms);
                        if old_len != len {
                            let msg =
                                #fmt["'%s' occurs %u times, but ", fv, len] +
                                    #fmt["'%s' occurs %u times", old_name,
                                         old_len];
                            cx.span_fatal(repeat_me.span, msg);
                        }
                      }
                    }
                  }
                }
            };
            alt repeat {
              none {
                cx.span_fatal(repeat_me.span,
                              "'...' surrounds an expression without any" +
                                  " repeating syntax variables");
              }
              some({rep_count: rc, _}) {
                /* Whew, we now know how how many times to repeat */
                let mut idx: uint = 0u;
                while idx < rc {
                    *idx_path += [idx];
                    res += [recur(repeat_me)]; // whew!
                    vec::pop(*idx_path);
                    idx += 1u;
                }
              }
            }
          }
        }
        res += vec::map(post, recur);
        ret res;
      }
    }
}
// substitute, in a position that's required to be an ident
// substitute, in a position that's required to be an ident
// Unbound identifiers pass through unchanged; a binding of the wrong
// kind is a fatal error.
fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
                    &&i: ident, _fld: ast_fold) -> ident {
    ret alt follow_for_trans(cx, b.find(i), idx_path) {
          some(match_ident(a_id)) { a_id.node }
          some(m) { match_error(cx, m, "an identifier") }
          none { i }
        }
}
// Substitute a single-segment, non-generic path with a bound ident or
// path; qualified names are never substituted into.
fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
                   p: path_, s:span, _fld: ast_fold) -> (path_, span) {
    // Don't substitute into qualified names.
    if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { ret (p, s); }
    ret alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
          some(match_ident(id)) {
            ({global: false, idents: [id.node], types: []}, id.span)
          }
          some(match_path(a_pth)) { (a_pth.node, a_pth.span) }
          some(m) { match_error(cx, m, "a path") }
          none { (p, s) }
        }
}
// Substitute an expression position: a path-expression that is a bare
// bound variable may expand to an ident, path or whole expression.
fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
                   e: ast::expr_, s: span, fld: ast_fold,
                   orig: fn@(ast::expr_, span, ast_fold)->(ast::expr_, span))
    -> (ast::expr_, span)
{
    ret alt e {
          expr_path(p) {
            // Don't substitute into qualified names.
            if vec::len(p.node.types) > 0u || vec::len(p.node.idents) != 1u {
                (e, s);
            }
            alt follow_for_trans(cx, b.find(p.node.idents[0]), idx_path) {
              some(match_ident(id)) {
                (expr_path(@respan(id.span,
                                   {global: false,
                                    idents: [id.node],
                                    types: []})), id.span)
              }
              some(match_path(a_pth)) { (expr_path(a_pth), s) }
              some(match_expr(a_exp)) { (a_exp.node, a_exp.span) }
              some(m) { match_error(cx, m, "an expression") }
              none { orig(e, s, fld) }
            }
          }
          _ { orig(e, s, fld) }
        }
}
// Substitute a type position: a path-type naming a bare bound variable
// may expand to a bound type fragment.
fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
                   t: ast::ty_, s: span, fld: ast_fold,
                   orig: fn@(ast::ty_, span, ast_fold) -> (ast::ty_, span))
    -> (ast::ty_, span)
{
    ret alt t {
          ast::ty_path(pth, _) {
            alt path_to_ident(pth) {
              some(id) {
                alt follow_for_trans(cx, b.find(id), idx_path) {
                  some(match_ty(ty)) { (ty.node, ty.span) }
                  some(m) { match_error(cx, m, "a type") }
                  none { orig(t, s, fld) }
                }
              }
              none { orig(t, s, fld) }
            }
          }
          _ { orig(t, s, fld) }
        }
}
/* for parsing reasons, syntax variables bound to blocks must be used like
`{v}` */
/* for parsing reasons, syntax variables bound to blocks must be used like
`{v}` */
// A block consisting only of a single bound identifier is replaced by
// the bound block; anything else folds normally.
fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut [uint],
                    blk: blk_, s: span, fld: ast_fold,
                    orig: fn@(blk_, span, ast_fold) -> (blk_, span))
    -> (blk_, span)
{
    ret alt block_to_ident(blk) {
          some(id) {
            alt follow_for_trans(cx, b.find(id), idx_path) {
              some(match_block(new_blk)) { (new_blk.node, new_blk.span) }

              // possibly allow promotion of ident/path/expr to blocks?
              some(m) {
                match_error(cx, m, "a block")
              }
              none { orig(blk, s, fld) }
            }
          }
          none { orig(blk, s, fld) }
        }
}
/* traverse the pattern, building instructions on how to bind the actual
argument. ps accumulates instructions on navigating the tree.*/
/* traverse the pattern, building instructions on how to bind the actual
argument. ps accumulates instructions on navigating the tree.*/
// Dispatch on pattern shape: paths become named binders, vectors are
// decomposed (handling an optional `...`), embedded macs are handled in
// p_t_s_r_mac, and any other expression becomes an exact-match check.
fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {

    //it might be possible to traverse only exprs, not matchables
    alt m {
      match_expr(e) {
        alt e.node {
          expr_path(p_pth) { p_t_s_r_path(cx, p_pth, s, b); }
          expr_vec(p_elts, _) {
            alt elts_to_ell(cx, p_elts) {
              {pre: pre, rep: some(repeat_me), post: post} {
                p_t_s_r_length(cx, vec::len(pre) + vec::len(post), true, s,
                               b);
                if vec::len(pre) > 0u {
                    p_t_s_r_actual_vector(cx, pre, true, s, b);
                }
                p_t_s_r_ellipses(cx, repeat_me, vec::len(pre), s, b);

                if vec::len(post) > 0u {
                    cx.span_unimpl(e.span,
                                   "matching after `...` not yet supported");
                }
              }
              {pre: pre, rep: none, post: post} {
                if post != [] {
                    cx.bug("elts_to_ell provided an invalid result");
                }
                p_t_s_r_length(cx, vec::len(pre), false, s, b);
                p_t_s_r_actual_vector(cx, pre, false, s, b);
              }
            }
          }
          /* FIXME: handle embedded types and blocks, at least */
          expr_mac(mac) {
            p_t_s_r_mac(cx, mac, s, b);
          }
          _ {
            // Literal sub-pattern: match only an identical expression.
            fn select(cx: ext_ctxt, m: matchable, pat: @expr) ->
               match_result {
                ret alt m {
                      match_expr(e) {
                        if e == pat { some(leaf(match_exact)) } else { none }
                      }
                      _ { cx.bug("broken traversal in p_t_s_r") }
                    }
            }
            b.literal_ast_matchers += [bind select(cx, _, e)];
          }
        }
      }
      _ {
        cx.bug("undocumented invariant in p_t_s_rec");
      }
    }
}
/* make a match more precise */
/* make a match more precise */
// Narrow a matched expression: a bare single-segment path becomes a
// match_ident, other paths become match_path; everything else is
// returned unchanged.
fn specialize_match(m: matchable) -> matchable {
    ret alt m {
          match_expr(e) {
            alt e.node {
              expr_path(pth) {
                alt path_to_ident(pth) {
                  some(id) { match_ident(respan(pth.span, id)) }
                  none { match_path(pth) }
                }
              }
              _ { m }
            }
          }
          _ { m }
        }
}
/* pattern_to_selectors helper functions */
/* pattern_to_selectors helper functions */
// A single-identifier path in the pattern declares a named binder;
// duplicate binder names are a fatal error.  Multi-segment paths bind
// nothing.
fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
    alt path_to_ident(p) {
      some(p_id) {
        fn select(cx: ext_ctxt, m: matchable) -> match_result {
            ret alt m {
                  match_expr(e) { some(leaf(specialize_match(m))) }
                  _ { cx.bug("broken traversal in p_t_s_r") }
                }
        }
        if b.real_binders.contains_key(p_id) {
            cx.span_fatal(p.span, "duplicate binding identifier");
        }
        b.real_binders.insert(p_id, compose_sels(s, bind select(cx, _)));
      }
      none { }
    }
}
// If a block is nothing but a single bare-identifier tail expression
// (the `{v}` form), return that identifier.
fn block_to_ident(blk: blk_) -> option<ident> {
    if vec::len(blk.stmts) != 0u { ret none; }
    ret alt blk.expr {
          some(expr) {
            alt expr.node { expr_path(pth) { path_to_ident(pth) } _ { none } }
          }
          none { none }
        }
}
/* Handle a macro node appearing inside a pattern. Embedded types
   (`#<ident>`) and embedded blocks (`#{ident}`) whose payload is a lone
   identifier become binders that capture the corresponding type/block
   from the invocation; everything else (macro calls, antiquotes, a
   stray `...`) is rejected as unsupported destructuring. */
fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
    // step 1 of the selector: peel a match_expr down to its mac node,
    // then hand off to fn_m for the mac-variant-specific step
    fn select_pt_1(cx: ext_ctxt, m: matchable,
                   fn_m: fn(ast::mac) -> match_result) -> match_result {
        ret alt m {
          match_expr(e) {
            alt e.node { expr_mac(mac) { fn_m(mac) } _ { none } }
          }
          _ { cx.bug("broken traversal in p_t_s_r") }
        }
    }
    // uniform "not yet supported" fatal error for the rejected forms
    fn no_des(cx: ext_ctxt, sp: span, syn: str) -> ! {
        cx.span_fatal(sp, "destructuring " + syn + " is not yet supported");
    }
    alt mac.node {
      ast::mac_ellipsis { cx.span_fatal(mac.span, "misused `...`"); }
      ast::mac_invoc(_, _, _) { no_des(cx, mac.span, "macro calls"); }
      ast::mac_embed_type(ty) {
        alt ty.node {
          ast::ty_path(pth, _) {
            alt path_to_ident(pth) {
              some(id) {
                /* look for an embedded type */
                fn select_pt_2(m: ast::mac) -> match_result {
                    ret alt m.node {
                      ast::mac_embed_type(t) { some(leaf(match_ty(t))) }
                      _ { none }
                    }
                }
                let final_step = bind select_pt_1(cx, _, select_pt_2);
                b.real_binders.insert(id, compose_sels(s, final_step));
              }
              none { no_des(cx, pth.span, "under `#<>`"); }
            }
          }
          _ { no_des(cx, ty.span, "under `#<>`"); }
        }
      }
      ast::mac_embed_block(blk) {
        // `#{ident}`: a one-expression block naming a lone identifier
        // becomes a binder that captures the matching embedded block
        alt block_to_ident(blk.node) {
          some(id) {
            fn select_pt_2(m: ast::mac) -> match_result {
                ret alt m.node {
                  ast::mac_embed_block(blk) {
                    some(leaf(match_block(blk)))
                  }
                  _ { none }
                }
            }
            let final_step = bind select_pt_1(cx, _, select_pt_2);
            b.real_binders.insert(id, compose_sels(s, final_step));
          }
          none { no_des(cx, blk.span, "under `#{}`"); }
        }
      }
      ast::mac_aq(_,_) { no_des(cx, mac.span, "antiquotes"); }
      ast::mac_var(_) { no_des(cx, mac.span, "antiquote variables"); }
    }
}
/* Handle the `...` repetition element of a vector pattern: build a
   selector that, on an invocation vector, collects every element from
   `offset` onward into a sequence, then recurse on `repeat_me` so its
   own binders are registered beneath that sequence. */
fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
                    b: binders) {
    fn select(cx: ext_ctxt, repeat_me: @expr, offset: uint, m: matchable) ->
       match_result {
        ret alt m {
          match_expr(e) {
            alt e.node {
              expr_vec(arg_elts, _) {
                // gather arg_elts[offset..] as the repeated matches
                let mut elts = [];
                let mut idx = offset;
                while idx < vec::len(arg_elts) {
                    elts += [leaf(match_expr(arg_elts[idx]))];
                    idx += 1u;
                }
                // using repeat_me.span is a little wacky, but the
                // error we want to report is one in the macro def
                some(seq(@elts, repeat_me.span))
              }
              _ { none }
            }
          }
          _ { cx.bug("broken traversal in p_t_s_r") }
        }
    }
    p_t_s_rec(cx, match_expr(repeat_me),
              compose_sels(s, bind select(cx, repeat_me, offset, _)), b);
}
/* Register a literal (non-binding) matcher that checks the length of a
   vector in the invocation: exact length when `at_least` is false, or a
   minimum length when true (the `...` case). */
fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
                  b: binders) {
    fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) ->
       match_result {
        ret alt m {
          match_expr(e) {
            alt e.node {
              expr_vec(arg_elts, _) {
                let actual_len = vec::len(arg_elts);
                // `&&` binds tighter than `||`, so this reads:
                // (at_least && actual_len >= len) || actual_len == len
                if at_least && actual_len >= len || actual_len == len {
                    some(leaf(match_exact))
                } else { none }
              }
              _ { none }
            }
          }
          _ { none }
        }
    }
    b.literal_ast_matchers +=
        [compose_sels(s, bind len_select(cx, _, at_least, len))];
}
/* Recurse into each concrete (non-`...`) element of a vector pattern:
   for element i, the composed selector projects out the i-th element of
   the invocation vector before applying that element's own matchers. */
fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool,
                         s: selector, b: binders) {
    let mut idx: uint = 0u;
    while idx < vec::len(elts) {
        // project the idx-th element of the matched vector
        // (in-bounds is guaranteed by the length matcher registered by
        // p_t_s_r_length before this is reached)
        fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result {
            ret alt m {
              match_expr(e) {
                alt e.node {
                  expr_vec(arg_elts, _) {
                    some(leaf(match_expr(arg_elts[idx])))
                  }
                  _ { none }
                }
              }
              _ { cx.bug("broken traversal in p_t_s_r") }
            }
        }
        p_t_s_rec(cx, match_expr(elts[idx]),
                  compose_sels(s, bind select(cx, _, idx)), b);
        idx += 1u;
    }
}
/* Implements `#macro[[lhs, rhs], ...]`: parse each [invocation,
   expansion-body] clause, compile the invocation pattern into selectors
   via pattern_to_selectors, require every clause to name the same
   (non-path) macro, and return a macro_def whose expander tries each
   clause in order at every later invocation site. */
fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                     _body: ast::mac_body) -> base::macro_def {
    let arg = get_mac_arg(cx,sp,arg);
    // the top-level argument must be a vector of clauses
    let args: [@ast::expr] =
        alt arg.node {
          ast::expr_vec(elts, _) { elts }
          _ {
            cx.span_fatal(sp,
                           "#macro requires arguments of the form `[...]`.")
          }
        };
    let mut macro_name: option<str> = none;
    let mut clauses: [@clause] = [];
    for arg: @expr in args {
        alt arg.node {
          // each clause is a 2-element vector: [pattern, body]
          expr_vec(elts, mutbl) {
            if vec::len(elts) != 2u {
                cx.span_fatal((*arg).span,
                              "extension clause must consist of [" +
                                  "macro invocation, expansion body]");
            }
            alt elts[0u].node {
              expr_mac(mac) {
                alt mac.node {
                  mac_invoc(pth, invoc_arg, body) {
                    // all clauses must use one simple-identifier name;
                    // the first clause fixes it, later ones must agree
                    alt path_to_ident(pth) {
                      some(id) {
                        alt macro_name {
                          none { macro_name = some(id); }
                          some(other_id) {
                            if id != other_id {
                                cx.span_fatal(pth.span,
                                              "macro name must be " +
                                                  "consistent");
                            }
                          }
                        }
                      }
                      none {
                        cx.span_fatal(pth.span,
                                      "macro name must not be a path");
                      }
                    }
                    // compile this clause's pattern into selectors now;
                    // the body is transcribed lazily at expansion time
                    clauses +=
                        [@{params: pattern_to_selectors
                           (cx, get_mac_arg(cx,mac.span,invoc_arg)),
                           body: elts[1u]}];
                    // FIXME: check duplicates (or just simplify
                    // the macro arg situation)
                  }
                  _ {
                    cx.span_bug(mac.span, "undocumented invariant in \
                                           add_extension");
                  }
                }
              }
              _ {
                cx.span_fatal(elts[0u].span,
                              "extension clause must" +
                                  " start with a macro invocation.");
              }
            }
          }
          _ {
            // NOTE(review): the concatenation yields a double space,
            // "extension must be [clause,  ...]" — likely unintended,
            // but it is a runtime string so it is left untouched here.
            cx.span_fatal((*arg).span,
                          "extension must be [clause, " + " ...]");
          }
        }
    }
    // capture the compiled clauses in the expander closure
    let ext = bind generic_extension(_, _, _, _, clauses);
    ret {ident:
             alt macro_name {
               some(id) { id }
               none {
                 cx.span_fatal(sp,
                               "macro definition must have " +
                                   "at least one clause")
               }
             },
         ext: normal({expander: ext, span: some(arg.span)})};

    /* The expander invoked at each use site: try clauses in definition
       order; the first whose selectors bind the argument wins and its
       body is transcribed with those bindings. No match is fatal. */
    fn generic_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                         _body: ast::mac_body, clauses: [@clause]) -> @expr {
        let arg = get_mac_arg(cx,sp,arg);
        for c: @clause in clauses {
            alt use_selectors_to_bind(c.params, arg) {
              some(bindings) { ret transcribe(cx, bindings, c.body); }
              none { cont; }
            }
        }
        cx.span_fatal(sp, "no clauses match macro invocation");
    }
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//