
feat(unstable): refactor js lint plugin AST (#27615)

This PR changes the underlying buffer-backed AST format we use for
JavaScript-based linting plugins. It adds support for various new types,
makes the traversal code much easier to follow, and is more polished than
previous iterations.

Here is a quick summary (in no particular order):

- Node prop data is stored separately from the traversal data, which makes
the traversal code much easier to reason about. Previously, the two were
interleaved.
- Spans are stored in a separate table as well, since they are rarely needed.
- The schema is separate from the SWC conversion logic.
- Supports recursive plain objects.
- Supports numbers.
- Supports bigint.
- Supports regex.
- Adds all SWC nodes.

Apologies, this is kind of a big PR, but it's worth it imo.

_Marking as draft because I need to update some tests tomorrow._
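
To make the new layout concrete: each node in the traversal table is a `u8` kind followed by four `u32` values (prop offset, first child, next sibling, parent), as the serializer changes below describe. Here is a minimal, hypothetical TypeScript sketch of a reader that walks that table without ever touching prop or span data; the helper names are made up and this is not the actual plugin runtime:

```ts
// Hypothetical sketch only - names and layout assumptions are mine, not part of this commit.
// Assumes the node table sits at the start of the message, one record per node:
// <kind u8> <prop offset u32> <child idx u32> <next idx u32> <parent idx u32>
const NODE_SIZE = 1 + 4 * 4;

function readU32(buf: Uint8Array, offset: number): number {
  // Big-endian, matching append_u32() in the Rust serializer.
  return (
    ((buf[offset] << 24) |
      (buf[offset + 1] << 16) |
      (buf[offset + 2] << 8) |
      buf[offset + 3]) >>> 0
  );
}

// Depth-first traversal over the node table; props and spans are never read.
function traverse(
  buf: Uint8Array,
  id: number,
  visit: (id: number, kind: number) => void,
): void {
  while (id > 0) {
    const offset = id * NODE_SIZE;
    visit(id, buf[offset]); // kind
    const child = readU32(buf, offset + 5); // first child, 0 = none
    if (child > 0) traverse(buf, child, visit);
    id = readU32(buf, offset + 9); // next sibling, 0 = end
  }
}
```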
Marvin Hagemeister authored 2025-01-14 13:31:02 +01:00, committed by GitHub
parent 1e95c20561
commit 3fb8fc1ba7
8 changed files with 13757 additions and 3245 deletions

File diff suppressed because it is too large


@@ -744,8 +744,7 @@ export function compileSelector(selector) {
         fn = matchNthChild(node, fn);
         break;
       case PSEUDO_HAS:
-        // FIXME
-        // fn = matchIs(part, fn);
+        // TODO(@marvinhagemeister)
         throw new Error("TODO: :has");
       case PSEUDO_NOT:
         fn = matchNot(node.selectors, fn);
@@ -767,8 +766,7 @@ export function compileSelector(selector) {
  */
 function matchFirstChild(next) {
   return (ctx, id) => {
-    const parent = ctx.getParent(id);
-    const first = ctx.getFirstChild(parent);
+    const first = ctx.getFirstChild(id);
     return first === id && next(ctx, first);
   };
 }
@@ -779,8 +777,7 @@ function matchFirstChild(next) {
  */
 function matchLastChild(next) {
   return (ctx, id) => {
-    const parent = ctx.getParent(id);
-    const last = ctx.getLastChild(parent);
+    const last = ctx.getLastChild(id);
     return last === id && next(ctx, id);
   };
 }
@@ -955,7 +952,9 @@ function matchElem(part, next) {
     else if (part.elem === 0) return false;

     const type = ctx.getType(id);
-    if (type > 0 && type === part.elem) return next(ctx, id);
+    if (type > 0 && type === part.elem) {
+      return next(ctx, id);
+    }

     return false;
   };
@@ -968,7 +967,16 @@ function matchElem(part, next) {
  */
 function matchAttrExists(attr, next) {
   return (ctx, id) => {
-    return ctx.hasAttrPath(id, attr.prop, 0) ? next(ctx, id) : false;
+    try {
+      ctx.getAttrPathValue(id, attr.prop, 0);
+      return next(ctx, id);
+    } catch (err) {
+      if (err === -1) {
+        return false;
+      }
+      throw err;
+    }
   };
 }
@@ -979,9 +987,15 @@ function matchAttrExists(attr, next) {
  */
 function matchAttrBin(attr, next) {
   return (ctx, id) => {
-    if (!ctx.hasAttrPath(id, attr.prop, 0)) return false;
-    const value = ctx.getAttrPathValue(id, attr.prop, 0);
-    if (!matchAttrValue(attr, value)) return false;
+    try {
+      const value = ctx.getAttrPathValue(id, attr.prop, 0);
+      if (!matchAttrValue(attr, value)) return false;
+    } catch (err) {
+      if (err === -1) {
+        return false;
+      }
+      throw err;
+    }
     return next(ctx, id);
   };
 }
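
One behavioral note on the selector changes above: attribute existence checks no longer go through a separate `hasAttrPath()`; instead `getAttrPathValue()` is expected to throw `-1` when the path does not exist, and the matchers treat that as a non-match. A rough, hypothetical sketch of that contract (the real implementation works on the serialized buffer, not plain objects):

```ts
// Hypothetical sketch of the "throw -1 when missing" contract; not the real code.
function getAttrPathValueSketch(
  node: Record<string, unknown>,
  path: string[],
  idx: number,
): unknown {
  if (idx === path.length) return node;
  const value = node[path[idx]];
  if (value === undefined) {
    // Missing attribute path: signal it the way matchAttrExists/matchAttrBin expect.
    throw -1;
  }
  return getAttrPathValueSketch(value as Record<string, unknown>, path, idx + 1);
}
```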


@@ -12,6 +12,8 @@ export interface AstContext {
   strTableOffset: number;
   rootOffset: number;
   nodes: Map<number, NodeFacade>;
+  spansOffset: number;
+  propsOffset: number;
   strByType: number[];
   strByProp: number[];
   typeByStr: Map<string, number>;
@@ -19,6 +21,12 @@ export interface AstContext {
   matcher: MatchContext;
 }

+export interface Node {
+  range: Range;
+}
+
+export type Range = [number, number];
+
 // TODO(@marvinhagemeister) Remove once we land "official" types
 export interface RuleContext {
   id: string;
@@ -121,7 +129,6 @@ export interface MatchContext {
   getSiblings(id: number): number[];
   getParent(id: number): number;
   getType(id: number): number;
-  hasAttrPath(id: number, propIds: number[], idx: number): boolean;
   getAttrPathValue(id: number, propIds: number[], idx: number): unknown;
 }


@@ -14,9 +14,14 @@ pub enum PropFlags {
   Ref,
   RefArr,
   String,
+  Number,
   Bool,
   Null,
   Undefined,
+  Object,
+  Regex,
+  BigInt,
+  Array,
 }

 impl From<PropFlags> for u8 {
@@ -33,21 +38,29 @@ impl TryFrom<u8> for PropFlags {
       0 => Ok(PropFlags::Ref),
       1 => Ok(PropFlags::RefArr),
       2 => Ok(PropFlags::String),
-      3 => Ok(PropFlags::Bool),
-      4 => Ok(PropFlags::Null),
-      5 => Ok(PropFlags::Undefined),
+      3 => Ok(PropFlags::Number),
+      4 => Ok(PropFlags::Bool),
+      5 => Ok(PropFlags::Null),
+      6 => Ok(PropFlags::Undefined),
+      7 => Ok(PropFlags::Object),
+      8 => Ok(PropFlags::Regex),
+      9 => Ok(PropFlags::BigInt),
+      10 => Ok(PropFlags::Array),
       _ => Err("Unknown Prop flag"),
     }
   }
 }

+pub type Index = u32;
+
+const GROUP_KIND: u8 = 1;
+
 const MASK_U32_1: u32 = 0b11111111_00000000_00000000_00000000;
 const MASK_U32_2: u32 = 0b00000000_11111111_00000000_00000000;
 const MASK_U32_3: u32 = 0b00000000_00000000_11111111_00000000;
 const MASK_U32_4: u32 = 0b00000000_00000000_00000000_11111111;

-// TODO: There is probably a native Rust function to do this.
-pub fn append_u32(result: &mut Vec<u8>, value: u32) {
+#[inline]
+fn append_u32(result: &mut Vec<u8>, value: u32) {
   let v1: u8 = ((value & MASK_U32_1) >> 24) as u8;
   let v2: u8 = ((value & MASK_U32_2) >> 16) as u8;
   let v3: u8 = ((value & MASK_U32_3) >> 8) as u8;
@@ -59,25 +72,11 @@ pub fn append_u32(result: &mut Vec<u8>, value: u32) {
   result.push(v4);
 }

-pub fn append_usize(result: &mut Vec<u8>, value: usize) {
+fn append_usize(result: &mut Vec<u8>, value: usize) {
   let raw = u32::try_from(value).unwrap();
   append_u32(result, raw);
 }

-pub fn write_usize(result: &mut [u8], value: usize, idx: usize) {
-  let raw = u32::try_from(value).unwrap();
-
-  let v1: u8 = ((raw & MASK_U32_1) >> 24) as u8;
-  let v2: u8 = ((raw & MASK_U32_2) >> 16) as u8;
-  let v3: u8 = ((raw & MASK_U32_3) >> 8) as u8;
-  let v4: u8 = (raw & MASK_U32_4) as u8;
-
-  result[idx] = v1;
-  result[idx + 1] = v2;
-  result[idx + 2] = v3;
-  result[idx + 3] = v4;
-}
-
 #[derive(Debug)]
 pub struct StringTable {
   id: usize,
@@ -119,71 +118,47 @@ impl StringTable {
 }

 #[derive(Debug, Clone, Copy, PartialEq)]
-pub struct NodeRef(pub usize);
+pub struct NodeRef(pub Index);

+/// Represents an offset to a node whose schema hasn't been committed yet
 #[derive(Debug, Clone, Copy, PartialEq)]
-pub struct PendingNodeRef(pub NodeRef);
-
-#[derive(Debug)]
-pub struct BoolPos(pub usize);
-#[derive(Debug)]
-pub struct FieldPos(pub usize);
-#[derive(Debug)]
-pub struct FieldArrPos(pub usize);
-#[derive(Debug)]
-pub struct StrPos(pub usize);
-#[derive(Debug)]
-pub struct UndefPos(pub usize);
-#[derive(Debug)]
-pub struct NullPos(pub usize);
-
-#[derive(Debug)]
-pub enum NodePos {
-  Bool(BoolPos),
-  #[allow(dead_code)]
-  Field(FieldPos),
-  #[allow(dead_code)]
-  FieldArr(FieldArrPos),
-  Str(StrPos),
-  Undef(UndefPos),
-  #[allow(dead_code)]
-  Null(NullPos),
-}
-
-pub trait AstBufSerializer<K, P>
-where
-  K: Into<u8> + Display,
-  P: Into<u8> + Display,
-{
-  fn header(&mut self, kind: K, parent: NodeRef, span: &Span)
-    -> PendingNodeRef;
-  fn ref_field(&mut self, prop: P) -> FieldPos;
-  fn ref_vec_field(&mut self, prop: P, len: usize) -> FieldArrPos;
-  fn str_field(&mut self, prop: P) -> StrPos;
-  fn bool_field(&mut self, prop: P) -> BoolPos;
-  fn undefined_field(&mut self, prop: P) -> UndefPos;
-  #[allow(dead_code)]
-  fn null_field(&mut self, prop: P) -> NullPos;
-  fn commit_schema(&mut self, offset: PendingNodeRef) -> NodeRef;
-
-  fn write_ref(&mut self, pos: FieldPos, value: NodeRef);
-  fn write_maybe_ref(&mut self, pos: FieldPos, value: Option<NodeRef>);
-  fn write_refs(&mut self, pos: FieldArrPos, value: Vec<NodeRef>);
-  fn write_str(&mut self, pos: StrPos, value: &str);
-  fn write_bool(&mut self, pos: BoolPos, value: bool);
-
+pub struct PendingRef(pub Index);
+
+pub trait AstBufSerializer {
   fn serialize(&mut self) -> Vec<u8>;
 }

+/// <type u8>
+/// <prop offset u32>
+/// <child idx u32>
+/// <next idx u32>
+/// <parent idx u32>
 #[derive(Debug)]
+struct Node {
+  kind: u8,
+  prop_offset: u32,
+  child: u32,
+  next: u32,
+  parent: u32,
+}
+
+#[derive(Debug)]
 pub struct SerializeCtx {
-  buf: Vec<u8>,
-  start_buf: NodeRef,
+  root_idx: Index,
+  nodes: Vec<Node>,
+  prop_stack: Vec<Vec<u8>>,
+  field_count: Vec<usize>,
+  field_buf: Vec<u8>,
+  prev_sibling_stack: Vec<Index>,
+  /// Vec of spans
+  spans: Vec<u32>,
+  /// Maps string id to the actual string
   str_table: StringTable,
-  kind_map: Vec<usize>,
-  prop_map: Vec<usize>,
-  field_count: u8,
+  /// Maps kind id to string id
+  kind_name_map: Vec<usize>,
+  /// Maps prop id to string id
+  prop_name_map: Vec<usize>,
 }

 /// This is the internal context used to allocate and fill the buffer. The point
@@ -198,20 +173,24 @@ impl SerializeCtx {
     let kind_size = kind_len as usize;
     let prop_size = prop_len as usize;
     let mut ctx = Self {
-      start_buf: NodeRef(0),
-      buf: vec![],
+      spans: Vec::with_capacity(512),
+      root_idx: 0,
+      nodes: Vec::with_capacity(512),
+      prop_stack: vec![vec![]],
+      prev_sibling_stack: vec![0],
+      field_count: vec![0],
+      field_buf: Vec::with_capacity(1024),
       str_table: StringTable::new(),
-      kind_map: vec![0; kind_size],
-      prop_map: vec![0; prop_size],
-      field_count: 0,
+      kind_name_map: vec![0; kind_size],
+      prop_name_map: vec![0; prop_size],
     };

     let empty_str = ctx.str_table.insert("");

     // Placeholder node is always 0
-    ctx.append_node(0, NodeRef(0), &DUMMY_SP, 0);
-    ctx.kind_map[0] = empty_str;
-    ctx.start_buf = NodeRef(ctx.buf.len());
+    ctx.append_node(0, &DUMMY_SP);
+    ctx.kind_name_map[0] = empty_str;
+    ctx.kind_name_map[1] = empty_str;

     // Insert default props that are always present
     let type_str = ctx.str_table.insert("type");
@@ -220,258 +199,306 @@ impl SerializeCtx {
     let length_str = ctx.str_table.insert("length");

     // These values are expected to be in this order on the JS side
-    ctx.prop_map[0] = empty_str;
-    ctx.prop_map[1] = type_str;
-    ctx.prop_map[2] = parent_str;
-    ctx.prop_map[3] = range_str;
-    ctx.prop_map[4] = length_str;
+    ctx.prop_name_map[0] = empty_str;
+    ctx.prop_name_map[1] = type_str;
+    ctx.prop_name_map[2] = parent_str;
+    ctx.prop_name_map[3] = range_str;
+    ctx.prop_name_map[4] = length_str;

     ctx
   }

+  pub fn set_root_idx(&mut self, idx: Index) {
+    self.root_idx = idx;
+  }
+
   /// Allocate a node's header
-  fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
+  fn field_header<P>(&mut self, prop: P, prop_flags: PropFlags)
   where
     P: Into<u8> + Display + Clone,
   {
-    self.field_count += 1;
-
-    let offset = self.buf.len();
+    let flags: u8 = prop_flags.into();

     let n: u8 = prop.clone().into();
-    self.buf.push(n);

-    if let Some(v) = self.prop_map.get::<usize>(n.into()) {
+    if let Some(v) = self.prop_name_map.get::<usize>(n.into()) {
       if *v == 0 {
         let id = self.str_table.insert(&format!("{prop}"));
-        self.prop_map[n as usize] = id;
+        self.prop_name_map[n as usize] = id;
       }
     }

-    let flags: u8 = prop_flags.into();
-    self.buf.push(flags);
-
-    offset
-  }
-
-  /// Allocate a property pointing to another node.
-  fn field<P>(&mut self, prop: P, prop_flags: PropFlags) -> usize
-  where
-    P: Into<u8> + Display + Clone,
-  {
-    let offset = self.field_header(prop, prop_flags);
-
-    append_usize(&mut self.buf, 0);
-
-    offset
-  }
+    // Increment field counter
+    let idx = self.field_count.len() - 1;
+    let count = self.field_count[idx];
+    self.field_count[idx] = count + 1;
+
+    let buf = self.prop_stack.last_mut().unwrap();
+    buf.push(n);
+    buf.push(flags);
+  }
+
+  fn get_node(&mut self, id: Index) -> &mut Node {
+    self.nodes.get_mut(id as usize).unwrap()
+  }
+
+  fn set_parent(&mut self, child_id: Index, parent_id: Index) {
+    let child = self.get_node(child_id);
+    child.parent = parent_id;
+  }
+
+  fn set_child(&mut self, parent_id: Index, child_id: Index) {
+    let parent = self.get_node(parent_id);
+    parent.child = child_id;
+  }
+
+  fn set_next(&mut self, node_id: Index, next_id: Index) {
+    let node = self.get_node(node_id);
+    node.next = next_id;
+  }
+
+  fn update_ref_links(&mut self, parent_id: Index, child_id: Index) {
+    let last_idx = self.prev_sibling_stack.len() - 1;
+
+    let parent = self.get_node(parent_id);
+    if parent.child == 0 {
+      parent.child = child_id;
+    } else {
+      let prev_id = self.prev_sibling_stack[last_idx];
+      self.set_next(prev_id, child_id);
+    }
+
+    self.prev_sibling_stack[last_idx] = child_id;
+    self.set_parent(child_id, parent_id);
+  }
+
+  pub fn append_node<K>(&mut self, kind: K, span: &Span) -> PendingRef
+  where
+    K: Into<u8> + Display + Clone,
+  {
+    self.append_inner(kind, span.lo.0, span.hi.0)
+  }

-  fn append_node(
+  pub fn append_inner<K>(
     &mut self,
-    kind: u8,
-    parent: NodeRef,
-    span: &Span,
-    prop_count: usize,
-  ) -> PendingNodeRef {
-    let offset = self.buf.len();
-
-    // Node type fits in a u8
-    self.buf.push(kind);
-
-    // Offset to the parent node. Will be 0 if none exists
-    append_usize(&mut self.buf, parent.0);
-
-    // Span, the start and end location of this node
-    append_u32(&mut self.buf, span.lo.0);
-    append_u32(&mut self.buf, span.hi.0);
-
-    // No node has more than <10 properties
-    debug_assert!(prop_count < 10);
-    self.buf.push(prop_count as u8);
-
-    PendingNodeRef(NodeRef(offset))
-  }
-
-  pub fn commit_schema(&mut self, node_ref: PendingNodeRef) -> NodeRef {
-    let mut offset = node_ref.0 .0;
-
-    // type + parentId + span lo + span hi
-    offset += 1 + 4 + 4 + 4;
-
-    self.buf[offset] = self.field_count;
-    self.field_count = 0;
-
-    node_ref.0
-  }
-
-  /// Allocate the node header. It's always the same for every node.
-  /// <type u8>
-  /// <parent offset u32>
-  /// <span lo u32>
-  /// <span high u32>
-  /// <property count u8> (There is no node with more than 10 properties)
-  pub fn header<N>(
-    &mut self,
-    kind: N,
-    parent: NodeRef,
-    span: &Span,
-  ) -> PendingNodeRef
+    kind: K,
+    span_lo: u32,
+    span_hi: u32,
+  ) -> PendingRef
   where
-    N: Into<u8> + Display + Clone,
+    K: Into<u8> + Display + Clone,
   {
-    let n: u8 = kind.clone().into();
+    let kind_u8: u8 = kind.clone().into();
+
+    let id: Index = self.nodes.len() as u32;
+    self.nodes.push(Node {
+      kind: kind_u8,
+      prop_offset: 0,
+      child: 0,
+      next: 0,
+      parent: 0,
+    });

-    if let Some(v) = self.kind_map.get::<usize>(n.into()) {
+    if let Some(v) = self.kind_name_map.get::<usize>(kind_u8.into()) {
       if *v == 0 {
-        let id = self.str_table.insert(&format!("{kind}"));
-        self.kind_map[n as usize] = id;
+        let s_id = self.str_table.insert(&format!("{kind}"));
+        self.kind_name_map[kind_u8 as usize] = s_id;
       }
     }

-    // Prop count will be filled with the actual value when the
-    // schema is committed.
-    self.append_node(n, parent, span, 0)
+    self.field_count.push(0);
+    self.prop_stack.push(vec![]);
+    self.prev_sibling_stack.push(0);
+
+    // write spans
+    self.spans.push(span_lo);
+    self.spans.push(span_hi);
+
+    PendingRef(id)
   }

-  /// Allocate a reference property that will hold the offset of
-  /// another node.
-  pub fn ref_field<P>(&mut self, prop: P) -> usize
-  where
-    P: Into<u8> + Display + Clone,
-  {
-    self.field(prop, PropFlags::Ref)
+  pub fn commit_node(&mut self, id: PendingRef) -> NodeRef {
+    let mut buf = self.prop_stack.pop().unwrap();
+    let count = self.field_count.pop().unwrap();
+
+    let offset = self.field_buf.len();
+
+    // All nodes have <10 fields
+    self.field_buf.push(count as u8);
+    self.field_buf.append(&mut buf);
+
+    let node = self.nodes.get_mut(id.0 as usize).unwrap();
+    node.prop_offset = offset as u32;
+
+    self.prev_sibling_stack.pop();
+
+    NodeRef(id.0)
   }

-  /// Allocate a property that is a vec of node offsets pointing to other
-  /// nodes.
-  pub fn ref_vec_field<P>(&mut self, prop: P, len: usize) -> usize
-  where
-    P: Into<u8> + Display + Clone,
-  {
-    let offset = self.field(prop, PropFlags::RefArr);
-
-    for _ in 0..len {
-      append_u32(&mut self.buf, 0);
-    }
-
-    offset
+  // Allocate an object field
+  pub fn open_obj(&mut self) {
+    self.field_count.push(0);
+    self.prop_stack.push(vec![]);
   }

-  // Allocate a property representing a string. Strings are deduplicated
-  // in the message and the property will only contain the string id.
-  pub fn str_field<P>(&mut self, prop: P) -> usize
+  pub fn commit_obj<P>(&mut self, prop: P)
   where
     P: Into<u8> + Display + Clone,
   {
-    self.field(prop, PropFlags::String)
+    let mut buf = self.prop_stack.pop().unwrap();
+    let count = self.field_count.pop().unwrap();
+    let offset = self.field_buf.len();
+    append_usize(&mut self.field_buf, count);
+    self.field_buf.append(&mut buf);
+
+    self.field_header(prop, PropFlags::Object);
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_usize(buf, offset);
   }

-  /// Allocate a bool field
-  pub fn bool_field<P>(&mut self, prop: P) -> usize
+  /// Allocate an null field
+  pub fn write_null<P>(&mut self, prop: P)
   where
     P: Into<u8> + Display + Clone,
   {
-    let offset = self.field_header(prop, PropFlags::Bool);
-    self.buf.push(0);
-    offset
+    self.field_header(prop, PropFlags::Null);
+
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_u32(buf, 0);
   }

-  /// Allocate an undefined field
-  pub fn undefined_field<P>(&mut self, prop: P) -> usize
+  /// Allocate an null field
+  pub fn write_undefined<P>(&mut self, prop: P)
   where
     P: Into<u8> + Display + Clone,
   {
-    self.field_header(prop, PropFlags::Undefined)
+    self.field_header(prop, PropFlags::Undefined);
+
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_u32(buf, 0);
   }

-  /// Allocate an undefined field
-  #[allow(dead_code)]
-  pub fn null_field<P>(&mut self, prop: P) -> usize
+  /// Allocate a number field
+  pub fn write_num<P>(&mut self, prop: P, value: &str)
   where
     P: Into<u8> + Display + Clone,
   {
-    self.field_header(prop, PropFlags::Null)
+    self.field_header(prop, PropFlags::Number);
+
+    let id = self.str_table.insert(value);
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_usize(buf, id);
   }

-  /// Replace the placeholder of a reference field with the actual offset
-  /// to the node we want to point to.
-  pub fn write_ref(&mut self, field_offset: usize, value: NodeRef) {
-    #[cfg(debug_assertions)]
-    {
-      let value_kind = self.buf[field_offset + 1];
-      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
-        panic!("Trying to write a ref into a non-ref field")
-      }
-    }
-
-    write_usize(&mut self.buf, value.0, field_offset + 2);
+  /// Allocate a bigint field
+  pub fn write_bigint<P>(&mut self, prop: P, value: &str)
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::BigInt);
+
+    let id = self.str_table.insert(value);
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_usize(buf, id);
   }

-  /// Helper for writing optional node offsets
-  pub fn write_maybe_ref(
-    &mut self,
-    field_offset: usize,
-    value: Option<NodeRef>,
-  ) {
-    #[cfg(debug_assertions)]
-    {
-      let value_kind = self.buf[field_offset + 1];
-      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Ref {
-        panic!("Trying to write a ref into a non-ref field")
-      }
-    }
-
-    let ref_value = if let Some(v) = value { v } else { NodeRef(0) };
-    write_usize(&mut self.buf, ref_value.0, field_offset + 2);
-  }
-
-  /// Write a vec of node offsets into the property. The necessary space
-  /// has been reserved earlier.
-  pub fn write_refs(&mut self, field_offset: usize, value: Vec<NodeRef>) {
-    #[cfg(debug_assertions)]
-    {
-      let value_kind = self.buf[field_offset + 1];
-      if PropFlags::try_from(value_kind).unwrap() != PropFlags::RefArr {
-        panic!("Trying to write a ref into a non-ref array field")
-      }
-    }
-
-    let mut offset = field_offset + 2;
-    write_usize(&mut self.buf, value.len(), offset);
-    offset += 4;
-
-    for item in value {
-      write_usize(&mut self.buf, item.0, offset);
-      offset += 4;
-    }
+  /// Allocate a RegExp field
+  pub fn write_regex<P>(&mut self, prop: P, value: &str)
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::Regex);
+
+    let id = self.str_table.insert(value);
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_usize(buf, id);
   }

   /// Store the string in our string table and save the id of the string
   /// in the current field.
-  pub fn write_str(&mut self, field_offset: usize, value: &str) {
-    #[cfg(debug_assertions)]
-    {
-      let value_kind = self.buf[field_offset + 1];
-      if PropFlags::try_from(value_kind).unwrap() != PropFlags::String {
-        panic!("Trying to write a ref into a non-string field")
-      }
-    }
-
+  pub fn write_str<P>(&mut self, prop: P, value: &str)
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::String);
+
     let id = self.str_table.insert(value);
-    write_usize(&mut self.buf, id, field_offset + 2);
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_usize(buf, id);
   }

   /// Write a bool to a field.
-  pub fn write_bool(&mut self, field_offset: usize, value: bool) {
-    #[cfg(debug_assertions)]
-    {
-      let value_kind = self.buf[field_offset + 1];
-      if PropFlags::try_from(value_kind).unwrap() != PropFlags::Bool {
-        panic!("Trying to write a ref into a non-bool field")
-      }
-    }
-
-    self.buf[field_offset + 2] = if value { 1 } else { 0 };
+  pub fn write_bool<P>(&mut self, prop: P, value: bool)
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::Bool);
+
+    let n = if value { 1 } else { 0 };
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_u32(buf, n);
+  }
+
+  /// Replace the placeholder of a reference field with the actual offset
+  /// to the node we want to point to.
+  pub fn write_ref<P>(&mut self, prop: P, parent: &PendingRef, value: NodeRef)
+  where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::Ref);
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_u32(buf, value.0);
+
+    if parent.0 > 0 {
+      self.update_ref_links(parent.0, value.0);
+    }
+  }
+
+  /// Helper for writing optional node offsets
+  pub fn write_maybe_ref<P>(
+    &mut self,
+    prop: P,
+    parent: &PendingRef,
+    value: Option<NodeRef>,
+  ) where
+    P: Into<u8> + Display + Clone,
+  {
+    if let Some(v) = value {
+      self.write_ref(prop, parent, v);
+    } else {
+      self.write_null(prop);
+    };
+  }
+
+  /// Write a vec of node offsets into the property. The necessary space
+  /// has been reserved earlier.
+  pub fn write_ref_vec<P>(
+    &mut self,
+    prop: P,
+    parent_ref: &PendingRef,
+    value: Vec<NodeRef>,
+  ) where
+    P: Into<u8> + Display + Clone,
+  {
+    self.field_header(prop, PropFlags::RefArr);
+    let group_id = self.append_node(GROUP_KIND, &DUMMY_SP);
+    let group_id = self.commit_node(group_id).0;
+
+    let buf = self.prop_stack.last_mut().unwrap();
+    append_u32(buf, group_id);
+
+    self.update_ref_links(parent_ref.0, group_id);
+
+    let mut prev_id = 0;
+    for (i, item) in value.iter().enumerate() {
+      self.set_parent(item.0, group_id);
+
+      if i == 0 {
+        self.set_child(group_id, item.0);
+      } else {
+        self.set_next(prev_id, item.0);
+      }
+
+      prev_id = item.0;
+    }
   }
@@ -481,6 +508,8 @@ impl SerializeCtx {
   /// <string table>
   /// <node kind map> <- node kind id maps to string id
   /// <node prop map> <- node property id maps to string id
+  /// <spans> <- List of spans, rarely needed
+  /// <offset spans>
   /// <offset kind map>
   /// <offset prop map>
   /// <offset str table>
@@ -490,7 +519,13 @@
     // The buffer starts with the serialized AST first, because that
     // contains absolute offsets. By butting this at the start of the
     // message we don't have to waste time updating any offsets.
-    buf.append(&mut self.buf);
+    for node in &self.nodes {
+      buf.push(node.kind);
+      append_u32(&mut buf, node.prop_offset);
+      append_u32(&mut buf, node.child);
+      append_u32(&mut buf, node.next);
+      append_u32(&mut buf, node.parent);
+    }

     // Next follows the string table. We'll keep track of the offset
     // in the message of where the string table begins
@@ -507,8 +542,8 @@
     // Write the total number of entries in the kind -> str mapping table
     // TODO: make this a u8
-    append_usize(&mut buf, self.kind_map.len());
-    for v in &self.kind_map {
+    append_usize(&mut buf, self.kind_name_map.len());
+    for v in &self.kind_name_map {
       append_usize(&mut buf, *v);
     }
@@ -517,19 +552,35 @@
     // as u8.
     let offset_prop_map = buf.len();
     // Write the total number of entries in the kind -> str mapping table
-    append_usize(&mut buf, self.prop_map.len());
-    for v in &self.prop_map {
+    append_usize(&mut buf, self.prop_name_map.len());
+    for v in &self.prop_name_map {
       append_usize(&mut buf, *v);
     }

+    // Spans are rarely needed, so they're stored in a separate array.
+    // They're indexed by the node id.
+    let offset_spans = buf.len();
+    for v in &self.spans {
+      append_u32(&mut buf, *v);
+    }
+
+    // The field value table. They're detached from nodes as they're not
+    // as frequently needed as the nodes themselves. The most common
+    // operation is traversal and we can traverse nodes without knowing
+    // about the fields.
+    let offset_props = buf.len();
+    buf.append(&mut self.field_buf);
+
     // Putting offsets of relevant parts of the buffer at the end. This
     // allows us to hop to the relevant part by merely looking at the last
     // for values in the message. Each value represents an offset into the
     // buffer.
+    append_usize(&mut buf, offset_props);
+    append_usize(&mut buf, offset_spans);
     append_usize(&mut buf, offset_kind_map);
     append_usize(&mut buf, offset_prop_map);
     append_usize(&mut buf, offset_str_table);
-    append_usize(&mut buf, self.start_buf.0);
+    append_u32(&mut buf, self.root_idx);

     buf
   }
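
As the serializer above notes, the last values of the message are offsets into the buffer (props, spans, kind map, prop map, string table) plus the root node index, so a consumer can locate every section by reading the tail first. A minimal, hypothetical TypeScript sketch of that step (field names assumed, not taken from the actual JS side):

```ts
// Hypothetical sketch - assumes the six big-endian u32 values appended at the
// very end of serialize(): props, spans, kind map, prop map, string table, root idx.
interface BufferTrailer {
  propsOffset: number;
  spansOffset: number;
  kindMapOffset: number;
  propMapOffset: number;
  strTableOffset: number;
  rootIdx: number;
}

function readTrailer(buf: Uint8Array): BufferTrailer {
  const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
  // i counts from the first of the six trailing u32 values.
  const at = (i: number) => view.getUint32(buf.byteLength - (6 - i) * 4, false);
  return {
    propsOffset: at(0),
    spansOffset: at(1),
    kindMapOffset: at(2),
    propMapOffset: at(3),
    strTableOffset: at(4),
    rootIdx: at(5),
  };
}
```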

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large