More work on record spread destructs
smores56 committed Jan 22, 2025
1 parent 8e312e0 commit 34b519e
Showing 31 changed files with 599 additions and 181 deletions.
22 changes: 11 additions & 11 deletions crates/compiler/builtins/roc/Dict.roc
@@ -218,7 +218,7 @@ release_excess_capacity = |@Dict({ buckets, data, max_bucket_capacity: original_
## capacity_of_dict = Dict.capacity(food_dict)
## ```
capacity : Dict * * -> U64
capacity = |@Dict({ max_bucket_capacity })|
capacity = |@Dict({ max_bucket_capacity, .. })|
max_bucket_capacity

## Returns a dictionary containing the key and value provided as input.
@@ -261,7 +261,7 @@ from_list = |data|
## |> Bool.is_eq(3)
## ```
len : Dict * * -> U64
len = |@Dict({ data })|
len = |@Dict({ data, .. })|
List.len(data)

## Check if the dictionary is empty.
@@ -271,7 +271,7 @@ len = |@Dict({ data })|
## Dict.is_empty(Dict.empty({}))
## ```
is_empty : Dict * * -> Bool
is_empty = |@Dict({ data })|
is_empty = |@Dict({ data, .. })|
List.is_empty(data)

## Clears all elements from a dictionary keeping around the allocation if it isn't huge.
@@ -341,7 +341,7 @@ join_map = |dict, transform|
## |> Bool.is_eq(36)
## ```
walk : Dict k v, state, (state, k, v -> state) -> state
walk = |@Dict({ data }), initial_state, transform|
walk = |@Dict({ data, .. }), initial_state, transform|
List.walk(data, initial_state, |state, (k, v)| transform(state, k, v))

## Same as [Dict.walk], except you can stop walking early.
@@ -373,7 +373,7 @@ walk = |@Dict({ data }), initial_state, transform|
## expect someone_is_an_adult == Bool.true
## ```
walk_until : Dict k v, state, (state, k, v -> [Continue state, Break state]) -> state
walk_until = |@Dict({ data }), initial_state, transform|
walk_until = |@Dict({ data, .. }), initial_state, transform|
List.walk_until(data, initial_state, |state, (k, v)| transform(state, k, v))

## Run the given function on each key-value pair of a dictionary, and return
@@ -604,7 +604,7 @@ circular_dist = |start, end, size|
## |> Bool.is_eq([(1, "One"), (2, "Two"), (3, "Three"), (4, "Four")])
## ```
to_list : Dict k v -> List (k, v)
to_list = |@Dict({ data })|
to_list = |@Dict({ data, .. })|
data

## Returns the keys of a dictionary as a [List].
@@ -619,7 +619,7 @@ to_list = |@Dict({ data })|
## |> Bool.is_eq([1,2,3,4])
## ```
keys : Dict k v -> List k
keys = |@Dict({ data })|
keys = |@Dict({ data, .. })|
List.map(data, |(k, _)| k)

## Returns the values of a dictionary as a [List].
@@ -634,7 +634,7 @@ keys = |@Dict({ data })|
## |> Bool.is_eq(["One","Two","Three","Four"])
## ```
values : Dict k v -> List v
values = |@Dict({ data })|
values = |@Dict({ data, .. })|
List.map(data, |(_, v)| v)

## Combine two dictionaries by keeping the [union](https://en.wikipedia.org/wiki/Union_(set_theory))
@@ -757,7 +757,7 @@ decrement_dist = |dist_and_fingerprint|
Num.sub_wrap(dist_and_fingerprint, dist_inc)

find : Dict k v, k -> { bucket_index : U64, result : Result v [KeyNotFound] }
find = |@Dict({ buckets, data, shifts }), key|
find = |@Dict({ buckets, data, shifts, .. }), key|
hash = hash_key(key)
dist_and_fingerprint = dist_and_fingerprint_from_hash(hash)
bucket_index = bucket_index_from_hash(hash, shifts)
@@ -872,7 +872,7 @@ remove_bucket_helper = |buckets, bucket_index|
(buckets, bucket_index)

increase_size : Dict k v -> Dict k v
increase_size = |@Dict({ data, max_bucket_capacity, max_load_factor, shifts })|
increase_size = |@Dict({ data, max_bucket_capacity, max_load_factor, shifts, .. })|
if max_bucket_capacity != max_bucket_count then
new_shifts = shifts |> Num.sub_wrap(1)
(buckets0, new_max_bucket_capacity) = alloc_buckets_from_shift(new_shifts, max_load_factor)
@@ -1329,7 +1329,7 @@ init_seed = |seed|
|> wymix(wyp1)
|> Num.bitwise_xor(seed)

complete = |@LowLevelHasher({ state })| state
complete = |@LowLevelHasher({ state, .. })| state

# These implementations hash each value individually with the seed and then mix
# the resulting hash with the state. There are other options that may be faster
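The Dict.roc (and LowLevelHasher) changes are mechanical: every record destructure on the opaque wrapper that binds only some of the fields now spells the rest out with `..`. Rust's struct rest patterns express the same idea; the sketch below uses a hypothetical `Dict` struct (not the builtin's real layout) purely to illustrate what `|@Dict({ data, .. })|` is doing.

```rust
// Hypothetical stand-in for the builtin Dict's internal record; only the
// rest-pattern syntax is the point here.
#[allow(dead_code)]
struct Dict {
    data: Vec<(String, u64)>,
    max_bucket_capacity: u64,
    shifts: u8,
}

// Bind just the field we need and let `..` cover the rest, mirroring
// `len = |@Dict({ data, .. })| List.len(data)` in the diff above.
fn len(dict: &Dict) -> usize {
    let Dict { data, .. } = dict;
    data.len()
}

fn main() {
    let d = Dict {
        data: vec![("one".to_string(), 1)],
        max_bucket_capacity: 8,
        shifts: 61,
    };
    assert_eq!(len(&d), 1);
}
```

Spelling out the `..` makes the "ignore the remaining fields" intent explicit at each destructure site, which appears to be the point of updating the builtins wholesale.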
2 changes: 0 additions & 2 deletions crates/compiler/can/src/copy.rs
@@ -799,12 +799,10 @@ fn deep_copy_pattern_help<C: CopyEnv>(
}
RecordDestructure {
whole_var,
ext_var,
destructs,
opt_spread,
} => RecordDestructure {
whole_var: sub!(*whole_var),
ext_var: sub!(*ext_var),
destructs: destructs
.iter()
.map(|lrd| {
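With `ext_var` gone, deep-copying a `RecordDestructure` only has to substitute `whole_var`, each destruct's variable, and whatever the spread carries. A rough, self-contained sketch of that substitution shape (simplified stand-in types and a plain closure instead of the real `CopyEnv`/`sub!` machinery):

```rust
// Simplified stand-ins; the real destructs and spread carry more information.
#[derive(Debug)]
struct Destruct {
    label: String,
    var: u32,
}

#[derive(Debug)]
enum Pattern {
    RecordDestructure {
        whole_var: u32,
        destructs: Vec<Destruct>,
        opt_spread: Box<Option<Pattern>>,
    },
}

fn deep_copy(pat: &Pattern, sub: &mut dyn FnMut(u32) -> u32) -> Pattern {
    match pat {
        Pattern::RecordDestructure { whole_var, destructs, opt_spread } => {
            Pattern::RecordDestructure {
                // Substitute the whole-record variable and each field's variable;
                // there is no separate extension variable left to copy.
                whole_var: sub(*whole_var),
                destructs: destructs
                    .iter()
                    .map(|d| Destruct { label: d.label.clone(), var: sub(d.var) })
                    .collect(),
                opt_spread: Box::new(match &**opt_spread {
                    Some(inner) => Some(deep_copy(inner, sub)),
                    None => None,
                }),
            }
        }
    }
}

fn main() {
    let pat = Pattern::RecordDestructure {
        whole_var: 0,
        destructs: vec![Destruct { label: "x".into(), var: 1 }],
        opt_spread: Box::new(None),
    };
    let mut next: u32 = 100;
    let copied = deep_copy(&pat, &mut |_: u32| -> u32 {
        next += 1;
        next
    });
    println!("{copied:?}");
}
```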
13 changes: 12 additions & 1 deletion crates/compiler/can/src/debug/pretty_print.rs
@@ -538,7 +538,11 @@ fn pattern<'a>(
} => text!(f, "@{} ", opaque.module_string(c.interns))
.append(pattern(c, Free, f, &argument.1.value))
.group(),
RecordDestructure { destructs, .. } => f
RecordDestructure {
destructs,
opt_spread,
whole_var: _,
} => f
.text("{")
.append(
f.intersperse(
@@ -558,6 +562,13 @@
f.text(", "),
),
)
.append(match &**opt_spread {
None => f.text(""),
Some(spread) => match &spread.opt_pattern.value {
None => f.text(".."),
Some(spread_pat) => f.text("..").append(pattern(c, Free, f, &spread_pat.value)),
},
})
.append(f.text("}"))
.group(),
TupleDestructure { destructs, .. } => f
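The debug printer now renders the spread as well: a bare `..` when the spread has no pattern of its own, or `..` immediately followed by that pattern. A loose sketch using plain string building instead of the pretty-printing combinators (the `Pat` type and the exact separators here are assumptions, not the printer's real output):

```rust
// A loose model: `None` = no spread, `Some(None)` = a bare `..`,
// `Some(Some(pat))` = `..` followed by the spread's own pattern.
enum Pat {
    Identifier(String),
    RecordDestructure {
        destructs: Vec<String>,
        opt_spread: Option<Option<Box<Pat>>>,
    },
}

fn print_pat(p: &Pat) -> String {
    match p {
        Pat::Identifier(name) => name.clone(),
        Pat::RecordDestructure { destructs, opt_spread } => {
            let mut out = String::from("{");
            out.push_str(&destructs.join(", "));
            match opt_spread {
                None => {}
                Some(spread) => {
                    if !destructs.is_empty() {
                        out.push_str(", ");
                    }
                    out.push_str("..");
                    if let Some(inner) = spread {
                        out.push_str(&print_pat(inner));
                    }
                }
            }
            out.push('}');
            out
        }
    }
}

fn main() {
    let pat = Pat::RecordDestructure {
        destructs: vec!["name".into(), "age".into()],
        opt_spread: Some(Some(Box::new(Pat::Identifier("rest".into())))),
    };
    assert_eq!(print_pat(&pat), "{name, age, ..rest}");
}
```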
12 changes: 11 additions & 1 deletion crates/compiler/can/src/def.rs
@@ -2181,10 +2181,20 @@ fn pattern_to_vars_by_symbol(
}
}

RecordDestructure { destructs, .. } => {
RecordDestructure {
destructs,
opt_spread,
whole_var: _,
} => {
for destruct in destructs {
vars_by_symbol.insert(destruct.value.symbol, destruct.value.var);
}

if let Some(spread) = &**opt_spread {
if let Some(spread_pat) = &spread.opt_pattern.value {
pattern_to_vars_by_symbol(vars_by_symbol, &spread_pat.value, spread.spread_var);
}
}
}

List {
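The new branch registers the symbols bound inside a spread's pattern just like field bindings, but typed by the spread's own variable (`spread.spread_var`). A simplified model of that walk (hypothetical `Var`/`Spread`/`Pattern` types, plain strings standing in for symbols):

```rust
use std::collections::HashMap;

type Var = u32;

struct Spread {
    spread_var: Var,
    opt_pattern: Option<Box<Pattern>>,
}

enum Pattern {
    Identifier(String),
    RecordDestructure {
        destructs: Vec<(String, Var)>,
        opt_spread: Box<Option<Spread>>,
    },
}

fn vars_by_symbol(out: &mut HashMap<String, Var>, pattern: &Pattern, var: Var) {
    match pattern {
        Pattern::Identifier(name) => {
            out.insert(name.clone(), var);
        }
        Pattern::RecordDestructure { destructs, opt_spread } => {
            for (name, field_var) in destructs {
                out.insert(name.clone(), *field_var);
            }
            // New in this commit: a spread that binds a pattern contributes its
            // symbols too, typed by the spread's own variable.
            if let Some(spread) = &**opt_spread {
                if let Some(spread_pat) = &spread.opt_pattern {
                    vars_by_symbol(out, spread_pat, spread.spread_var);
                }
            }
        }
    }
}

fn main() {
    let pat = Pattern::RecordDestructure {
        destructs: vec![("x".into(), 1), ("y".into(), 2)],
        opt_spread: Box::new(Some(Spread {
            spread_var: 3,
            opt_pattern: Some(Box::new(Pattern::Identifier("rest".into()))),
        })),
    };
    let mut out = HashMap::new();
    vars_by_symbol(&mut out, &pat, 0);
    assert_eq!(out.get("rest"), Some(&3)); // bound via the spread's variable
}
```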
50 changes: 41 additions & 9 deletions crates/compiler/can/src/exhaustive.rs
@@ -79,8 +79,13 @@ enum SketchedPattern {
enum IndexCtor<'a> {
/// Index an opaque type. There should be one argument.
Opaque,
/// Index a record type. The arguments are the types of the record fields.
Record(&'a [Lowercase]),
// TODO: can we get this size down to a pointer width or less to avoid bloating the size?
/// Index a record type.
Record {
/// The arguments are the types of the record fields.
fields: &'a [Lowercase],
opt_spread: Option<&'a IndexCtor<'a>>,
},
/// Index a tuple type.
Tuple,
/// Index a guard constructor. The arguments are a faux guard pattern, and then the real
@@ -114,7 +119,7 @@ impl<'a> IndexCtor<'a> {
Self::Tag(tag_name)
}
RenderAs::Opaque => Self::Opaque,
RenderAs::Record(fields) => Self::Record(fields),
RenderAs::Record { fields, opt_spread } => Self::Record { fields, opt_spread },
RenderAs::Tuple => Self::Tuple,
RenderAs::Guard => Self::Guard,
}
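On the TODO about size: the new `Record` variant carries a fat slice reference plus an optional reference, so the enum cannot be a single pointer wide as written, even though `Option<&IndexCtor>` itself stays pointer-sized thanks to the null niche. A standalone probe with look-alike field shapes (`Lowercase` is just a `String` stand-in here) makes that easy to measure, with a boxed variant shown as one possible direction, not what the compiler does:

```rust
use std::mem::size_of;

type Lowercase = String; // stand-in for the interned identifier type

#[allow(dead_code)]
enum IndexCtor<'a> {
    Opaque,
    Record {
        fields: &'a [Lowercase],
        opt_spread: Option<&'a IndexCtor<'a>>,
    },
    Tuple,
    Guard,
}

// One way to shrink the enum: move the wide payload behind a Box, at the cost
// of an allocation and an indirection.
#[allow(dead_code)]
enum IndexCtorBoxed {
    Opaque,
    Record(Box<(Vec<Lowercase>, bool)>),
    Tuple,
    Guard,
}

fn main() {
    println!("&[Lowercase]       = {} bytes", size_of::<&[Lowercase]>());
    println!("Option<&IndexCtor> = {} bytes", size_of::<Option<&IndexCtor<'static>>>());
    println!("IndexCtor          = {} bytes", size_of::<IndexCtor<'static>>());
    println!("IndexCtorBoxed     = {} bytes", size_of::<IndexCtorBoxed>());
    println!("pointer width      = {} bytes", size_of::<usize>());
}
```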
@@ -165,7 +170,7 @@ fn index_var(
FlatType::Apply(..) => internal_error!("not an indexable constructor"),
FlatType::Record(fields, ext) => {
let fields_order = match render_as {
RenderAs::Record(fields) => fields,
RenderAs::Record { fields, opt_spread } => fields,
_ => internal_error!(
"record constructors must always be rendered as records"
),
@@ -228,12 +233,12 @@
return Ok(vec![]);
}
FlatType::EmptyRecord => {
debug_assert!(matches!(ctor, IndexCtor::Record(..)));
debug_assert!(matches!(ctor, IndexCtor::Record { .. }));
// If there are optional record fields we don't unify them, but we need to
// cover them. Since optional fields correspond to "any" patterns, we can pass
// through arbitrary types.
let num_fields = match render_as {
RenderAs::Record(fields) => fields.len(),
RenderAs::Record { fields, opt_spread } => fields.len(),
_ => internal_error!(
"record constructors must always be rendered as records"
),
@@ -338,9 +343,20 @@ fn sketch_pattern(pattern: &crate::pattern::Pattern) -> SketchedPattern {
&FloatLiteral(_, _, _, f, _) => SP::Literal(Literal::Float(f64::to_bits(f))),
StrLiteral(v) => SP::Literal(Literal::Str(v.clone())),
&SingleQuote(_, _, c, _) => SP::Literal(Literal::Byte(c as u8)),
RecordDestructure { destructs, .. } => {
RecordDestructure {
destructs,
opt_spread,
whole_var: _,
} => {
let tag_id = TagId(0);
let mut patterns = std::vec::Vec::with_capacity(destructs.len());
let mut patterns = std::vec::Vec::with_capacity(
destructs.len()
+ if opt_spread.is_some_and(|spread| spread.opt_pattern.value.is_some()) {
1
} else {
0
},
);
let mut field_names = std::vec::Vec::with_capacity(destructs.len());

for Loc {
Expand All @@ -358,8 +374,24 @@ fn sketch_pattern(pattern: &crate::pattern::Pattern) -> SketchedPattern {
}
}

let spread_render_as = if let Some(spread) = &**opt_spread {
match &spread.opt_pattern.value {
None => Box::new(Some(None)),
Some(spread_pat) => {
let inner_sp = sketch_pattern(&spread_pat.value);
patterns.push(inner_sp);
Box::new(Some(Some(RenderAs)))
}
}
} else {
Box::new(None)
};

let union = Union {
render_as: RenderAs::Record(field_names),
render_as: RenderAs::Record {
fields: field_names,
opt_spread: spread_render_as,
},
alternatives: vec![Ctor {
name: CtorName::Tag(TagName("#Record".into())),
tag_id,
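In the sketching step a record pattern contributes one exhaustiveness column per named destruct, plus one extra column when the spread itself binds a pattern; a bare `..` adds nothing because it already matches whatever fields are left over. The toy model below mirrors only that column bookkeeping (it is not the compiler's `SketchedPattern`/`RenderAs` machinery):

```rust
#[derive(Clone, Debug)]
enum Sketch {
    Anything,     // a plain binding, or a bare `..`
    Literal(i64), // stand-in for any refutable sub-pattern
}

/// One column per named destruct, plus one if the spread binds a sub-pattern.
/// `opt_spread`: None = no spread, Some(None) = bare `..`, Some(Some(p)) = `..` with a pattern.
fn record_columns(
    destructs: &[(&str, Sketch)],
    opt_spread: Option<Option<Sketch>>,
) -> (Vec<String>, Vec<Sketch>) {
    let extra = matches!(&opt_spread, Some(Some(_))) as usize;
    let mut fields = Vec::with_capacity(destructs.len());
    let mut columns = Vec::with_capacity(destructs.len() + extra);
    for (name, sketch) in destructs {
        fields.push((*name).to_string());
        columns.push(sketch.clone());
    }
    if let Some(Some(spread_pat)) = opt_spread {
        columns.push(spread_pat);
    }
    (fields, columns)
}

fn main() {
    let (fields, columns) = record_columns(
        &[("x", Sketch::Anything), ("y", Sketch::Literal(1))],
        Some(Some(Sketch::Anything)),
    );
    assert_eq!(fields, vec!["x".to_string(), "y".to_string()]);
    assert_eq!(columns.len(), 3); // two named destructs + one for the spread's pattern
}
```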
2 changes: 1 addition & 1 deletion crates/compiler/can/src/expr.rs
@@ -1547,7 +1547,7 @@ pub fn canonicalize_expr<'a>(
loc_binop2,
);

InvalidPrecedence(problem, region)
InvalidPrecedence(problem)
}
};

19 changes: 15 additions & 4 deletions crates/compiler/can/src/module.rs
@@ -142,7 +142,6 @@ pub struct ModuleParams {
pub whole_symbol: Symbol,
pub whole_var: Variable,
pub record_var: Variable,
pub record_ext_var: Variable,
pub destructs: Vec<Loc<RecordDestruct>>,
// used while lowering passed functions
pub arity_by_name: VecMap<IdentId, usize>,
@@ -152,7 +151,6 @@ impl ModuleParams {
pub fn pattern(&self) -> Loc<Pattern> {
let record_pattern = Pattern::RecordDestructure {
whole_var: self.record_var,
ext_var: self.record_ext_var,
destructs: self.destructs.clone(),
opt_spread: Box::new(None),
};
@@ -310,7 +308,6 @@ pub fn canonicalize_module_defs<'a>(
whole_var,
whole_symbol,
record_var: var_store.fresh(),
record_ext_var: var_store.fresh(),
destructs,
arity_by_name: Default::default(),
}
@@ -823,7 +820,11 @@ fn fix_values_captured_in_closure_pattern(
closure_captures,
);
}
RecordDestructure { destructs, .. } => {
RecordDestructure {
destructs,
whole_var: _,
opt_spread,
} => {
for loc_destruct in destructs.iter_mut() {
use crate::pattern::DestructType::*;
match &mut loc_destruct.value.typ {
Expand All @@ -840,6 +841,16 @@ fn fix_values_captured_in_closure_pattern(
),
}
}

if let Some(spread) = &mut **opt_spread {
if let Some(spread_pat) = &mut spread.opt_pattern.value {
fix_values_captured_in_closure_pattern(
&mut spread_pat.value,
no_capture_symbols,
closure_captures,
);
}
}
}
TupleDestructure { destructs, .. } => {
for loc_destruct in destructs.iter_mut() {
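The closure-capture fix-up recurses into the spread as well, and because the spread sits behind a `Box<Option<…>>` it is reached with a `&mut **opt_spread` reborrow. A stripped-down sketch of that in-place traversal, using a rename pass over simplified types rather than the real `fix_values_captured_in_closure_pattern`:

```rust
#[derive(Debug)]
struct Spread {
    opt_pattern: Option<Box<Pattern>>,
}

#[derive(Debug)]
enum Pattern {
    Identifier(String),
    RecordDestructure {
        destructs: Vec<String>,
        opt_spread: Box<Option<Spread>>,
    },
}

fn rename_bindings(pattern: &mut Pattern, suffix: &str) {
    match pattern {
        Pattern::Identifier(name) => name.push_str(suffix),
        Pattern::RecordDestructure { destructs, opt_spread } => {
            for name in destructs.iter_mut() {
                name.push_str(suffix);
            }
            // `*opt_spread` is the Box, `**opt_spread` the Option it owns;
            // `&mut **` reborrows that Option so it can be matched in place.
            if let Some(spread) = &mut **opt_spread {
                if let Some(spread_pat) = &mut spread.opt_pattern {
                    rename_bindings(spread_pat, suffix);
                }
            }
        }
    }
}

fn main() {
    let mut pat = Pattern::RecordDestructure {
        destructs: vec!["x".into()],
        opt_spread: Box::new(Some(Spread {
            opt_pattern: Some(Box::new(Pattern::Identifier("rest".into()))),
        })),
    };
    rename_bindings(&mut pat, "_2");
    println!("{pat:?}"); // both "x_2" and "rest_2" appear in the output
}
```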
35 changes: 27 additions & 8 deletions crates/compiler/can/src/pattern.rs
@@ -62,7 +62,6 @@ pub enum Pattern {
},
RecordDestructure {
whole_var: Variable,
ext_var: Variable,
destructs: Vec<Loc<RecordDestruct>>,
opt_spread: Box<Option<RecordDestructureSpread>>,
},
@@ -145,12 +144,26 @@ impl Pattern {
| MalformedPattern(..)
| AbilityMemberSpecialization { .. } => true,

RecordDestructure { destructs, .. } => {
RecordDestructure {
destructs,
opt_spread,
whole_var: _,
} => {
// If all destructs are surely exhaustive, then this is surely exhaustive.
destructs.iter().all(|d| match &d.value.typ {
if !destructs.iter().all(|d| match &d.value.typ {
DestructType::Required | DestructType::Optional(_, _) => true,
DestructType::Guard(_, pat) => pat.value.surely_exhaustive(),
})
}) {
return false;
}

match &**opt_spread {
Some(spread) => match &spread.opt_pattern.value {
Some(spread_pat) => spread_pat.value.surely_exhaustive(),
None => true,
},
None => true,
}
}
TupleDestructure { destructs, .. } => {
// If all destructs are surely exhaustive, then this is surely exhaustive.
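The updated rule: a record destructure is irrefutable only if every named destruct is irrefutable and, when the spread carries a pattern of its own, that pattern is irrefutable too; a bare `..` always matches. A self-contained sketch of exactly that check over simplified pattern types (guards and optional fields are collapsed into a single `Option`):

```rust
#[allow(dead_code)]
enum Pattern {
    Identifier,   // always matches
    Literal(i64), // refutable
    RecordDestructure {
        destructs: Vec<Destruct>,
        opt_spread: Box<Option<Spread>>,
    },
}

struct Destruct {
    // None = plain `{ field }` binding; Some(pat) = a guard-style sub-pattern.
    guard: Option<Pattern>,
}

struct Spread {
    // None = bare `..`; Some(pat) = the spread binds a pattern of its own.
    opt_pattern: Option<Box<Pattern>>,
}

fn surely_exhaustive(pattern: &Pattern) -> bool {
    match pattern {
        Pattern::Identifier => true,
        Pattern::Literal(_) => false,
        Pattern::RecordDestructure { destructs, opt_spread } => {
            // Every named destruct must be irrefutable...
            if !destructs.iter().all(|d| match &d.guard {
                None => true,
                Some(pat) => surely_exhaustive(pat),
            }) {
                return false;
            }
            // ...and so must the spread's pattern, if it has one.
            match &**opt_spread {
                Some(spread) => match &spread.opt_pattern {
                    Some(pat) => surely_exhaustive(pat),
                    None => true,
                },
                None => true,
            }
        }
    }
}

fn main() {
    let refutable = Pattern::RecordDestructure {
        destructs: vec![Destruct { guard: None }],
        opt_spread: Box::new(Some(Spread {
            opt_pattern: Some(Box::new(Pattern::Literal(3))),
        })),
    };
    assert!(!surely_exhaustive(&refutable));

    let irrefutable = Pattern::RecordDestructure {
        destructs: vec![Destruct { guard: None }],
        opt_spread: Box::new(Some(Spread { opt_pattern: None })),
    };
    assert!(surely_exhaustive(&irrefutable));
}
```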
@@ -679,7 +692,6 @@ pub fn canonicalize_pattern<'a>(
}

RecordDestructure(patterns) => {
let ext_var = var_store.fresh();
let whole_var = var_store.fresh();

let (destructs, opt_spread, opt_erroneous) = canonicalize_record_destructs(
Expand All @@ -697,7 +709,6 @@ pub fn canonicalize_pattern<'a>(
// use the resulting RuntimeError. Otherwise, return a successful record destructure.
opt_erroneous.unwrap_or(Pattern::RecordDestructure {
whole_var,
ext_var,
destructs,
opt_spread: Box::new(opt_spread),
})
@@ -1100,8 +1111,16 @@ impl<'a> BindingsFromPattern<'a> {
let it = destructs.iter().rev().map(TupleDestruct);
stack.extend(it);
}
RecordDestructure { destructs, .. } => {
let it = destructs.iter().rev().map(RecordDestruct);
RecordDestructure {
destructs,
opt_spread,
whole_var: _,
} => {
let it = destructs.iter().rev().map(RecordDestruct).chain(
opt_spread.iter().flat_map(|spread| {
spread.opt_pattern.value.as_ref().map(Pattern)
}),
);
stack.extend(it);
}
NumLiteral(..)
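`BindingsFromPattern` now also walks the spread's pattern, chained lazily after the named destructs. A loose sketch of the same chaining over simplified types (plain names instead of `Symbol`s, and eager `Vec`s at the leaves):

```rust
struct Spread {
    opt_pattern: Option<Box<Pattern>>,
}

enum Pattern {
    Identifier(String),
    RecordDestructure {
        destructs: Vec<String>,
        opt_spread: Box<Option<Spread>>,
    },
}

fn bindings(pattern: &Pattern) -> Vec<&str> {
    match pattern {
        Pattern::Identifier(name) => vec![name.as_str()],
        Pattern::RecordDestructure { destructs, opt_spread } => destructs
            .iter()
            .map(String::as_str)
            // `Option::iter` yields zero or one spread; `flat_map` then pulls in
            // that spread's bindings, if it has a pattern at all.
            .chain(opt_spread.iter().flat_map(|spread| {
                spread.opt_pattern.iter().flat_map(|pat| bindings(pat))
            }))
            .collect(),
    }
}

fn main() {
    let pat = Pattern::RecordDestructure {
        destructs: vec!["x".into(), "y".into()],
        opt_spread: Box::new(Some(Spread {
            opt_pattern: Some(Box::new(Pattern::Identifier("rest".into()))),
        })),
    };
    assert_eq!(bindings(&pat), vec!["x", "y", "rest"]);
}
```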