Skip to content

Commit

Permalink
fix: Correct several issues found during #4740 (#4786)
Browse files Browse the repository at this point in the history
* feat: Add a wrapper around flux's error which uses the prettier string representation

* chore: Print prettier errors when failing to compile the standard library

* feat: Allow an AnalyzerConfig to be passed when compiling the standard library

* refactor: Allow matching on MonoType::STRING etc

* feat: Allow labels to be parsed in types

* fix: Allow label types to be in the exported API of a package

* fix: Don't display bound and unbound variables the same in record's tail position

* chore: make generate

* chore: Fix benchmark
  • Loading branch information
Markus Westerlind authored May 25, 2022
1 parent 5a828bd commit 6acd449
Show file tree
Hide file tree
Showing 14 changed files with 120 additions and 58 deletions.
3 changes: 3 additions & 0 deletions libflux/flux-core/src/ast/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -609,6 +609,8 @@ pub enum MonoType {
Record(RecordType),
#[serde(rename = "FunctionType")]
Function(Box<FunctionType>),
#[serde(rename = "LabelType")]
Label(Box<StringLit>),
}

impl MonoType {
Expand All @@ -622,6 +624,7 @@ impl MonoType {
MonoType::Dict(t) => &t.base,
MonoType::Record(t) => &t.base,
MonoType::Function(t) => &t.base,
MonoType::Label(t) => &t.base,
}
}
}
Expand Down
1 change: 1 addition & 0 deletions libflux/flux-core/src/ast/walk/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -461,6 +461,7 @@ where

walk(v, Node::MonoType(&f.monotype));
}
MonoType::Label(lit) => walk(v, Node::StringLit(lit)),
},
Node::PropertyType(n) => {
walk(v, Node::from_property_key(&n.name));
Expand Down
1 change: 1 addition & 0 deletions libflux/flux-core/src/formatter/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -488,6 +488,7 @@ impl<'doc> Formatter<'doc> {
self.format_monotype(&n.monotype),
]
}
ast::MonoType::Label(label) => self.format_string_literal(label),
}
.group()
}
Expand Down
1 change: 1 addition & 0 deletions libflux/flux-core/src/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -572,6 +572,7 @@ impl<'input> Parser<'input> {
element: ty,
}))
}
TokenType::String => MonoType::Label(Box::new(self.parse_string_literal())),
_ => {
if t.lit.len() == 1 {
self.parse_tvar()
Expand Down
40 changes: 29 additions & 11 deletions libflux/flux-core/src/semantic/bootstrap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ use crate::{
types::{
MonoType, PolyType, PolyTypeHashMap, Record, RecordLabel, SemanticMap, Tvar, TvarKinds,
},
Analyzer, PackageExports,
Analyzer, AnalyzerConfig, PackageExports,
},
};

Expand All @@ -43,10 +43,16 @@ pub type SemanticPackageMap = SemanticMap<String, Package>;
/// Infers the Flux standard library given the path to the source code.
/// The prelude and the imports are returned.
#[allow(clippy::type_complexity)]
pub fn infer_stdlib_dir(path: &Path) -> Result<(PackageExports, Packages, SemanticPackageMap)> {
pub fn infer_stdlib_dir(
path: &Path,
config: AnalyzerConfig,
) -> Result<(PackageExports, Packages, SemanticPackageMap)> {
let ast_packages = parse_dir(path)?;

let mut infer_state = InferState::default();
let mut infer_state = InferState {
config,
..InferState::default()
};
let prelude = infer_state.infer_pre(&ast_packages)?;
infer_state.infer_std(&ast_packages, &prelude)?;

Expand Down Expand Up @@ -75,29 +81,35 @@ pub fn parse_dir(dir: &Path) -> io::Result<ASTPackageMap> {
// to work with either separator.
normalized_path = normalized_path.replace('\\', "/");
}
files.push(parser::parse_string(
let source = fs::read_to_string(entry.path())?;
let ast = parser::parse_string(
normalized_path
.rsplitn(2, "/stdlib/")
.collect::<Vec<&str>>()[0]
.to_owned(),
&fs::read_to_string(entry.path())?,
));
&source,
);
files.push((source, ast));
}
}
}
Ok(ast_map(files))
}

// Associates an import path with each file
fn ast_map(files: Vec<ast::File>) -> ASTPackageMap {
fn ast_map(files: Vec<(String, ast::File)>) -> ASTPackageMap {
files
.into_iter()
.fold(ASTPackageMap::new(), |mut acc, file| {
.fold(ASTPackageMap::new(), |mut acc, (source, file)| {
let path = file.name.rsplitn(2, '/').collect::<Vec<&str>>()[1].to_string();
acc.insert(
path.clone(),
ast::Package {
base: ast::BaseNode {
location: ast::SourceLocation {
source: Some(source),
..ast::SourceLocation::default()
},
..ast::BaseNode::default()
},
path,
Expand Down Expand Up @@ -154,6 +166,7 @@ struct InferState {
// types available for import
imports: Packages,
sem_pkg_map: SemanticPackageMap,
config: AnalyzerConfig,
}

impl InferState {
Expand Down Expand Up @@ -241,8 +254,13 @@ impl InferState {
.ok_or_else(|| anyhow!(r#"package import "{}" not found"#, name))?;

let env = Environment::new(prelude.into());
let mut analyzer = Analyzer::new_with_defaults(env, &mut self.imports);
let (exports, sem_pkg) = analyzer.analyze_ast(file).map_err(|err| err.error)?;
let mut analyzer = Analyzer::new(env, &mut self.imports, self.config.clone());
let (exports, sem_pkg) = analyzer.analyze_ast(file).map_err(|mut err| {
if err.error.source.is_none() {
err.error.source = file.base.location.source.clone();
}
err.error.pretty_error()
})?;

Ok((exports, sem_pkg))
}
Expand Down Expand Up @@ -342,7 +360,7 @@ pub fn stdlib(dir: &Path) -> Result<(PackageExports, FileSystemImporter<StdFS>)>

/// Compiles the stdlib found at the srcdir into the outdir.
pub fn compile_stdlib(srcdir: &Path, outdir: &Path) -> Result<()> {
let (_, imports, mut sem_pkgs) = infer_stdlib_dir(srcdir)?;
let (_, imports, mut sem_pkgs) = infer_stdlib_dir(srcdir, AnalyzerConfig::default())?;
// Write each file as compiled module
for (path, exports) in &imports {
if let Some(code) = sem_pkgs.remove(path) {
Expand Down
4 changes: 4 additions & 0 deletions libflux/flux-core/src/semantic/convert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -616,6 +616,10 @@ impl<'a> Converter<'a> {
}
r
}

ast::MonoType::Label(string_lit) => {
MonoType::Label(types::Label::from(string_lit.value.as_str()))
}
}
}

Expand Down
21 changes: 20 additions & 1 deletion libflux/flux-core/src/semantic/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -270,6 +270,19 @@ fn build_record(
(r, cons)
}

/// Wrapper around `FileErrors` which defaults to using codespan to print the errors
///
/// When the wrapped `FileErrors` carries its originating source text, `Display`
/// renders the codespan-annotated ("pretty") form; otherwise it falls back to the
/// plain `FileErrors` display. Construct via `FileErrors::pretty_error`.
#[derive(Error, Debug, PartialEq)]
pub struct PrettyFileErrors(pub FileErrors);

impl fmt::Display for PrettyFileErrors {
    /// Renders the codespan-annotated error text when the source program is
    /// attached to the wrapped errors; otherwise delegates to the plain
    /// `FileErrors` display.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let errors = &self.0;
        if let Some(source) = &errors.source {
            f.write_str(&errors.pretty(source))
        } else {
            errors.fmt(f)
        }
    }
}

/// Error represents any error that can occur during any step of the type analysis process.
#[derive(Error, Debug, PartialEq)]
pub struct FileErrors {
Expand Down Expand Up @@ -320,6 +333,12 @@ impl FileErrors {
self.pretty_config(&term::Config::default(), source)
}

/// Wraps `FileErrors` in a type which defaults to the more readable codespan error
/// representation.
///
/// Consumes `self`. The returned wrapper's `Display` implementation prints the
/// codespan-annotated form when the errors carry their source text, and falls
/// back to the plain representation otherwise.
pub fn pretty_error(self) -> PrettyFileErrors {
PrettyFileErrors(self)
}

/// Prints the errors in their short form
pub fn pretty_short(&self, source: &str) -> String {
self.pretty_config(
Expand Down Expand Up @@ -417,7 +436,7 @@ pub enum Feature {
}

/// A set of configuration options for the behavior of an Analyzer.
#[derive(Default)]
#[derive(Clone, Default, Debug)]
pub struct AnalyzerConfig {
/// Features used in the flux compiler
pub features: Vec<Feature>,
Expand Down
13 changes: 4 additions & 9 deletions libflux/flux-core/src/semantic/nodes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -409,7 +409,6 @@ impl Substituter for FinalizeTypes<'_> {
let typ = self.sub.try_apply(*tvr)?;
Some(self.visit_type(&typ).unwrap_or(typ))
}
MonoType::Label(_) => Some(MonoType::STRING),
_ => typ.walk(self),
}
}
Expand Down Expand Up @@ -817,14 +816,10 @@ impl ArrayExpr {
for el in &mut self.elements {
el.infer(infer)?;

match &elt {
None => {
elt = Some(el.type_of());
}
Some(elt) => {
infer.equal(elt, &el.type_of(), el.loc());
}
}
elt = Some(match &elt {
None => el.type_of(),
Some(elt) => infer.equal(elt, &el.type_of(), el.loc()),
});
}
let elt = elt.unwrap_or_else(|| MonoType::Var(infer.sub.fresh()));
self.typ = MonoType::arr(elt);
Expand Down
21 changes: 20 additions & 1 deletion libflux/flux-core/src/semantic/tests/labels.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ fn labels_simple() {
z = [{ a: 1, b: ""}] |> fill(column: b, value: 1.0)
"#,
exp: map![
"b" => "string",
"b" => "\"b\"",
"x" => "[{ a: string }]",
"y" => "[{ a: int, b: float }]",
"z" => "[{ a: int, b: float }]",
Expand Down Expand Up @@ -230,6 +230,25 @@ fn optional_label_defined() {
}
}

#[test]
fn label_types_are_preserved_in_exports() {
// Regression test: with the `LabelPolymorphism` feature enabled, a builtin
// whose signature constrains a type variable with `A: Label` must keep that
// label-kinded variable in the package's exported API (rather than collapsing
// it to a plain string type — see the removed `MonoType::Label(_) =>
// Some(MonoType::STRING)` substitution in nodes.rs).
test_infer! {
config: AnalyzerConfig{
features: vec![Feature::LabelPolymorphism],
..AnalyzerConfig::default()
},
src: r#"
builtin elapsed: (?timeColumn: T = "_time") => stream[{ A with T: time }]
where
A: Record,
T: Label
"#,
exp: map![
"elapsed" => r#"(?timeColumn:A = "_time") => stream[{B with A:time}] where A: Label, B: Record"#
],
}
}

#[test]
fn optional_label_undefined() {
test_error_msg! {
Expand Down
20 changes: 10 additions & 10 deletions libflux/flux-core/src/semantic/tests/vectorize.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,8 @@ fn vectorize_field_access() -> anyhow::Result<()> {

expect_test::expect![[r##"
(r) => {
return {a: r:{F with b:v[#B], a:v[#D]}.a:v[#D], b: r:{F with b:v[#B], a:v[#D]}.b:v[#B]}:{a:v[#D], b:v[#B]}
}:(r:{F with b:v[#B], a:v[#D]}) => {a:v[#D], b:v[#B]}"##]].assert_eq(&crate::semantic::formatter::format_node(
return {a: r:{#F with b:v[#B], a:v[#D]}.a:v[#D], b: r:{#F with b:v[#B], a:v[#D]}.b:v[#B]}:{a:v[#D], b:v[#B]}
}:(r:{#F with b:v[#B], a:v[#D]}) => {a:v[#D], b:v[#B]}"##]].assert_eq(&crate::semantic::formatter::format_node(
Node::FunctionExpr(function),
)?);

Expand All @@ -66,8 +66,8 @@ fn vectorize_with_construction() -> anyhow::Result<()> {

expect_test::expect![[r##"
(r) => {
return {r:{C with a:v[#B]} with b: r:{C with a:v[#B]}.a:v[#B]}:{C with b:v[#B], a:v[#B]}
}:(r:{C with a:v[#B]}) => {C with b:v[#B], a:v[#B]}"##]]
return {r:{#C with a:v[#B]} with b: r:{#C with a:v[#B]}.a:v[#B]}:{#C with b:v[#B], a:v[#B]}
}:(r:{#C with a:v[#B]}) => {#C with b:v[#B], a:v[#B]}"##]]
.assert_eq(&crate::semantic::formatter::format_node(
Node::FunctionExpr(function),
)?);
Expand All @@ -88,8 +88,8 @@ fn vectorize_even_when_another_function_fails_to_vectorize() -> anyhow::Result<(

expect_test::expect![[r##"
(r) => {
return {r:{I with a:v[#G], b:v[#G]} with x: r:{I with a:v[#G], b:v[#G]}.a:v[#G] +:v[#G] r:{I with a:v[#G], b:v[#G]}.b:v[#G]}:{I with x:v[#G], a:v[#G], b:v[#G]}
}:(r:{I with a:v[#G], b:v[#G]}) => {I with x:v[#G], a:v[#G], b:v[#G]}"##]]
return {r:{#I with a:v[#G], b:v[#G]} with x: r:{#I with a:v[#G], b:v[#G]}.a:v[#G] +:v[#G] r:{#I with a:v[#G], b:v[#G]}.b:v[#G]}:{#I with x:v[#G], a:v[#G], b:v[#G]}
}:(r:{#I with a:v[#G], b:v[#G]}) => {#I with x:v[#G], a:v[#G], b:v[#G]}"##]]
.assert_eq(&crate::semantic::formatter::format_node(
Node::FunctionExpr(function),
)?);
Expand Down Expand Up @@ -117,8 +117,8 @@ fn vectorize_addition_operator() -> anyhow::Result<()> {

expect_test::expect![[r##"
(r) => {
return {x: r:{F with a:v[#D], b:v[#D]}.a:v[#D] +:v[#D] r:{F with a:v[#D], b:v[#D]}.b:v[#D]}:{x:v[#D]}
}:(r:{F with a:v[#D], b:v[#D]}) => {x:v[#D]}"##]].assert_eq(&crate::semantic::formatter::format_node(
return {x: r:{#F with a:v[#D], b:v[#D]}.a:v[#D] +:v[#D] r:{#F with a:v[#D], b:v[#D]}.b:v[#D]}:{x:v[#D]}
}:(r:{#F with a:v[#D], b:v[#D]}) => {x:v[#D]}"##]].assert_eq(&crate::semantic::formatter::format_node(
Node::FunctionExpr(function),
)?);

Expand All @@ -133,8 +133,8 @@ fn vectorize_subtraction_operator() -> anyhow::Result<()> {

expect_test::expect![[r##"
(r) => {
return {x: r:{F with a:v[#D], b:v[#D]}.a:v[#D] -:v[#D] r:{F with a:v[#D], b:v[#D]}.b:v[#D]}:{x:v[#D]}
}:(r:{F with a:v[#D], b:v[#D]}) => {x:v[#D]}"##]].assert_eq(&crate::semantic::formatter::format_node(
return {x: r:{#F with a:v[#D], b:v[#D]}.a:v[#D] -:v[#D] r:{#F with a:v[#D], b:v[#D]}.b:v[#D]}:{x:v[#D]}
}:(r:{#F with a:v[#D], b:v[#D]}) => {x:v[#D]}"##]].assert_eq(&crate::semantic::formatter::format_node(
Node::FunctionExpr(function),
)?);

Expand Down
Loading

0 comments on commit 6acd449

Please sign in to comment.