From cd75f8b3fc825b83aa5c78bd062986d3f0ac820f Mon Sep 17 00:00:00 2001
From: Niklas Bülow
Date: Mon, 21 Mar 2022 17:14:39 +0100
Subject: [PATCH] chore: update lean-toolchain

---
 LeanInk/Analysis/DataTypes.lean        |  2 +-
 LeanInk/Analysis/SemanticToken.lean    |  2 +-
 LeanInk/Annotation/Alectryon.lean      |  6 +--
 lean-toolchain                         |  2 +-
 test/bench/Basic.lean.leanInk.expected | 66 ++++++++++++++++++++------
 5 files changed, 58 insertions(+), 20 deletions(-)

diff --git a/LeanInk/Analysis/DataTypes.lean b/LeanInk/Analysis/DataTypes.lean
index 69a4159..df48edf 100644
--- a/LeanInk/Analysis/DataTypes.lean
+++ b/LeanInk/Analysis/DataTypes.lean
@@ -17,7 +17,7 @@ class Positional (α : Type u) where
   tailPos : α -> String.Pos
 
 namespace Positional
-  def length { α : Type u } [Positional α] (self : α) : Nat := (Positional.tailPos self) - (Positional.headPos self)
+  def length { α : Type u } [Positional α] (self : α) : String.Pos := (Positional.tailPos self) - (Positional.headPos self)
 
   def smallest? { α : Type u } [Positional α] (list : List α) : Option α := List.foldl (λ a y =>
     let y : α := y -- We need to help the compiler a bit here otherwise it thinks `y : Option α`
diff --git a/LeanInk/Analysis/SemanticToken.lean b/LeanInk/Analysis/SemanticToken.lean
index e697351..296839e 100644
--- a/LeanInk/Analysis/SemanticToken.lean
+++ b/LeanInk/Analysis/SemanticToken.lean
@@ -48,7 +48,7 @@ namespace SemanticTraversalInfo
 
   def highlightKeyword (headPos tailPos : String.Pos) (stx: Syntax) : AnalysisM (List Token) := do
     if let Syntax.atom info val := stx then
-      if (val.length > 0 && val[0].isAlpha) || (val.length > 1 && val[0] = '#' && val[1].isAlpha) then
+      if (val.length > 0 && val[0].isAlpha) || (val.length > 1 && val[0] = '#' && val[⟨1⟩].isAlpha) then
        return genSemanticToken stx SemanticTokenType.keyword
     return []
 
diff --git a/LeanInk/Annotation/Alectryon.lean b/LeanInk/Annotation/Alectryon.lean
index 477d044..e3f61fc 100644
--- a/LeanInk/Annotation/Alectryon.lean
+++ b/LeanInk/Annotation/Alectryon.lean
@@ -151,7 +151,7 @@ def minPos (x y : String.Pos) := if x < y then x else y
 def maxPos (x y : String.Pos) := if x > y then x else y
 
 partial def genTokens (contents : String) (head : String.Pos) (offset : String.Pos) (l : List Token) (compounds : List (Compound Analysis.Token)) : AnalysisM (List Token) := do
-  let textTail := contents.utf8ByteSize + offset
+  let textTail := ⟨contents.utf8ByteSize⟩ + offset
   let mut head : String.Pos := head
   let mut tokens : List Token := []
   for x in compounds do
@@ -171,7 +171,7 @@ partial def genTokens (contents : String) (head : String.Pos) (offset : String.P
       match text with
       | none => logInfo s!"Empty 1 {text} {x.headPos} {tail}"
      | some text => tokens := { raw := text }::tokens
-  match extractContents offset contents head (contents.utf8ByteSize + offset) with
+  match extractContents offset contents head (⟨contents.utf8ByteSize⟩ + offset) with
   | none => return tokens.reverse
   | some x => return ({ raw := x }::tokens).reverse
 
@@ -233,7 +233,7 @@ Generates AlectryonFragments for the given CompoundFragments and input file cont
 def annotateFileWithCompounds (l : List Alectryon.Fragment) (contents : String) : List Annotation -> AnalysisM (List Fragment)
   | [] => pure l
   | x::[] => do
-    let fragment ← genFragment x contents.utf8ByteSize (contents.extract x.sentence.headPos contents.utf8ByteSize)
+    let fragment ← genFragment x ⟨contents.utf8ByteSize⟩ (contents.extract x.sentence.headPos ⟨contents.utf8ByteSize⟩)
     return l.append [fragment]
   | x::y::ys => do
     let fragment ← genFragment x y.sentence.headPos (contents.extract x.sentence.headPos (y.sentence.headPos))
diff --git a/lean-toolchain b/lean-toolchain
index 6e2f405..62c31b2 100644
--- a/lean-toolchain
+++ b/lean-toolchain
@@ -1 +1 @@
-leanprover/lean4:nightly-2022-03-07
+leanprover/lean4:nightly-2022-03-21
diff --git a/test/bench/Basic.lean.leanInk.expected b/test/bench/Basic.lean.leanInk.expected
index ca21bd7..fb0752e 100644
--- a/test/bench/Basic.lean.leanInk.expected
+++ b/test/bench/Basic.lean.leanInk.expected
@@ -3637,7 +3637,7 @@
     "docstring": null,
     "_type": "token"},
    {"typeinfo":
-    {"type": "{α : Type ?u.2688} → [self : BEq α] → α → α → Bool",
+    {"type": "{α : Type ?u.2646} → [self : BEq α] → α → α → Bool",
      "name": "BEq.beq",
      "_type": "typeinfo"},
     "semanticType": null,
@@ -20864,11 +20864,23 @@
     "_type": "token"},
    {"typeinfo": null,
     "semanticType": null,
-    "raw": ", _, ",
+    "raw": ", ",
     "link": null,
     "docstring": null,
     "_type": "token"},
-   {"typeinfo": {"type": "0 < x✝", "name": "h", "_type": "typeinfo"},
+   {"typeinfo": {"type": "Nat", "name": "_", "_type": "typeinfo"},
+    "semanticType": null,
+    "raw": "_",
+    "link": null,
+    "docstring": null,
+    "_type": "token"},
+   {"typeinfo": null,
+    "semanticType": null,
+    "raw": ", ",
+    "link": null,
+    "docstring": null,
+    "_type": "token"},
+   {"typeinfo": {"type": "0 < ?m.13614", "name": "h", "_type": "typeinfo"},
     "semanticType": "Name.Variable",
     "raw": "h",
     "link": null,
@@ -21046,7 +21058,7 @@
     "docstring": null,
     "_type": "token"},
    {"typeinfo": {"type": "a < b", "name": "this", "_type": "typeinfo"},
-    "semanticType": "Name.Variable",
+    "semanticType": "Keyword",
     "raw": "this",
     "link": null,
     "docstring": null,
@@ -22734,7 +22746,7 @@
     "_type": "token"},
    {"typeinfo":
     {"type": "Exists fun k => n + k = m", "name": "this", "_type": "typeinfo"},
-    "semanticType": "Name.Variable",
+    "semanticType": "Keyword",
     "raw": "this",
     "link": null,
     "docstring": null,
@@ -22769,7 +22781,7 @@
     "link": null,
     "docstring": null,
     "_type": "token"},
-   {"typeinfo": {"type": "succ n ≤ succ m", "name": "h", "_type": "typeinfo"},
+   {"typeinfo": {"type": "n + k = m", "name": "h", "_type": "typeinfo"},
     "semanticType": "Name.Variable",
     "raw": "h",
     "link": null,
@@ -31340,7 +31352,7 @@
     "link": null,
     "docstring": null,
     "_type": "token"},
-   {"typeinfo": {"type": "m * n = k * m", "name": "h", "_type": "typeinfo"},
+   {"typeinfo": {"type": "n * m = k * m", "name": "h", "_type": "typeinfo"},
     "semanticType": "Name.Variable",
     "raw": "h",
     "link": null,
@@ -32437,7 +32449,7 @@
     "link": null,
     "docstring": null,
     "_type": "token"},
-   {"typeinfo": {"type": "i ≤ j", "name": "h", "_type": "typeinfo"},
+   {"typeinfo": {"type": "i ≤ succ j", "name": "h", "_type": "typeinfo"},
     "semanticType": "Name.Variable",
     "raw": "h",
     "link": null,
@@ -32724,7 +32736,7 @@
     "_type": "token"},
    {"typeinfo":
     {"type": "n ^ i * 1 ≤ n ^ j * n", "name": "this", "_type": "typeinfo"},
-    "semanticType": "Name.Variable",
+    "semanticType": "Keyword",
     "raw": "this",
     "link": null,
     "docstring": null,
@@ -35295,7 +35307,7 @@
     "link": null,
     "docstring": null,
     "_type": "token"},
-   {"typeinfo": {"type": "i < succ a", "name": "h", "_type": "typeinfo"},
+   {"typeinfo": {"type": "succ i = succ a", "name": "h", "_type": "typeinfo"},
     "semanticType": "Name.Variable",
     "raw": "h",
     "link": null,
@@ -35693,7 +35705,7 @@
     "link": null,
     "docstring": null,
     "_type": "token"},
-   {"typeinfo": {"type": "succ i < succ a", "name": "h", "_type": "typeinfo"},
+   {"typeinfo": {"type": "i < succ a", "name": "h", "_type": "typeinfo"},
     "semanticType": "Name.Variable",
     "raw": "h",
     "link": null,
@@ -41670,7 +41682,20 @@
     "_type": "token"},
    {"typeinfo": null,
     "semanticType": null,
-    "raw": "\n | _, ",
+    "raw": "\n | ",
     "link": null,
     "docstring": null,
     "_type": "token"},
+   {"typeinfo":
+    {"type": "Exists fun k => a - b + k = c", "name": "_", "_type": "typeinfo"},
+    "semanticType": null,
+    "raw": "_",
+    "link": null,
+    "docstring": null,
+    "_type": "token"},
+   {"typeinfo": null,
+    "semanticType": null,
+    "raw": ", ",
+    "link": null,
+    "docstring": null,
+    "_type": "token"},
@@ -42142,7 +42167,7 @@
     "link": null,
     "docstring": null,
     "_type": "token"},
-   {"typeinfo": {"type": "d + (a - b) = c", "name": "hd", "_type": "typeinfo"},
+   {"typeinfo": {"type": "a - b + d = c", "name": "hd", "_type": "typeinfo"},
     "semanticType": "Name.Variable",
     "raw": "hd",
     "link": null,
@@ -43716,7 +43741,20 @@
     "_type": "token"},
    {"typeinfo": null,
     "semanticType": null,
-    "raw": "\n | _, ",
+    "raw": "\n | ",
     "link": null,
     "docstring": null,
     "_type": "token"},
+   {"typeinfo":
+    {"type": "Exists fun k => a + k = c + b", "name": "_", "_type": "typeinfo"},
+    "semanticType": null,
+    "raw": "_",
+    "link": null,
+    "docstring": null,
+    "_type": "token"},
+   {"typeinfo": null,
+    "semanticType": null,
+    "raw": ", ",
+    "link": null,
+    "docstring": null,
+    "_type": "token"},