From 98283df4ac8326e8285038d236eb41d679c6f23e Mon Sep 17 00:00:00 2001
From: Jakob Nybo Nissen
Date: Wed, 19 Jul 2023 07:48:18 +0200
Subject: [PATCH] Some JET fixes

---
 src/dfa.jl       |  4 ++--
 src/nfa.jl       |  2 +-
 src/re.jl        |  8 +++++---
 src/tokenizer.jl | 10 +++++-----
 4 files changed, 13 insertions(+), 11 deletions(-)

diff --git a/src/dfa.jl b/src/dfa.jl
index 4a9b2f96..8af76864 100644
--- a/src/dfa.jl
+++ b/src/dfa.jl
@@ -206,7 +206,7 @@ function validate_paths(
         if !eof
             # If they are real edges but do not overlap, or there are conflicting
             # preconditions, there is no conflict
-            overlaps(edge1, edge2) || continue
+            overlaps(edge1::Edge, edge2::Edge) || continue
         end
 
         # Now we know there is an ambiguity, so we just need to create
@@ -217,7 +217,7 @@ function validate_paths(
         final_input = if eof
             "EOF"
         else
-            repr(Char(first(intersect(edge1.labels, edge2.labels))))
+            repr(Char(first(intersect((edge1::Edge).labels, (edge2::Edge).labels))))
         end
         error(
             "Ambiguous NFA.\nAfter inputs $input_until_now, observing $final_input " *
diff --git a/src/nfa.jl b/src/nfa.jl
index d472e2ec..7d84232c 100644
--- a/src/nfa.jl
+++ b/src/nfa.jl
@@ -191,7 +191,7 @@ function re2nfa(re::RegExp.RE, predefined_actions::Dict{Symbol,Action}=Dict{Symb
     end
 
     nfa_start = NFANode()
-    nfa_final = rec!(nfa_start, re)
+    nfa_final::NFANode = rec!(nfa_start, re)
     return remove_dead_nodes(NFA(nfa_start, nfa_final))
 end
diff --git a/src/re.jl b/src/re.jl
index e430bc89..478a9014 100644
--- a/src/re.jl
+++ b/src/re.jl
@@ -55,10 +55,12 @@ end
 RE(s::AbstractString) = parse(s)
 
 function actions!(re::RE)
-    if isnothing(re.actions)
-        re.actions = Dict{Symbol, Vector{Symbol}}()
+    x = re.actions
+    if x === nothing
+        x = Dict{Symbol, Vector{Symbol}}()
+        re.actions = x
     end
-    re.actions
+    x
 end
 
 """
diff --git a/src/tokenizer.jl b/src/tokenizer.jl
index d0e597c9..8a4299a2 100644
--- a/src/tokenizer.jl
+++ b/src/tokenizer.jl
@@ -90,7 +90,7 @@ See also: [`Tokenizer`](@ref), [`tokenize`](@ref), [`compile`](@ref)
 """
 function make_tokenizer(
     machine::TokenizerMachine;
-    tokens::Tuple{E, AbstractVector{E}}=(UInt32(1):UInt32(machine.n_tokens), UInt32(0)),
+    tokens::Tuple{E, AbstractVector{E}}=(UInt32(0), UInt32(1):UInt32(machine.n_tokens)),
     goto::Bool=true,
     version::Int=1
 ) where E
@@ -120,7 +120,7 @@ function make_tokenizer(
         )
         actions[action_name] = quote
             stop = $(vars.p)
-            token = $(nonerror_tokens[parse(Int, only(m.captures))])
+            token = $(nonerror_tokens[parse(Int, something(only(m.captures)))])
         end
     end
     return quote
@@ -209,10 +209,10 @@ function make_tokenizer(
     version::Int=1,
     unambiguous=false
 ) where E
-    (regex, _tokens) = if tokens isa Vector
-        (tokens, (UInt32(0), UInt32(1):UInt32(length(tokens))))
+    (regex, _tokens) = if tokens isa AbstractVector
+        (Vector(tokens)::Vector, (UInt32(0), UInt32(1):UInt32(length(tokens))))
     else
-        (map(last, last(tokens)), (first(tokens), map(first, last(tokens))))
+        ([last(i) for i in last(tokens)]::Vector, (first(tokens), map(first, last(tokens))))
     end
     make_tokenizer(
         compile(regex; unambiguous=unambiguous);
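
For context (not part of the patch): a minimal usage sketch of the tokenizer path the hunks above touch, assuming Automa's exported make_tokenizer, tokenize, and re"" string macro behave as documented. The token regexes and input string are invented for illustration; token numbering follows the corrected default order (UInt32(0), UInt32(1):UInt32(machine.n_tokens)), with 0 reserved for unmatched input.

    using Automa

    # Three illustrative token regexes; they are numbered 1..3 in order,
    # and UInt32(0) marks unmatched (error) input.
    token_regexes = [re"[a-z]+", re"[0-9]+", re" +"]

    # make_tokenizer returns an expression defining `tokenize` for UInt32 tokens;
    # evaluating it brings the generated method into scope.
    make_tokenizer(token_regexes) |> eval

    # Each iterated element is (start index, length, token number).
    for (start, len, token) in tokenize(UInt32, "abc 123 xyz")
        println((start, len, token))
    end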