Some JET fixes
jakobnissen committed Jul 19, 2023
1 parent 65a08b4 commit 98283df
Showing 4 changed files with 13 additions and 11 deletions.
4 changes: 2 additions & 2 deletions src/dfa.jl
@@ -206,7 +206,7 @@ function validate_paths(
if !eof
# If they are real edges but do not overlap, or there are conflicting
# preconditions, there is no conflict
- overlaps(edge1, edge2) || continue
+ overlaps(edge1::Edge, edge2::Edge) || continue
end

# Now we know there is an ambiguity, so we just need to create
@@ -217,7 +217,7 @@ function validate_paths(
final_input = if eof
"EOF"
else
- repr(Char(first(intersect(edge1.labels, edge2.labels))))
+ repr(Char(first(intersect((edge1::Edge).labels, (edge2::Edge).labels))))
end
error(
"Ambiguous NFA.\nAfter inputs $input_until_now, observing $final_input " *
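
The hunks above add `::Edge` assertions at the use sites, a common way to satisfy JET when a container's element type is only inferred as `Any` or a `Union`. A minimal sketch of that pattern; `Edge` and `overlaps` here are stand-ins for illustration, not Automa's actual definitions:

struct Edge
    labels::Vector{UInt8}
end

overlaps(a::Edge, b::Edge) = !isempty(intersect(a.labels, b.labels))

function any_overlap(edges::Vector{Any})
    for (e1, e2) in zip(edges, edges[2:end])
        # The ::Edge assertions give inference (and hence JET) a concrete type,
        # so the `overlaps` call can be resolved statically.
        overlaps(e1::Edge, e2::Edge) && return true
    end
    return false
end

any_overlap(Any[Edge([0x01]), Edge([0x01, 0x02]), Edge([0x03])])
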
2 changes: 1 addition & 1 deletion src/nfa.jl
@@ -191,7 +191,7 @@ function re2nfa(re::RegExp.RE, predefined_actions::Dict{Symbol,Action}=Dict{Symb
end

nfa_start = NFANode()
- nfa_final = rec!(nfa_start, re)
+ nfa_final::NFANode = rec!(nfa_start, re)
return remove_dead_nodes(NFA(nfa_start, nfa_final))
end

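
Annotating the binding as `nfa_final::NFANode` pins the type of a local whose initializer, a recursive helper like `rec!`, inference may not be able to type precisely. A self-contained sketch of the same idea with a toy recursive builder (all names hypothetical):

mutable struct Node
    children::Vector{Node}
    Node() = new(Node[])
end

# A recursive builder whose return type inference may widen to `Any`.
function build!(parent::Node, depth::Int)
    child = Node()
    push!(parent.children, child)
    return depth == 0 ? child : build!(child, depth - 1)
end

function make_chain(depth::Int)
    root = Node()
    # The `::Node` annotation asserts a concrete type for the local, so the
    # code after this line is inferred against `Node` rather than `Any`.
    leaf::Node = build!(root, depth)
    return root, leaf
end

make_chain(3)
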
8 changes: 5 additions & 3 deletions src/re.jl
@@ -55,10 +55,12 @@ end
RE(s::AbstractString) = parse(s)

function actions!(re::RE)
- if isnothing(re.actions)
-     re.actions = Dict{Symbol, Vector{Symbol}}()
+ x = re.actions
+ if x === nothing
+     x = Dict{Symbol, Vector{Symbol}}()
+     re.actions = x
end
- re.actions
+ x
end

"""
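
The rewritten `actions!` reads the `Union{Nothing, ...}` field into a local before branching: inference can narrow a local after an `x === nothing` check, but it cannot assume a mutable struct's field is still non-`nothing` on a later read. A sketch of the same get-or-initialize pattern on a hypothetical struct:

mutable struct Holder
    actions::Union{Nothing, Dict{Symbol, Vector{Symbol}}}
end

function actions!(h::Holder)
    # Bind the field to a local: after the `=== nothing` branch, `x` is known
    # to be a Dict, whereas re-reading `h.actions` would widen back to the
    # Union on every access.
    x = h.actions
    if x === nothing
        x = Dict{Symbol, Vector{Symbol}}()
        h.actions = x
    end
    return x
end

h = Holder(nothing)
actions!(h)[:enter] = [:count]
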
10 changes: 5 additions & 5 deletions src/tokenizer.jl
@@ -90,7 +90,7 @@ See also: [`Tokenizer`](@ref), [`tokenize`](@ref), [`compile`](@ref)
"""
function make_tokenizer(
machine::TokenizerMachine;
- tokens::Tuple{E, AbstractVector{E}}=(UInt32(1):UInt32(machine.n_tokens), UInt32(0)),
+ tokens::Tuple{E, AbstractVector{E}}=(UInt32(0), UInt32(1):UInt32(machine.n_tokens)),
goto::Bool=true,
version::Int=1
) where E
@@ -120,7 +120,7 @@ function make_tokenizer(
)
actions[action_name] = quote
stop = $(vars.p)
- token = $(nonerror_tokens[parse(Int, only(m.captures))])
+ token = $(nonerror_tokens[parse(Int, something(only(m.captures)))])
end
end
return quote
@@ -209,10 +209,10 @@ function make_tokenizer(
version::Int=1,
unambiguous=false
) where E
- (regex, _tokens) = if tokens isa Vector
-     (tokens, (UInt32(0), UInt32(1):UInt32(length(tokens))))
+ (regex, _tokens) = if tokens isa AbstractVector
+     (Vector(tokens)::Vector, (UInt32(0), UInt32(1):UInt32(length(tokens))))
else
-     (map(last, last(tokens)), (first(tokens), map(first, last(tokens))))
+     ([last(i) for i in last(tokens)]::Vector, (first(tokens), map(first, last(tokens))))
end
make_tokenizer(
compile(regex; unambiguous=unambiguous);
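
Besides reordering the keyword default so it matches the declared `Tuple{E, AbstractVector{E}}` (error token first, then the token range), the hunks above wrap the regex capture in `something`: each element of `RegexMatch.captures` is typed `Union{Nothing, SubString{String}}`, and `something` unwraps it so `parse` sees a concrete string type. A standalone illustration (the pattern and input are made up for this example):

m = match(r"token_(\d+)", "token_42")
m === nothing && error("pattern did not match")

# Each element of `m.captures` is Union{Nothing, SubString{String}}, even when
# the pattern guarantees that the group matched.
capture = only(m.captures)

# `something` unwraps the non-nothing case (throwing if it really is nothing),
# so `parse` receives a concretely typed string.
index = parse(Int, something(capture))    # 42
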
