From 4c1e0525af8986098b2df84b9a3b3363af469c9e Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Mon, 24 Nov 2025 11:45:02 -0800 Subject: [PATCH 01/24] Move StaticLint code into repo --- src/StaticLint/StaticLint.jl | 409 +++++++++++ src/StaticLint/bindings.jl | 410 +++++++++++ src/StaticLint/coretypes.jl | 47 ++ src/StaticLint/exception_types.jl | 7 + src/StaticLint/imports.jl | 204 ++++++ src/StaticLint/interface.jl | 57 ++ src/StaticLint/linting/checks.jl | 1102 +++++++++++++++++++++++++++++ src/StaticLint/macros.jl | 268 +++++++ src/StaticLint/methodmatching.jl | 236 ++++++ src/StaticLint/references.jl | 316 +++++++++ src/StaticLint/scope.jl | 157 ++++ src/StaticLint/server.jl | 101 +++ src/StaticLint/subtypes.jl | 70 ++ src/StaticLint/type_inf.jl | 335 +++++++++ src/StaticLint/utils.jl | 337 +++++++++ 15 files changed, 4056 insertions(+) create mode 100644 src/StaticLint/StaticLint.jl create mode 100644 src/StaticLint/bindings.jl create mode 100644 src/StaticLint/coretypes.jl create mode 100644 src/StaticLint/exception_types.jl create mode 100644 src/StaticLint/imports.jl create mode 100644 src/StaticLint/interface.jl create mode 100644 src/StaticLint/linting/checks.jl create mode 100644 src/StaticLint/macros.jl create mode 100644 src/StaticLint/methodmatching.jl create mode 100644 src/StaticLint/references.jl create mode 100644 src/StaticLint/scope.jl create mode 100644 src/StaticLint/server.jl create mode 100644 src/StaticLint/subtypes.jl create mode 100644 src/StaticLint/type_inf.jl create mode 100644 src/StaticLint/utils.jl diff --git a/src/StaticLint/StaticLint.jl b/src/StaticLint/StaticLint.jl new file mode 100644 index 0000000..0d6a5e6 --- /dev/null +++ b/src/StaticLint/StaticLint.jl @@ -0,0 +1,409 @@ +module StaticLint + +include("exception_types.jl") + +using SymbolServer, CSTParser + +using CSTParser: EXPR, isidentifier, setparent!, valof, headof, hastrivia, parentof, isoperator, ispunctuation, to_codeobject +# CST utils +using CSTParser: 
is_getfield, isassignment, isdeclaration, isbracketed, iskwarg, iscall, iscurly, isunarycall, isunarysyntax, isbinarycall, isbinarysyntax, issplat, defines_function, is_getfield_w_quotenode, iswhere, iskeyword, isstringliteral, isparameters, isnonstdid, istuple +using SymbolServer: VarRef + +const noname = EXPR(:noname, nothing, nothing, 0, 0, nothing, nothing, nothing) + +include("coretypes.jl") +include("bindings.jl") +include("scope.jl") +include("subtypes.jl") +include("methodmatching.jl") + +const LARGE_FILE_LIMIT = 2_000_000 # bytes + +mutable struct Meta + binding::Union{Nothing,Binding} + scope::Union{Nothing,Scope} + ref::Union{Nothing,Binding,SymbolServer.SymStore} + error +end +Meta() = Meta(nothing, nothing, nothing, nothing) + +function Base.show(io::IO, m::Meta) + m.binding !== nothing && show(io, m.binding) + m.ref !== nothing && printstyled(io, " * ", color = :red) + m.scope !== nothing && printstyled(io, " new scope", color = :green) + m.error !== nothing && printstyled(io, " lint ", color = :red) +end +hasmeta(x::EXPR) = x.meta isa Meta +hasbinding(m::Meta) = m.binding isa Binding +hasref(m::Meta) = m.ref !== nothing +hasscope(m::Meta) = m.scope isa Scope +scopeof(m::Meta) = m.scope +bindingof(m::Meta) = m.binding + + +""" + ExternalEnv + +Holds a representation of an environment cached by SymbolServer. 
+""" +mutable struct ExternalEnv + symbols::SymbolServer.EnvStore + extended_methods::Dict{SymbolServer.VarRef,Vector{SymbolServer.VarRef}} + project_deps::Vector{Symbol} +end + +abstract type State end +mutable struct Toplevel{T} <: State + file::T + included_files::Vector{String} + scope::Scope + in_modified_expr::Bool + modified_exprs::Union{Nothing,Vector{EXPR}} + delayed::Vector{EXPR} + resolveonly::Vector{EXPR} + env::ExternalEnv + server + flags::Int +end + +Toplevel(file, included_files, scope, in_modified_expr, modified_exprs, delayed, resolveonly, env, server) = + Toplevel(file, included_files, scope, in_modified_expr, modified_exprs, delayed, resolveonly, env, server, 0) + +function (state::Toplevel)(x::EXPR) + resolve_import(x, state) + mark_bindings!(x, state) + add_binding(x, state) + mark_globals(x, state) + handle_macro(x, state) + s0 = scopes(x, state) + resolve_ref(x, state) + followinclude(x, state) + + old_in_modified_expr = state.in_modified_expr + if state.modified_exprs !== nothing && x in state.modified_exprs + state.in_modified_expr = true + end + if CSTParser.defines_function(x) || CSTParser.defines_macro(x) || headof(x) === :export || headof(x) === :public + if state.in_modified_expr + push!(state.delayed, x) + else + push!(state.resolveonly, x) + end + else + old = flag!(state, x) + traverse(x, state) + state.flags = old + end + + state.in_modified_expr = old_in_modified_expr + state.scope != s0 && (state.scope = s0) + return state.scope +end + +mutable struct Delayed <: State + scope::Scope + env::ExternalEnv + server + flags::Int +end + +Delayed(scope, env, server) = Delayed(scope, env, server, 0) + +function (state::Delayed)(x::EXPR) + mark_bindings!(x, state) + add_binding(x, state) + mark_globals(x, state) + handle_macro(x, state) + s0 = scopes(x, state) + + resolve_ref(x, state) + + old = flag!(state, x) + traverse(x, state) + state.flags = old + if state.scope != s0 + for b in values(state.scope.names) + infer_type_by_use(b, 
state.env) + check_unused_binding(b, state.scope) + end + state.scope = s0 + end + return state.scope +end + +mutable struct ResolveOnly <: State + scope::Scope + env::ExternalEnv + server +end + +function (state::ResolveOnly)(x::EXPR) + if hasscope(x) + s0 = state.scope + state.scope = scopeof(x) + else + s0 = state.scope + end + + # NEW: late import resolution (idempotent for already-resolved imports) + resolve_import(x, state) + + resolve_ref(x, state) + + traverse(x, state) + if state.scope != s0 + state.scope = s0 + end + return state.scope +end + +# feature flags that can disable or enable functionality further down in the CST +const NO_NEW_BINDINGS = 0x1 + +function flag!(state, x::EXPR) + old = state.flags + if CSTParser.ismacrocall(x) && (valof(x.args[1]) == "@." || valof(x.args[1]) == "@__dot__") + state.flags |= NO_NEW_BINDINGS + end + return old +end + +""" + semantic_pass(file, modified_expr=nothing) + +Performs a semantic pass across a project from the entry point `file`. A first pass traverses the top-level scope after which secondary passes handle delayed scopes (e.g. functions). These secondary passes can be, optionally, very light and only seek to resolve references (e.g. link symbols to bindings). This can be done by supplying a list of expressions on which the full secondary pass should be made (`modified_expr`), all others will receive the light-touch version. 
+""" +function semantic_pass(file, modified_expr = nothing) + server = file.server + env = getenv(file, server) + setscope!(getcst(file), Scope(nothing, getcst(file), Dict(), Dict{Symbol,Any}(:Base => env.symbols[:Base], :Core => env.symbols[:Core]), nothing)) + state = Toplevel(file, [getpath(file)], scopeof(getcst(file)), modified_expr === nothing, modified_expr, EXPR[], EXPR[], env, server) + state(getcst(file)) + for x in state.delayed + if hasscope(x) + traverse(x, Delayed(scopeof(x), env, server)) + for (k, b) in scopeof(x).names + infer_type_by_use(b, env) + check_unused_binding(b, scopeof(x)) + end + else + traverse(x, Delayed(retrieve_delayed_scope(x), env, server)) + end + end + if state.resolveonly !== nothing + for x in state.resolveonly + if hasscope(x) + traverse(x, ResolveOnly(scopeof(x), env, server)) + else + traverse(x, ResolveOnly(retrieve_delayed_scope(x), env, server)) + end + end + end +end + +""" + traverse(x, state) + +Iterates across the child nodes of an EXPR in execution order (rather than +storage order) calling `state` on each node. 
+""" +function traverse(x::EXPR, state) + if (isassignment(x) && !(CSTParser.is_func_call(x.args[1]) || CSTParser.iscurly(x.args[1]))) || CSTParser.isdeclaration(x) + state(x.args[2]) + state(x.args[1]) + elseif CSTParser.iswhere(x) + for i = 2:length(x.args) + state(x.args[i]) + end + state(x.args[1]) + elseif headof(x) === :generator || headof(x) === :filter + @inbounds for i = 2:length(x.args) + state(x.args[i]) + end + state(x.args[1]) + elseif headof(x) === :call && length(x.args) > 1 && headof(x.args[2]) === :parameters + state(x.args[1]) + @inbounds for i = 3:length(x.args) + state(x.args[i]) + end + state(x.args[2]) + elseif x.args !== nothing && length(x.args) > 0 + @inbounds for i = 1:length(x.args) + state(x.args[i]) + end + end +end + +function check_filesize(x, path) + nb = try + filesize(path) + catch + seterror!(x, FileNotAvailable) + return false + end + + toobig = nb > LARGE_FILE_LIMIT + if toobig + seterror!(x, FileTooBig) + end + return !toobig +end + +""" + followinclude(x, state) + +Checks whether the arguments of a call to `include` can be resolved to a path. +If successful it checks whether a file with that path is loaded on the server +or a file exists on the disc that can be loaded. +If this is successful it traverses the code associated with the loaded file. 
+""" +function followinclude(x, state::State) + # this runs on the `include` symbol instead of a function call so that we + # can be sure the ref has already been resolved + isinclude = isincludet = false + p = x + if isidentifier(x) && hasref(x) + r = x.meta.ref + + if is_in_fexpr(x, iscall) + p = get_parent_fexpr(x, iscall) + if r == refof_call_func(p) + isinclude = r.name == SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Base), :include) + isincludet = r.name == SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Revise), :includet) + end + end + end + + if !(isinclude || isincludet) + return + end + + x = p + + init_path = path = get_path(x, state) + if isempty(path) + elseif isabspath(path) + if hasfile(state.server, path) + elseif canloadfile(state.server, path) + if check_filesize(x, path) + loadfile(state.server, path) + else + return + end + else + path = "" + end + elseif !isempty(getpath(state.file)) && isabspath(joinpath(dirname(getpath(state.file)), path)) + # Relative path from current + if hasfile(state.server, joinpath(dirname(getpath(state.file)), path)) + path = joinpath(dirname(getpath(state.file)), path) + elseif canloadfile(state.server, joinpath(dirname(getpath(state.file)), path)) + path = joinpath(dirname(getpath(state.file)), path) + if check_filesize(x, path) + loadfile(state.server, path) + else + return + end + else + path = "" + end + elseif !isempty((basepath = _is_in_basedir(getpath(state.file)); basepath)) + # Special handling for include method used within Base + path = joinpath(basepath, path) + if hasfile(state.server, path) + # skip + elseif canloadfile(state.server, path) + loadfile(state.server, path) + else + path = "" + end + else + path = "" + end + if hasfile(state.server, path) + if path in state.included_files + seterror!(x, IncludeLoop) + return + end + f = getfile(state.server, path) + + if f.cst.fullspan > LARGE_FILE_LIMIT + seterror!(x, FileTooBig) + return + end + oldfile = state.file + state.file = f + 
push!(state.included_files, getpath(state.file)) + setroot(state.file, getroot(oldfile)) + setscope!(getcst(state.file), nothing) + state(getcst(state.file)) + state.file = oldfile + pop!(state.included_files) + elseif !is_in_fexpr(x, CSTParser.defines_function) && !isempty(init_path) + seterror!(x, MissingFile) + end +end + +""" + get_path(x::EXPR) + +Usually called on the argument to `include` calls, and attempts to determine +the path of the file to be included. Has limited support for `joinpath` calls. +""" +function get_path(x::EXPR, state) + if CSTParser.iscall(x) && length(x.args) == 2 + parg = x.args[2] + + if CSTParser.isstringliteral(parg) + if occursin("\0", valof(parg)) + seterror!(parg, IncludePathContainsNULL) + return "" + end + path = CSTParser.str_value(parg) + path = normpath(path) + Base.containsnul(path) && throw(SLInvalidPath("Couldn't convert '$x' into a valid path. Got '$path'")) + return path + elseif CSTParser.ismacrocall(parg) && valof(parg.args[1]) == "@raw_str" && CSTParser.isstringliteral(parg.args[3]) + if occursin("\0", valof(parg.args[3])) + seterror!(parg.args[3], IncludePathContainsNULL) + return "" + end + path = normpath(CSTParser.str_value(parg.args[3])) + Base.containsnul(path) && throw(SLInvalidPath("Couldn't convert '$x' into a valid path. Got '$path'")) + return path + elseif CSTParser.iscall(parg) && isidentifier(parg.args[1]) && valofid(parg.args[1]) == "joinpath" + path_elements = String[] + + for i = 2:length(parg.args) + arg = parg[i] + if _is_macrocall_to_BaseDIR(arg) # Assumes @__DIR__ points to Base macro. 
+ push!(path_elements, dirname(getpath(state.file))) + elseif CSTParser.isstringliteral(arg) + if occursin("\0", valof(arg)) + seterror!(arg, IncludePathContainsNULL) + return "" + end + push!(path_elements, string(valof(arg))) + else + return "" + end + end + isempty(path_elements) && return "" + + path = normpath(joinpath(path_elements...)) + Base.containsnul(path) && throw(SLInvalidPath("Couldn't convert '$x' into a valid path. Got '$path'")) + return path + end + end + return "" +end + +include("server.jl") +include("imports.jl") +include("references.jl") +include("macros.jl") +include("linting/checks.jl") +include("type_inf.jl") +include("utils.jl") +include("interface.jl") +end diff --git a/src/StaticLint/bindings.jl b/src/StaticLint/bindings.jl new file mode 100644 index 0000000..d32a896 --- /dev/null +++ b/src/StaticLint/bindings.jl @@ -0,0 +1,410 @@ +""" +Bindings indicate that an `EXPR` _may_ introduce a new name into the current scope/namespace. +Struct fields: +* `name`: the `EXPR` that defines the unqualified name of the binding. +* `val`: what the binding points to, either a `Binding` (indicating ..), `EXPR` (this is generally the expression that defines the value) or `SymStore`. +* `type`: the type of the binding, either a `Binding`, `EXPR`, or `SymStore`. +* `refs`: a list containing all references that have been made to the binding. +""" +mutable struct Binding + name::EXPR + val::Union{Binding,EXPR,SymbolServer.SymStore,Nothing} + type::Union{Binding,SymbolServer.SymStore,Nothing} + refs::Vector{Any} + is_public::Bool +end +Binding(x::EXPR) = Binding(CSTParser.get_name(x), x, nothing, [], false) +Binding(name, val, type, refs) = Binding(name, val, type, refs, false) + +function Base.show(io::IO, b::Binding) + printstyled(io, " Binding(", to_codeobject(b.name), + b.is_public ? "แต–" : "", + b.type === nothing ? "" : "::($(b.type.name))", + b.refs isa Vector ? 
" ($(length(b.refs)) refs))" : ")", color=:blue) +end + +hasbinding(x::EXPR) = hasmeta(x) && hasbinding(x.meta) +bindingof(x) = nothing +bindingof(x::EXPR) = bindingof(x.meta) + + +hasref(x::EXPR) = hasmeta(x) && hasref(x.meta) +refof(x::EXPR) = hasmeta(x) ? x.meta.ref : nothing + + +function gotoobjectofref(x::EXPR) + r = refof(x) + if r isa SymbolServer.SymStore + return r + elseif r isa Binding + + end +end + + +""" + mark_bindings!(x::EXPR, state) + +Checks whether the expression `x` should introduce new names and marks them as needed. Generally this marks expressions that would introdce names to the current scope (i.e. that x sits in) but in cases marks expressions that will add names to lower scopes. This is done when it is not knowable that a child node of `x` will introduce a new name without the context of where it sits in `x` -for example the arguments of the signature of a function definition. +""" +function mark_bindings!(x::EXPR, state) + if hasbinding(x) + return + end + if !hasmeta(x) + x.meta = Meta() + end + if isassignment(x) + if CSTParser.is_func_call(x.args[1]) + name = CSTParser.get_name(x) + mark_binding!(x) + mark_sig_args!(x.args[1]) + elseif CSTParser.iscurly(x.args[1]) + mark_typealias_bindings!(x) + elseif !is_getfield(x.args[1]) && state.flags & NO_NEW_BINDINGS == 0 + mark_binding!(x.args[1], x) + end + elseif CSTParser.defines_anon_function(x) + mark_binding!(x.args[1], x) + elseif CSTParser.iswhere(x) + for i = 2:length(x.args) + mark_binding!(x.args[i]) + end + elseif headof(x) === :for + markiterbinding!(x.args[2]) + elseif headof(x) === :generator || headof(x) === :filter + for i = 2:length(x.args) + markiterbinding!(x.args[i]) + end + elseif headof(x) === :do + for i in 1:length(x.args[2].args) + mark_binding!(x.args[2].args[i]) + end + elseif headof(x) === :function || headof(x) === :macro + name = CSTParser.get_name(x) + x.meta.binding = Binding(name, x, CoreTypes.Function, []) + if isidentifier(name) && headof(x) === :macro + 
setref!(name, bindingof(x)) + end + mark_sig_args!(CSTParser.get_sig(x)) + elseif CSTParser.defines_module(x) + x.meta.binding = Binding(x.args[2], x, CoreTypes.Module, []) + setref!(x.args[2], bindingof(x)) + elseif headof(x) === :try && isidentifier(x.args[2]) + mark_binding!(x.args[2]) + setref!(x.args[2], bindingof(x.args[2])) + elseif CSTParser.defines_datatype(x) + name = CSTParser.get_name(x) + x.meta.binding = Binding(name, x, CoreTypes.DataType, []) + kwdef = parentof(x) isa EXPR && _points_to_Base_macro(parentof(x).args[1], Symbol("@kwdef"), state) + if isidentifier(name) + setref!(name, bindingof(x)) + end + mark_parameters(CSTParser.get_sig(x)) + if CSTParser.defines_struct(x) # mark field block + for arg in x.args[3].args + CSTParser.defines_function(arg) && continue + if arg.head === :const + arg = arg.args[1] + end + if kwdef && CSTParser.isassignment(arg) + arg = arg.args[1] + end + mark_binding!(arg) + end + end + elseif headof(x) === :local + for i = 1:length(x.args) + if isidentifier(x.args[i]) + mark_binding!(x.args[i]) + setref!(x.args[i], bindingof(x.args[i])) + end + end + end +end + + +function mark_binding!(x::EXPR, val=x) + if CSTParser.iskwarg(x) || (CSTParser.isdeclaration(x) && CSTParser.istuple(x.args[1])) + mark_binding!(x.args[1], x) + elseif CSTParser.istuple(x) || CSTParser.isparameters(x) + for arg in x.args + mark_binding!(arg, val) + end + elseif CSTParser.isbracketed(x) + mark_binding!(CSTParser.rem_invis(x), val) + elseif CSTParser.issplat(x) + mark_binding!(x.args[1], x) + elseif !(isunarysyntax(x) && valof(headof(x)) == "::") + if !hasmeta(x) + x.meta = Meta() + end + x.meta.binding = Binding(CSTParser.get_name(x), val, nothing, []) + end + return x +end + +function mark_parameters(sig::EXPR, params = String[]) + if CSTParser.issubtypedecl(sig) + mark_parameters(sig.args[1], params) + elseif iswhere(sig) + for i = 2:length(sig.args) + x = mark_binding!(sig.args[i]) + val = valof(bindingof(x).name) + if val isa String + 
push!(params, val) + end + end + mark_parameters(sig.args[1], params) + elseif CSTParser.iscurly(sig) + for i = 2:length(sig.args) + x = mark_binding!(sig.args[i]) + if bindingof(x) isa Binding && valof(bindingof(x).name) in params + # Don't mark a new binding if a parameter has already been + # introduced from a :where + x.meta.binding = nothing + end + end + end + sig +end + + +function markiterbinding!(iter::EXPR) + if CSTParser.isassignment(iter) + mark_binding!(iter.args[1], iter) + elseif CSTParser.iscall(iter) && CSTParser.isoperator(iter.args[1]) && (valof(iter.args[1]) == "in" || valof(iter.args[1]) == "โˆˆ") + mark_binding!(iter.args[2], iter) + elseif headof(iter) === :block + for i = 1:length(iter.args) + markiterbinding!(iter.args[i]) + end + end + return iter +end + +function mark_sig_args!(x::EXPR) + if CSTParser.iscall(x) || CSTParser.istuple(x) + if x.args !== nothing && length(x.args) > 0 + if CSTParser.isbracketed(x.args[1]) && length(x.args[1].args) > 0 && CSTParser.isdeclaration(x.args[1].args[1]) + mark_binding!(x.args[1].args[1]) + end + for i = (CSTParser.iscall(x) ? 
2 : 1):length(x.args) + a = x.args[i] + if CSTParser.isparameters(a) + for j = 1:length(a.args) + aa = a.args[j] + mark_binding!(aa) + end + elseif CSTParser.ismacrocall(a) && CSTParser.isidentifier(a.args[1]) && valofid(a.args[1]) == "@nospecialize" && length(a.args) == 3 + mark_binding!(a.args[3]) + else + mark_binding!(a) + end + end + end + elseif CSTParser.iswhere(x) + for i in 2:length(x.args) + mark_binding!(x.args[i]) + end + mark_sig_args!(x.args[1]) + elseif CSTParser.isbracketed(x) + mark_sig_args!(x.args[1]) + elseif CSTParser.isdeclaration(x) + mark_sig_args!(x.args[1]) + elseif CSTParser.isbinarycall(x) + mark_binding!(x.args[1]) + mark_binding!(x.args[2]) + elseif CSTParser.isunarycall(x) && length(x.args) == 2 && (CSTParser.isbracketed(x.args[2]) || CSTParser.isdeclaration(x.args[2])) + mark_binding!(x.args[2]) + end +end + +function mark_typealias_bindings!(x::EXPR) + if !hasmeta(x) + x.meta = Meta() + end + x.meta.binding = Binding(CSTParser.get_name(x.args[1]), x, CoreTypes.DataType, []) + setscope!(x, Scope(x)) + for i = 2:length(x.args[1].args) + arg = x.args[1].args[i] + if isidentifier(arg) + mark_binding!(arg) + elseif CSTParser.issubtypedecl(arg) && isidentifier(arg.args[1]) + mark_binding!(arg.args[1]) + end + end + return x +end + +function is_in_funcdef(x) + if !(parentof(x) isa EXPR) + return false + elseif CSTParser.iswhere(parentof(x)) || CSTParser.isbracketed(parentof(x)) + return is_in_funcdef(parentof(x)) + elseif headof(parentof(x)) === :function || CSTParser.isassignment(parentof(x)) + return true + else + return false + end +end + +rem_wheres_subs_decls(x::EXPR) = (iswhere(x) || isdeclaration(x) || CSTParser.issubtypedecl(x)) ? 
rem_wheres_subs_decls(x.args[1]) : x + +function _in_func_or_struct_def(x::EXPR) + # only called in :where + # check 1st arg contains a call (or op call) + ex = rem_wheres_subs_decls(x.args[1]) + is_in_fexpr(x, CSTParser.defines_struct) || ((CSTParser.iscall(ex) || CSTParser.is_getfield(ex) || CSTParser.isunarycall(ex)) && is_in_funcdef(x)) +end + +""" + add_binding(x, state, scope=state.scope) + +Add the binding of `x` to the current scope. Special handling is required for: +* macros: to prefix the `@` +* functions: These are added to the top-level scope unless this syntax is used to define a closure within a function. If a function with the same name already exists in the scope then it is not replaced. This enables the `refs` list of the Binding of that 'root method' to hold a method table, the name of the new function will resolve to the binding of the root method (to get a list of actual methods -`[get_method(ref) for ref in binding.refs if get_method(ref) !== nothing]`). For example +```julia +[1] f() = 1 +[2] f(x) = 2 +``` +[1] is the root method and the name of [2] resolves to the binding of [1]. Functions declared with qualified names require special handling, there are comments in the source. + +Some simple type inference is run. +""" +function add_binding(x, state, scope=state.scope) + if bindingof(x) isa Binding + b = bindingof(x) + if isidentifier(b.name) + name = valofid(b.name) + elseif CSTParser.ismacroname(b.name) # must be getfield + name = string(to_codeobject(b.name)) + elseif isoperator(b.name) + name = valof(b.name) + else + return + end + # check for global marker + if isglobal(name, scope) + scope = _get_global_scope(state.scope) + end + + if CSTParser.defines_macro(x) + scope.names[string("@", name)] = b + mn = CSTParser.get_name(x) + if isidentifier(mn) + setref!(mn, b) + end + elseif defines_function(x) + # TODO: Need to do check that we're not in a closure. 
+ tls = retrieve_toplevel_or_func_scope(scope) + tls === nothing && return @warn "top-level scope not retrieved" + if name_is_getfield(b.name) + resolve_ref(parentof(parentof(b.name)).args[1], scope, state) + lhs_ref = refof_maybe_getfield(parentof(parentof(b.name)).args[1]) + if lhs_ref isa SymbolServer.ModuleStore && haskey(lhs_ref.vals, Symbol(name)) + # Overloading + if haskey(tls.names, name) && eventually_overloads(tls.names[name], lhs_ref.vals[Symbol(name)], state) + # Though we're explicitly naming a function for overloading, it has already been imported to the toplevel scope. + if !hasref(b.name) + setref!(b.name, tls.names[name]) # Add ref to previous overload + overload_method(tls, b, VarRef(lhs_ref.name, Symbol(name))) + end + # Do nothing, get_name(x) will resolve to the root method + elseif isexportedby(name, lhs_ref) + # Name is already available + tls.names[name] = b + if !hasref(b.name) # Is this an appropriate indicator that we've not marked the overload? + push!(b.refs, maybe_lookup(lhs_ref[Symbol(name)], state)) + setref!(b.name, b) # we actually set the rhs of the qualified name to point to this binding + end + else + # Mark as overloaded so that calls to `M.f()` resolve properly. + overload_method(tls, b, VarRef(lhs_ref.name, Symbol(name))) # Add to overloaded list but not scope. + end + elseif lhs_ref isa Binding && CoreTypes.ismodule(lhs_ref.type) + if hasscope(lhs_ref.val) && haskey(scopeof(lhs_ref.val).names, name) + # Don't need to do anything, name will resolve + end + end + else + if scopehasbinding(tls, name) + + existing_binding = tls.names[name] + if existing_binding isa Binding && (existing_binding.val isa Binding || existing_binding.val isa SymbolServer.FunctionStore || existing_binding.val isa SymbolServer.DataTypeStore) + # Should possibly be a while statement + # If the .val is as above the Binding likely won't have a proper type attached + # so lets use the .val instead. 
+ existing_binding = existing_binding.val + end + if (existing_binding isa Binding && ((CoreTypes.isfunction(existing_binding.type) || CoreTypes.isdatatype(existing_binding.type))) || existing_binding isa SymbolServer.FunctionStore || existing_binding isa SymbolServer.DataTypeStore) + # do nothing name of `x` will resolve to the root method + else + seterror!(x, CannotDefineFuncAlreadyHasValue) + end + else + scope.names[name] = b + if !hasref(b.name) + setref!(b.name, b) + end + end + if CSTParser.defines_struct(scope.expr) && parentof(scope) isa Scope + # hoist binding for inner constructor to parent scope + return add_binding(x, state, parentof(scope)) + end + end + elseif scopehasbinding(scope, name) + # TODO: some checks about rebinding of consts + check_const_decl(name, b, scope) + + scope.names[name] = b + elseif is_soft_scope(scope) && parentof(scope) isa Scope && isidentifier(b.name) && scopehasbinding(parentof(scope), valofid(b.name)) && !enforce_hard_scope(x, scope) + add_binding(x, state, scope.parent) + else + scope.names[name] = b + end + infer_type(b, scope, state) + elseif bindingof(x) isa SymbolServer.SymStore + scope.names[valofid(x)] = bindingof(x) + end +end + +function enforce_hard_scope(x::EXPR, scope) + scope.expr.head === :for && is_in_fexpr(x, x-> x == scope.expr.args[1]) +end + +name_is_getfield(x) = parentof(x) isa EXPR && parentof(parentof(x)) isa EXPR && CSTParser.is_getfield_w_quotenode(parentof(parentof(x))) + + +""" +eventually_overloads(b, x, state) + + +""" +eventually_overloads(b::Binding, ss::SymbolServer.SymStore, state) = b.val == ss || (b.refs !== nothing && length(b.refs) > 0 && first(b.refs) == ss) +eventually_overloads(b::Binding, ss::SymbolServer.VarRef, state) = eventually_overloads(b, maybe_lookup(ss, state), state) +eventually_overloads(b, ss, state) = false + +isglobal(name, scope) = false +isglobal(name::String, scope) = scope !== nothing && scopehasbinding(scope, "#globals") && name in scope.names["#globals"].refs + 
+function mark_globals(x::EXPR, state) + if headof(x) === :global + if !scopehasbinding(state.scope, "#globals") + state.scope.names["#globals"] = Binding(EXPR(:IDENTIFIER, EXPR[], nothing, 0, 0, "#globals", nothing, nothing), nothing, nothing, []) + end + for i = 2:length(x.args) + if isidentifier(x.args[i]) && !scopehasbinding(state.scope, valofid(x.args[i])) + push!(state.scope.names["#globals"].refs, valofid(x.args[i])) + end + end + end +end + +function name_extends_imported_method(b::Binding) + if CoreTypes.isfunction(b.type) && CSTParser.hasparent(b.name) && CSTParser.is_getfield(parentof(b.name)) + if refof_maybe_getfield(parentof(b.name)[1]) !== nothing + + end + end +end diff --git a/src/StaticLint/coretypes.jl b/src/StaticLint/coretypes.jl new file mode 100644 index 0000000..293f7a1 --- /dev/null +++ b/src/StaticLint/coretypes.jl @@ -0,0 +1,47 @@ +baremodule CoreTypes # Convenience +using ..SymbolServer +using Base: ==, @static + +const Any = SymbolServer.stdlibs[:Core][:Any] +const DataType = SymbolServer.stdlibs[:Core][:DataType] +const Function = SymbolServer.stdlibs[:Core][:Function] +const Module = SymbolServer.stdlibs[:Core][:Module] +const String = SymbolServer.stdlibs[:Core][:String] +const Char = SymbolServer.stdlibs[:Core][:Char] +const Symbol = SymbolServer.stdlibs[:Core][:Symbol] +const Bool = SymbolServer.stdlibs[:Core][:Bool] +const Int = SymbolServer.stdlibs[:Core][:Int] +const UInt8 = SymbolServer.stdlibs[:Core][:UInt8] +const UInt16 = SymbolServer.stdlibs[:Core][:UInt16] +const UInt32 = SymbolServer.stdlibs[:Core][:UInt32] +const UInt64 = SymbolServer.stdlibs[:Core][:UInt64] +const Float64 = SymbolServer.stdlibs[:Core][:Float64] +const Vararg = SymbolServer.FakeTypeName(Core.Vararg) + +iscoretype(x, name) = false +iscoretype(x::SymbolServer.VarRef, name) = x isa SymbolServer.DataTypeStore && x.name.name == name && x.name isa SymbolServer.VarRef && x.name.parent.name == :Core +iscoretype(x::SymbolServer.DataTypeStore, name) = x isa 
SymbolServer.DataTypeStore && x.name.name.name == name && x.name.name isa SymbolServer.VarRef && x.name.name.parent.name == :Core +isdatatype(x) = iscoretype(x, :DataType) +isfunction(x) = iscoretype(x, :Function) +ismodule(x) = iscoretype(x, :Module) +isstring(x) = iscoretype(x, :String) +ischar(x) = iscoretype(x, :Char) +issymbol(x) = iscoretype(x, :Symbol) +@static if Core.Int == Core.Int64 + isint(x) = iscoretype(x, :Int64) +else + isint(x) = iscoretype(x, :Int32) +end +isfloat(x) = iscoretype(x, :Float64) +isvector(x) = iscoretype(x, :Vector) +isarray(x) = iscoretype(x, :Array) +isva(x::SymbolServer.FakeUnionAll) = isva(x.body) +@static if Core.Vararg isa Core.Type + function isva(x) + return (x isa SymbolServer.FakeTypeName && x.name.name == :Vararg && + x.name.parent isa SymbolServer.VarRef && x.name.parent.name == :Core) + end +else + isva(x) = x isa SymbolServer.FakeTypeofVararg +end +end diff --git a/src/StaticLint/exception_types.jl b/src/StaticLint/exception_types.jl new file mode 100644 index 0000000..6f1df07 --- /dev/null +++ b/src/StaticLint/exception_types.jl @@ -0,0 +1,7 @@ +struct SLInvalidPath <: Exception + msg::AbstractString +end + +function Base.showerror(io::IO, ex::SLInvalidPath) + print(io, ex.msg) +end \ No newline at end of file diff --git a/src/StaticLint/imports.jl b/src/StaticLint/imports.jl new file mode 100644 index 0000000..f665a85 --- /dev/null +++ b/src/StaticLint/imports.jl @@ -0,0 +1,204 @@ +function resolve_import_block(x::EXPR, state::State, root, usinged, markfinal=true) + if x.head == :as + resolve_import_block(x.args[1], state, root, usinged, markfinal) + if x.args[2].meta === nothing + x.args[2].meta = Meta() + end + if hasbinding(last(x.args[1].args)) && CSTParser.isidentifier(x.args[2]) + lhsbinding = bindingof(last(x.args[1].args)) + x.args[2].meta.binding = Binding(x.args[2], lhsbinding.val, lhsbinding.type, lhsbinding.refs) + setref!(x.args[2], bindingof(x.args[2])) + last(x.args[1].args).meta.binding = nothing + end 
+ return + end + n = length(x.args) + for i = 1:length(x.args) + arg = x.args[i] + if isoperator(arg) && valof(arg) == "." + # Leading dots. Can only be leading elements. + if root == getsymbols(state) + root = state.scope + elseif root isa Scope && parentof(root) !== nothing + root = parentof(root) + else + # Too many dots + seterror!(arg, RelativeImportTooManyDots) + return + end + elseif isidentifier(arg) || (i == n && (CSTParser.ismacroname(arg) || isoperator(arg))) + cand = hasref(arg) ? refof(arg) : _get_field(root, arg, state) + if cand === nothing + # Cannot resolve now (e.g. sibling not yet defined). Schedule a retry. + if state isa Toplevel + # the import/using expression + imp = StaticLint.get_parent_fexpr(arg, y -> headof(y) === :using || headof(y) === :import) + #imp !== nothing && push!(state.resolveonly, imp) + imp !== nothing && (imp โˆˆ state.resolveonly || push!(state.resolveonly, imp)) + # the enclosing module (so we re-resolve refs within it) + mod = StaticLint.maybe_get_parent_fexpr(imp, CSTParser.defines_module) + #mod !== nothing && push!(state.resolveonly, mod) + mod !== nothing && (mod โˆˆ state.resolveonly || push!(state.resolveonly, mod)) + end + return + end + root = maybe_lookup(cand, state) + setref!(arg, root) + if i == n + markfinal && _mark_import_arg(arg, root, state, usinged) + return refof(arg) + end + else + return + end + end +end + +function resolve_import(x::EXPR, state::State, root=getsymbols(state)) + if (headof(x) === :using || headof(x) === :import) + usinged = (headof(x) === :using) + if length(x.args) > 0 && isoperator(headof(x.args[1])) && valof(headof(x.args[1])) == ":" + root2 = resolve_import_block(x.args[1].args[1], state, root, false, false) + if root2 === nothing + # schedule a retry like above + if state isa Toplevel + push!(state.resolveonly, x) + mod = StaticLint.maybe_get_parent_fexpr(x, CSTParser.defines_module) + mod !== nothing && push!(state.resolveonly, mod) + end + return + end + for i = 
2:length(x.args[1].args) + resolve_import_block(x.args[1].args[i], state, root2, usinged) + end + else + for i = 1:length(x.args) + resolve_import_block(x.args[i], state, root, usinged) + end + end + end +end + +function _mark_import_arg(arg, par, state, usinged) + if par !== nothing && CSTParser.is_id_or_macroname(arg) + if par isa Binding # mark reference to binding + push!(par.refs, arg) + end + if par isa SymbolServer.VarRef + par = SymbolServer._lookup(par, getsymbols(state), true) + !(par isa SymbolServer.SymStore) && return + end + if bindingof(arg) === nothing + if !hasmeta(arg) + arg.meta = Meta() + end + arg.meta.binding = Binding(arg, par, _typeof(par, state), []) + setref!(arg, bindingof(arg)) + end + + if usinged + if par isa SymbolServer.ModuleStore + add_to_imported_modules(state.scope, Symbol(valofid(arg)), par) + elseif par isa Binding && par.val isa SymbolServer.ModuleStore + add_to_imported_modules(state.scope, Symbol(valofid(arg)), par.val) + elseif par isa Binding && par.val isa EXPR && CSTParser.defines_module(par.val) + add_to_imported_modules(state.scope, Symbol(valofid(arg)), scopeof(par.val)) + elseif par isa Binding && par.val isa Binding && par.val.val isa EXPR && CSTParser.defines_module(par.val.val) + add_to_imported_modules(state.scope, Symbol(valofid(arg)), scopeof(par.val.val)) + end + else + # import binds the name in the current scope + state.scope.names[valofid(arg)] = bindingof(arg) + end + end +end + +function has_workspace_package(server, name) + haskey(server.workspacepackages, name) && + hasscope(getcst(server.workspacepackages[name])) && + haskey(scopeof(getcst(server.workspacepackages[name])).names, name) && + scopeof(getcst(server.workspacepackages[name])).names[name] isa Binding && + scopeof(getcst(server.workspacepackages[name])).names[name].val isa EXPR && + CSTParser.defines_module(scopeof(getcst(server.workspacepackages[name])).names[name].val) +end + +function add_to_imported_modules(scope::Scope, name::Symbol, val) 
+ if scope.modules isa Dict + scope.modules[name] = val + else + scope.modules = Dict{Symbol,Any}(name => val) + end +end +no_modules_above(s::Scope) = !CSTParser.defines_module(s.expr) || s.parent === nothing || no_modules_above(s.parent) +function get_named_toplevel_module(s, name) + return nothing +end +function get_named_toplevel_module(s::Scope, name::String) + if CSTParser.defines_module(s.expr) + m_name = CSTParser.get_name(s.expr) + if ((headof(m_name) === :IDENTIFIER && valof(m_name) == name) || headof(m_name) === :NONSTDIDENTIFIER && length(m_name.args) == 2 && valof(m_name.args[2]) == name) && no_modules_above(s) + return s.expr + end + end + if s.parent isa Scope + return get_named_toplevel_module(s.parent, name) + end + return nothing +end +function _get_field(par, arg, state) + arg_str_rep = CSTParser.str_value(arg) + if par isa SymbolServer.EnvStore + if (arg_scope = retrieve_scope(arg)) !== nothing && (tlm = get_named_toplevel_module(arg_scope, arg_str_rep)) !== nothing && hasbinding(tlm) + return bindingof(tlm) + # elseif has_workspace_package(state.server, arg_str_rep) + # return scopeof(getcst(state.server.workspacepackages[arg_str_rep])).names[arg_str_rep] + elseif haskey(par, Symbol(arg_str_rep)) + if isempty(state.env.project_deps) || Symbol(arg_str_rep) in state.env.project_deps + return par[Symbol(arg_str_rep)] + end + end + elseif par isa SymbolServer.ModuleStore # imported module + if Symbol(arg_str_rep) === par.name.name + return par + elseif haskey(par, Symbol(arg_str_rep)) + par = par[Symbol(arg_str_rep)] + if par isa SymbolServer.VarRef # reference to dependency + return SymbolServer._lookup(par, getsymbols(state), true) + end + return par + end + for used_module_name in par.used_modules + used_module = maybe_lookup(par[used_module_name], state) + if used_module isa SymbolServer.ModuleStore && isexportedby(Symbol(arg_str_rep), used_module) + return used_module[Symbol(arg_str_rep)] + end + end + elseif par isa Scope + if 
scopehasbinding(par, arg_str_rep) + return par.names[arg_str_rep] + elseif par.modules !== nothing + for used_module in values(par.modules) + if used_module isa SymbolServer.ModuleStore && isexportedby(Symbol(arg_str_rep), used_module) + return maybe_lookup(used_module[Symbol(arg_str_rep)], state) + elseif used_module isa Scope && scope_exports(used_module, arg_str_rep, state) + return used_module.names[arg_str_rep] + end + end + end + elseif par isa Binding + if par.val isa Binding + return _get_field(par.val, arg, state) + elseif par.val isa EXPR && CSTParser.defines_module(par.val) && scopeof(par.val) isa Scope + return _get_field(scopeof(par.val), arg, state) + elseif par.val isa EXPR && isassignment(par.val) + if hasref(par.val.args[2]) + return _get_field(refof(par.val.args[2]), arg, state) + elseif is_getfield_w_quotenode(par.val.args[2]) + return _get_field(refof_maybe_getfield(par.val.args[2]), arg, state) + end + elseif par.val isa SymbolServer.ModuleStore + return _get_field(par.val, arg, state) + end + end + return +end diff --git a/src/StaticLint/interface.jl b/src/StaticLint/interface.jl new file mode 100644 index 0000000..94ff4ee --- /dev/null +++ b/src/StaticLint/interface.jl @@ -0,0 +1,57 @@ +function setup_server(env = dirname(SymbolServer.Pkg.Types.Context().env.project_file), depot = first(SymbolServer.Pkg.depots()), cache = joinpath(dirname(pathof(SymbolServer)), "..", "store")) + server = StaticLint.FileServer() + ssi = SymbolServerInstance(depot, cache) + _, symbols = SymbolServer.getstore(ssi, env) + extended_methods = SymbolServer.collect_extended_methods(symbols) + server.external_env = ExternalEnv(symbols, extended_methods, Symbol[]) + server +end + +""" + lint_string(s, server; gethints = false) + +Parse a string and run a semantic pass over it. This will mark scopes, bindings, +references, and lint hints. An annotated `EXPR` is returned or, if `gethints = true`, +it is paired with a collected list of errors/hints. 
+""" +function lint_string(s::String, server = setup_server(); gethints = false) + empty!(server.files) + f = File("", s, CSTParser.parse(s, true), nothing, server) + env = getenv(f, server) + setroot(f, f) + setfile(server, "", f) + semantic_pass(f) + check_all(f.cst, LintOptions(), env) + if gethints + return f.cst, [(x, string(haserror(x) ? LintCodeDescriptions[x.meta.error] : "Missing reference", " at offset ", offset)) for (offset, x) in collect_hints(f.cst, env)] + else + return f.cst + end +end + +""" + lint_file(rootpath, server) + +Read a file from disc, parse and run a semantic pass over it. The file should be the +root of a project, e.g. for this package that file is `src/StaticLint.jl`. Other files +in the project will be loaded automatically (calls to `include` with complicated arguments +are not handled, see `followinclude` for details). A `FileServer` will be returned +containing the `File`s of the package. +""" +function lint_file(rootpath, server = setup_server(); gethints = false) + empty!(server.files) + root = loadfile(server, rootpath) + semantic_pass(root) + for f in values(server.files) + check_all(f.cst, LintOptions(), getenv(f, server)) + end + if gethints + hints = [] + for (p,f) in server.files + append!(hints, [(x, string(haserror(x) ? 
LintCodeDescriptions[x.meta.error] : "Missing reference", " at offset ", offset, " of ", p)) for (offset, x) in collect_hints(f.cst, getenv(f, server))]) + end + return root, hints + else + return root + end +end diff --git a/src/StaticLint/linting/checks.jl b/src/StaticLint/linting/checks.jl new file mode 100644 index 0000000..9d89981 --- /dev/null +++ b/src/StaticLint/linting/checks.jl @@ -0,0 +1,1102 @@ +@enum( + LintCodes, + + MissingRef, + IncorrectCallArgs, + IncorrectIterSpec, + NothingEquality, + NothingNotEq, + ConstIfCondition, + EqInIfConditional, + PointlessOR, + PointlessAND, + UnusedBinding, + InvalidTypeDeclaration, + UnusedTypeParameter, + IncludeLoop, + MissingFile, + InvalidModuleName, + TypePiracy, + UnusedFunctionArgument, + CannotDeclareConst, + InvalidRedefofConst, + NotEqDef, + KwDefaultMismatch, + InappropriateUseOfLiteral, + ShouldBeInALoop, + TypeDeclOnGlobalVariable, + UnsupportedConstLocalVariable, + UnassignedKeywordArgument, + CannotDefineFuncAlreadyHasValue, + DuplicateFuncArgName, + IncludePathContainsNULL, + IndexFromLength, + FileTooBig, + FileNotAvailable, + RelativeImportTooManyDots, +) + +const LintCodeDescriptions = Dict{LintCodes,String}( + IncorrectCallArgs => "Possible method call error.", + IncorrectIterSpec => "A loop iterator has been used that will likely error.", + NothingEquality => "Compare against `nothing` using `isnothing` or `===`", + NothingNotEq => "Compare against `nothing` using `!isnothing` or `!==`", + ConstIfCondition => "A boolean literal has been used as the conditional of an if statement - it will either always or never run.", + EqInIfConditional => "Unbracketed assignment in if conditional statements is not allowed, did you mean to use ==?", + PointlessOR => "The first argument of a `||` call is a boolean literal.", + PointlessAND => "The first argument of a `&&` call is a boolean literal.", + UnusedBinding => "Variable has been assigned but not used.", + InvalidTypeDeclaration => "A non-DataType has 
been used in a type declaration statement.", + UnusedTypeParameter => "A DataType parameter has been specified but not used.", + IncludeLoop => "Loop detected, this file has already been included.", + MissingFile => "The included file can not be found.", + InvalidModuleName => "Module name matches that of its parent.", + TypePiracy => "An imported function has been extended without using module defined typed arguments.", + UnusedFunctionArgument => "An argument is included in a function signature but not used within its body.", + CannotDeclareConst => "Cannot declare constant; it already has a value.", + InvalidRedefofConst => "Invalid redefinition of constant.", + NotEqDef => "`!=` is defined as `const != = !(==)` and should not be overloaded. Overload `==` instead.", + KwDefaultMismatch => "The default value provided does not match the specified argument type.", + InappropriateUseOfLiteral => "You really shouldn't be using a literal value here.", + ShouldBeInALoop => "`break` or `continue` used outside loop.", + TypeDeclOnGlobalVariable => "Type declarations on global variables are not yet supported.", + UnsupportedConstLocalVariable => "Unsupported `const` declaration on local variable.", + UnassignedKeywordArgument => "Keyword argument not assigned.", + CannotDefineFuncAlreadyHasValue => "Cannot define function ; it already has a value.", + DuplicateFuncArgName => "Function argument name not unique.", + IncludePathContainsNULL => "Cannot include file, path contains NULL characters.", + IndexFromLength => "Indexing with indices obtained from `length`, `size` etc is discouraged. Use `eachindex` or `axes` instead.", + FileTooBig => "File too big, not following include.", + FileNotAvailable => "File not available.", + RelativeImportTooManyDots => "Relative import has more leading dots than available module nesting.", +) + +haserror(m::Meta) = m.error !== nothing +haserror(x::EXPR) = hasmeta(x) && haserror(x.meta) +errorof(x::EXPR) = hasmeta(x) ? 
x.meta.error : nothing +function seterror!(x::EXPR, e) + if !hasmeta(x) + x.meta = Meta() + end + x.meta.error = e +end + +const default_options = (true, true, true, true, true, true, true, true, true, true) + +struct LintOptions + call::Bool + iter::Bool + nothingcomp::Bool + constif::Bool + lazy::Bool + datadecl::Bool + typeparam::Bool + modname::Bool + pirates::Bool + useoffuncargs::Bool +end +LintOptions() = LintOptions(default_options...) +LintOptions(::Colon) = LintOptions(fill(true, length(default_options))...) + +LintOptions(options::Vararg{Union{Bool,Nothing},length(default_options)}) = + LintOptions(something.(options, default_options)...) + +function check_all(x::EXPR, opts::LintOptions, env::ExternalEnv) + # Do checks + opts.call && check_call(x, env) + opts.iter && check_loop_iter(x, env) + opts.nothingcomp && check_nothing_equality(x, env) + opts.constif && check_if_conds(x) + opts.lazy && check_lazy(x) + opts.datadecl && check_datatype_decl(x, env) + opts.typeparam && check_typeparams(x) + opts.modname && check_modulename(x) + opts.pirates && check_for_pirates(x) + opts.useoffuncargs && check_farg_unused(x) + check_kw_default(x, env) + check_use_of_literal(x) + check_break_continue(x) + check_const(x) + + if x.args !== nothing + for i in 1:length(x.args) + check_all(x.args[i], opts, env) + end + end +end + + +function _typeof(x, state) + if x isa EXPR + if headof(x) in (:abstract, :primitive, :struct) + return CoreTypes.DataType + elseif CSTParser.defines_module(x) + return CoreTypes.Module + elseif CSTParser.defines_function(x) + return CoreTypes.Function + end + elseif x isa SymbolServer.DataTypeStore + return CoreTypes.DataType + elseif x isa SymbolServer.FunctionStore + return CoreTypes.Function + end +end + +# Call +function struct_nargs(x::EXPR) + # struct defs wrapped in macros are likely to have some arbirtary additional constructors, so lets allow anything + parentof(x) isa EXPR && CSTParser.ismacrocall(parentof(x)) && return 0, 
typemax(Int), Symbol[], true + minargs, maxargs, kws, kwsplat = 0, 0, Symbol[], false + args = x.args[3] + length(args.args) == 0 && return 0, typemax(Int), kws, kwsplat + inner_constructor = findfirst(a -> CSTParser.defines_function(a), args.args) + if inner_constructor !== nothing + return func_nargs(args.args[inner_constructor]) + else + minargs = maxargs = length(args.args) + end + return minargs, maxargs, kws, kwsplat +end + +function func_nargs(x::EXPR) + minargs, maxargs, kws, kwsplat = 0, 0, Symbol[], false + sig = CSTParser.rem_wheres_decls(CSTParser.get_sig(x)) + + if sig.args !== nothing + for i = 2:length(sig.args) + arg = unwrap_nospecialize(sig.args[i]) + if isparameters(arg) + for j = 1:length(arg.args) + arg1 = arg.args[j] + if iskwarg(arg1) + push!(kws, Symbol(CSTParser.str_value(CSTParser.get_arg_name(arg1.args[1])))) + elseif isidentifier(arg1) || isdeclaration(arg1) + push!(kws, Symbol(CSTParser.str_value(CSTParser.get_arg_name(arg1)))) + elseif issplat(arg1) + kwsplat = true + end + end + elseif iskwarg(arg) + if issplat(arg.args[1]) + maxargs = typemax(Int) + else + maxargs !== typemax(Int) && (maxargs += 1) + end + elseif issplat(arg) || + (isdeclaration(arg) && + ((isidentifier(arg.args[2]) && valofid(arg.args[2]) == "Vararg") || + (iscurly(arg.args[2]) && isidentifier(arg.args[2].args[1]) && valofid(arg.args[2].args[1]) == "Vararg"))) + maxargs = typemax(Int) + else + minargs += 1 + maxargs !== typemax(Int) && (maxargs += 1) + end + end + end + + return minargs, maxargs, kws, kwsplat +end + +function func_nargs(m::SymbolServer.MethodStore) + minargs, maxargs, kws, kwsplat = 0, 0, Symbol[], false + + for arg in m.sig + if CoreTypes.isva(last(arg)) + maxargs = typemax(Int) + else + minargs += 1 + maxargs !== typemax(Int) && (maxargs += 1) + end + end + for kw in m.kws + if endswith(String(kw), "...") + kwsplat = true + else + push!(kws, kw) + end + end + return minargs, maxargs, kws, kwsplat +end + +function call_nargs(x::EXPR) + minargs, 
maxargs, kws = 0, 0, Symbol[] + if length(x.args) > 0 + for i = 2:length(x.args) + arg = x.args[i] + if isparameters(arg) + for j = 1:length(arg.args) + arg1 = arg.args[j] + if iskwarg(arg1) + push!(kws, Symbol(CSTParser.str_value(CSTParser.get_arg_name(arg1.args[1])))) + end + end + elseif iskwarg(arg) + push!(kws, Symbol(CSTParser.str_value(CSTParser.get_arg_name(arg.args[1])))) + elseif issplat(arg) + maxargs = typemax(Int) + else + minargs += 1 + maxargs !== typemax(Int) && (maxargs += 1) + end + end + else + @info string("call_nargs: ", to_codeobject(x)) + end + + return minargs, maxargs, kws +end + +# compare_f_call(m_counts, call_counts) = true # fallback method + +function compare_f_call( + (ref_minargs, ref_maxargs, ref_kws, kwsplat), + (act_minargs, act_maxargs, act_kws), + ) + # check matching on positional arguments + if act_maxargs == typemax(Int) + act_minargs <= act_maxargs < ref_minargs && return false + else + !(ref_minargs <= act_minargs <= act_maxargs <= ref_maxargs) && return false + end + + # check matching on keyword arguments + kwsplat && return true # splatted kw in method so accept any kw in call + + # no splatted kw in method sig + length(act_kws) > length(ref_kws) && return false # call has more kws than method accepts + !all(kw in ref_kws for kw in act_kws) && return false # call supplies a kw that isn't defined in the method + + return true +end + +function is_something_with_methods(x::Binding) + (CoreTypes.isfunction(x.type) && x.val isa EXPR) || + (CoreTypes.isdatatype(x.type) && x.val isa EXPR && CSTParser.defines_struct(x.val)) || + (x.val isa SymbolServer.FunctionStore || x.val isa SymbolServer.DataTypeStore) +end +is_something_with_methods(x::T) where T <: Union{SymbolServer.FunctionStore,SymbolServer.DataTypeStore} = true +is_something_with_methods(x) = false + +function check_call(x, env::ExternalEnv) + if iscall(x) + parentof(x) isa EXPR && headof(parentof(x)) === :do && return # TODO: add number of args specified in do block. 
+ length(x.args) == 0 && return + # find the function we're dealing with + func_ref = refof_call_func(x) + func_ref === nothing && return + + if is_something_with_methods(func_ref) && !(func_ref isa Binding && func_ref.val isa EXPR && func_ref.val.head === :macro) + # intentionally empty + if func_ref isa Binding && func_ref.val isa EXPR && isassignment(func_ref.val) && isidentifier(func_ref.val.args[1]) && isidentifier(func_ref.val.args[2]) + # if func_ref is a shadow binding (for these purposes, an assignment that just changes the name of a mehtod), redirect to the rhs of the assignment. + func_ref = refof(func_ref.val.args[2]) + end + else + return + end + call_counts = call_nargs(x) + tls = retrieve_toplevel_scope(x) + tls === nothing && return @warn "Couldn't get top-level scope." # General check, this means something has gone wrong. + func_ref === nothing && return + !sig_match_any(func_ref, x, call_counts, tls, env) && seterror!(x, IncorrectCallArgs) + end +end + +function sig_match_any(func_ref::Union{SymbolServer.FunctionStore,SymbolServer.DataTypeStore}, x, call_counts, tls::Scope, env::ExternalEnv) + iterate_over_ss_methods(func_ref, tls, env, m -> compare_f_call(func_nargs(m), call_counts)) +end + +function sig_match_any(func_ref::Binding, x, call_counts, tls::Scope, env::ExternalEnv) + if func_ref.val isa SymbolServer.FunctionStore || func_ref.val isa SymbolServer.DataTypeStore + match = sig_match_any(func_ref.val, x, call_counts, tls, env) + match && return true + end + + has_at_least_one_method = func_ref.val isa EXPR && defines_function(func_ref.val) + # handle case where func_ref is typed as Function and yet has no methods + + for r in func_ref.refs + method = get_method(r) + method === nothing && continue + has_at_least_one_method = true + sig_match_any(method, x, call_counts, tls, env) && return true + end + return !has_at_least_one_method +end + +function sig_match_any(func::EXPR, x, call_counts, tls::Scope, env::ExternalEnv) + if 
CSTParser.defines_function(func) + m_counts = func_nargs(func) + elseif CSTParser.defines_struct(func) + m_counts = struct_nargs(func) + else + return true # We shouldn't get here + end + if compare_f_call(m_counts, call_counts) + return true + else + x1 = CSTParser.rem_where_decl(CSTParser.get_sig(func)) + if (x1.head == :call && x1 == x) || (!(x1.args isa Nothing) && x1.args[1].head == :call && x1.args[1] == x) + return true + end + end + return false +end + +function get_method(name::EXPR) + f = maybe_get_parent_fexpr(name, x -> CSTParser.defines_function(x) || CSTParser.defines_struct(x) || CSTParser.defines_macro(x)) + if f !== nothing && CSTParser.get_name(f) == name + return f + end +end +function get_method(x::Union{SymbolServer.FunctionStore,SymbolServer.DataTypeStore}) + x +end +get_method(x) = nothing + +isdocumented(x::EXPR) = parentof(x) isa EXPR && CSTParser.ismacrocall(parentof(x)) && headof(parentof(x).args[1]) === :globalrefdoc + +function check_loop_iter(x::EXPR, env::ExternalEnv) + if headof(x) === :for + if length(x.args) > 1 + body = x.args[2] + if headof(x.args[1]) === :block && x.args[1].args !== nothing + for arg in x.args[1].args + check_incorrect_iter_spec(arg, body, env) + end + else + check_incorrect_iter_spec(x.args[1], body, env) + end + end + elseif headof(x) === :generator + body = x.args[1] + for i = 2:length(x.args) + check_incorrect_iter_spec(x.args[i], body, env) + end + end +end + +function check_incorrect_iter_spec(x, body, env) + if x.args !== nothing && CSTParser.is_range(x) + rng = rhs_of_iterator(x) + + if headof(rng) === :FLOAT || headof(rng) === :INTEGER || (iscall(rng) && refof(rng.args[1]) === getsymbols(env)[:Base][:length]) + seterror!(x, IncorrectIterSpec) + elseif iscall(rng) && valof(rng.args[1]) == ":" && + length(rng.args) === 3 && + headof(rng.args[2]) === :INTEGER && + iscall(rng.args[3]) && + length(rng.args[3].args) > 1 && ( + refof(rng.args[3].args[1]) === getsymbols(env)[:Base][:length] || + 
refof(rng.args[3].args[1]) === getsymbols(env)[:Base][:size] + ) + if length(x.args) >= 1 + lhs = x.args[1] + arr = rng.args[3].args[2] + b = refof(arr) + + # 1:length(arr) indexing is ok for Vector and Array specifically + if b isa Binding && (CoreTypes.isarray(b.type) || CoreTypes.isvector(b.type)) + return + end + if !all_underscore(valof(lhs)) + if check_is_used_in_getindex(body, lhs, arr) + seterror!(x, IndexFromLength) + end + end + end + end + end +end + +function check_is_used_in_getindex(expr, lhs, arr) + if headof(expr) === :ref && expr.args !== nothing && length(expr.args) > 1 + this_arr = expr.args[1] + if hasref(this_arr) && hasref(arr) && refof(this_arr) == refof(arr) + for index_arg in expr.args[2:end] + if hasref(index_arg) && hasref(lhs) && refof(index_arg) == refof(lhs) + seterror!(expr, IndexFromLength) + return true + end + end + end + end + if expr.args !== nothing + for arg in expr.args + check_is_used_in_getindex(arg, lhs, arr) && return true + end + end + return false +end + +function check_nothing_equality(x::EXPR, env::ExternalEnv) + if isbinarycall(x) && length(x.args) == 3 + _nothing = getsymbols(env)[:Core][:nothing] + if valof(x.args[1]) == "==" && ( + (valof(x.args[2]) == "nothing" && refof(x.args[2]) == _nothing) || + (valof(x.args[3]) == "nothing" && refof(x.args[3]) == _nothing) + ) + seterror!(x.args[1], NothingEquality) + elseif valof(x.args[1]) == "!=" && ( + (valof(x.args[2]) == "nothing" && refof(x.args[2]) == _nothing) || + (valof(x.args[3]) == "nothing" && refof(x.args[3]) == _nothing) + ) + seterror!(x.args[1], NothingNotEq) + end + end +end + +function _get_top_binding(x::EXPR, name::String) + if scopeof(x) isa Scope + return _get_top_binding(scopeof(x), name) + elseif parentof(x) isa EXPR + return _get_top_binding(parentof(x), name) + else + return nothing + end +end + +function _get_top_binding(s::Scope, name::String) + if scopehasbinding(s, name) + return s.names[name] + elseif parentof(s) isa Scope + return 
_get_top_binding(parentof(s), name) + else + return nothing + end +end + +function _get_global_scope(s::Scope) + if !CSTParser.defines_module(s.expr) && parentof(s) isa Scope && parentof(s) != s + return _get_global_scope(parentof(s)) + else + return s + end +end + +function check_if_conds(x::EXPR) + if headof(x) === :if + cond = x.args[1] + if headof(cond) === :TRUE || headof(cond) === :FALSE + seterror!(cond, ConstIfCondition) + elseif isassignment(cond) + seterror!(cond, EqInIfConditional) + end + end +end + +function check_lazy(x::EXPR) + if isbinarysyntax(x) + if valof(headof(x)) == "||" + if headof(x.args[1]) === :TRUE || headof(x.args[1]) === :FALSE + seterror!(x, PointlessOR) + end + elseif valof(headof(x)) == "&&" + if headof(x.args[1]) === :TRUE || headof(x.args[1]) === :FALSE || headof(x.args[2]) === :TRUE || headof(x.args[2]) === :FALSE + seterror!(x, PointlessAND) + end + end + end +end + +is_never_datatype(b, env::ExternalEnv) = false +is_never_datatype(b::SymbolServer.DataTypeStore, env::ExternalEnv) = false +function is_never_datatype(b::SymbolServer.FunctionStore, env::ExternalEnv) + !(SymbolServer._lookup(b.extends, getsymbols(env)) isa SymbolServer.DataTypeStore) +end +function is_never_datatype(b::Binding, env::ExternalEnv) + if b.val isa Binding + return is_never_datatype(b.val, env) + elseif b.val isa SymbolServer.FunctionStore + return is_never_datatype(b.val, env) + elseif CoreTypes.isdatatype(b.type) + return false + elseif b.type !== nothing + if !any(x -> x isa SymbolServer.DataTypeStore, get_eventual_datatype(ref, env) for ref in b.refs) + return true + end + end + return false +end + +function check_datatype_decl(x::EXPR, env::ExternalEnv) + # Only call in function signatures? 
+ if isdeclaration(x) && parentof(x) isa EXPR && iscall(parentof(x)) + if (dt = refof_maybe_getfield(last(x.args))) !== nothing + if is_never_datatype(dt, env) + seterror!(x, InvalidTypeDeclaration) + end + elseif CSTParser.isliteral(last(x.args)) + seterror!(x, InvalidTypeDeclaration) + end + end +end + +function check_modulename(x::EXPR) + if CSTParser.defines_module(x) && # x is a module + scopeof(x) isa Scope && parentof(scopeof(x)) isa Scope && # it has a scope and a parent scope + CSTParser.defines_module(parentof(scopeof(x)).expr) && # the parent scope is a module + valof(CSTParser.get_name(x)) == valof(CSTParser.get_name(parentof(scopeof(x)).expr)) # their names match + seterror!(CSTParser.get_name(x), InvalidModuleName) + end +end + +# Check whether function arguments are unused +function check_farg_unused(x::EXPR) + if CSTParser.defines_function(x) + sig = CSTParser.rem_wheres_decls(CSTParser.get_sig(x)) + if (headof(x) === :function && length(x.args) == 2 && x.args[2] isa EXPR && length(x.args[2].args) == 1 && CSTParser.isliteral(x.args[2].args[1])) || + (length(x.args) > 1 && headof(x.args[2]) === :block && length(x.args[2].args) == 1 && CSTParser.isliteral(x.args[2].args[1])) + return # Allow functions that return constants + end + if iscall(sig) + arg_names = Set{String}() + for i = 2:length(sig.args) + arg = sig.args[i] + if arg.head === :parameters + for arg2 in arg.args + !check_farg_unused_(arg2, arg_names) && return + end + else + !check_farg_unused_(arg, arg_names) && return + end + end + end + end +end + +function check_farg_unused_(arg, arg_names) + if !hasbinding(arg) + if iskwarg(arg) + arg = arg.args[1] + end + if is_nospecialize_call(arg) + arg = unwrap_nospecialize(arg) + end + end + if !hasbinding(arg) + return false + end + b = bindingof(arg) + + # We don't care about these + valof(b.name) isa String && all_underscore(valof(b.name)) && return false + + if b === nothing || + # no refs: + isempty(b.refs) || + # only self ref: + 
(length(b.refs) == 1 && first(b.refs) == b.name) || + # first usage has binding: + (length(b.refs) > 1 && b.refs[2] isa EXPR && hasbinding(b.refs[2])) + seterror!(arg, UnusedFunctionArgument) + end + + if valof(b.name) === nothing + elseif valof(b.name) in arg_names + seterror!(arg, DuplicateFuncArgName) + else + push!(arg_names, valof(b.name)) + end + true +end + +function unwrap_nospecialize(x) + is_nospecialize_call(x) || return x + x.args[3] +end + +function is_nospecialize_call(x) + CSTParser.ismacrocall(x) && + CSTParser.ismacroname(x.args[1]) && + is_nospecialize(x.args[1]) +end + +""" +collect_hints(x::EXPR, env, missingrefs = :all, isquoted = false, errs = Tuple{Int,EXPR}[], pos = 0) + +Collect hints and errors from an expression. `missingrefs` = (:none, :id, :all) determines whether unresolved +identifiers are marked, the :all option will mark identifiers used in getfield calls." +""" +function collect_hints(x::EXPR, env, missingrefs=:all, isquoted=false, errs=Tuple{Int,EXPR}[], pos=0) + if quoted(x) + isquoted = true + elseif isquoted && unquoted(x) + isquoted = false + end + if headof(x) === :errortoken + # collect parse errors + push!(errs, (pos, x)) + elseif !isquoted + if missingrefs != :none && isidentifier(x) && !hasref(x) && + !(valof(x) == "var" && parentof(x) isa EXPR && isnonstdid(parentof(x))) && + !((valof(x) == "stdcall" || valof(x) == "cdecl" || valof(x) == "fastcall" || valof(x) == "thiscall" || valof(x) == "llvmcall") && is_in_fexpr(x, x -> iscall(x) && isidentifier(x.args[1]) && valof(x.args[1]) == "ccall")) + + push!(errs, (pos, x)) + elseif haserror(x) && errorof(x) isa StaticLint.LintCodes + # collect lint hints + push!(errs, (pos, x)) + end + elseif isquoted && missingrefs == :all && should_mark_missing_getfield_ref(x, env) + push!(errs, (pos, x)) + end + + for i in 1:length(x) + collect_hints(x[i], env, missingrefs, isquoted, errs, pos) + pos += x[i].fullspan + end + + errs +end + +function refof_maybe_getfield(x::EXPR) + if 
isidentifier(x) + return refof(x) + elseif is_getfield_w_quotenode(x) + return refof(x.args[2].args[1]) + end +end + +function should_mark_missing_getfield_ref(x, env) + if isidentifier(x) && !hasref(x) && # x has no ref + parentof(x) isa EXPR && headof(parentof(x)) === :quotenode && parentof(parentof(x)) isa EXPR && is_getfield(parentof(parentof(x))) # x is the rhs of a getproperty + lhsref = refof_maybe_getfield(parentof(parentof(x)).args[1]) + hasref(x) && return false # We've resolved + if lhsref isa SymbolServer.ModuleStore || (lhsref isa Binding && lhsref.val isa SymbolServer.ModuleStore) + # a module, we should know this. + return true + elseif lhsref isa Binding + # by-use type inference runs after we've resolved references so we may not have known lhsref's type first time round, lets try and find `x` again + resolve_getfield(x, lhsref, ResolveOnly(retrieve_scope(x), env, nothing)) # FIXME: Setting `server` to nothing might be sketchy? + hasref(x) && return false # We've resolved + if lhsref.val isa Binding + lhsref = lhsref.val + end + lhsref = get_root_method(lhsref, nothing) + if lhsref isa EXPR + # Not clear what is happening here. + return false + elseif lhsref.type isa SymbolServer.DataTypeStore && !(isempty(lhsref.type.fieldnames) || isunionfaketype(lhsref.type.name) || has_getproperty_method(lhsref.type, env)) + return true + elseif lhsref.type isa Binding && lhsref.type.val isa EXPR && CSTParser.defines_struct(lhsref.type.val) && !has_getproperty_method(lhsref.type) + # We may have infered the lhs type after the semantic pass that was resolving references. Copied from `resolve_getfield(x::EXPR, parent_type::EXPR, state::State)::Bool`. + if scopehasbinding(scopeof(lhsref.type.val), valof(x)) + setref!(x, scopeof(lhsref.type.val).names[valof(x)]) + return false + end + return true + end + end + end + return false +end + +unwrap_fakeunionall(x) = x isa SymbolServer.FakeUnionAll ? 
unwrap_fakeunionall(x.body) : x +function has_getproperty_method(b::SymbolServer.DataTypeStore, env) + getprop_vr = SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Base), :getproperty) + if haskey(getsymbolextendeds(env), getprop_vr) + for ext in getsymbolextendeds(env)[getprop_vr] + for m in SymbolServer._lookup(ext, getsymbols(env))[:getproperty].methods + t = unwrap_fakeunionall(m.sig[1][2]) + !(t isa SymbolServer.FakeUnion) && t.name == b.name.name && return true + end + end + else + for m in getsymbols(env)[:Base][:getproperty].methods + t = unwrap_fakeunionall(m.sig[1][2]) + !(t isa SymbolServer.FakeUnion) && t.name == b.name.name && return true + end + end + return false +end + +function has_getproperty_method(b::Binding) + if b.val isa Binding || b.val isa SymbolServer.DataTypeStore + return has_getproperty_method(b.val) + elseif b isa Binding && CoreTypes.isdatatype(b.type) + for ref in b.refs + if ref isa EXPR && is_type_of_call_to_getproperty(ref) + return true + end + end + end + return false +end + +function is_type_of_call_to_getproperty(x::EXPR) + function is_call_to_getproperty(x::EXPR) + if iscall(x) + func_name = x.args[1] + return (isidentifier(func_name) && valof(func_name) == "getproperty") || # getproperty() + (is_getfield_w_quotenode(func_name) && isidentifier(func_name.args[2].args[1]) && valof(func_name.args[2].args[1]) == "getproperty") # Base.getproperty() + end + return false + end + + return parentof(x) isa EXPR && parentof(parentof(x)) isa EXPR && + ((isdeclaration(parentof(x)) && x === parentof(x).args[2] && is_call_to_getproperty(parentof(parentof(x)))) || + (iscurly(parentof(x)) && x === parentof(x).args[1] && isdeclaration(parentof(parentof(x))) && parentof(parentof(parentof(x))) isa EXPR && is_call_to_getproperty(parentof(parentof(parentof(x)))))) +end + +isunionfaketype(t::SymbolServer.FakeTypeName) = t.name.name === :Union && t.name.parent isa SymbolServer.VarRef && t.name.parent.name === :Core + +function 
check_typeparams(x::EXPR)
    # Flag type parameters of a `where` clause that are never referenced.
    if iswhere(x)
        # args[1] is the qualified expression; args[2:end] are the type parameters.
        for i in 2:length(x.args)
            a = x.args[i]
            # A parameter whose binding has no refs beyond its own definition is unused.
            if hasbinding(a) && (bindingof(a).refs === nothing || length(bindingof(a).refs) < 2)
                seterror!(a, UnusedTypeParameter)
            end
        end
    end
end

"""
    check_for_pirates(x::EXPR)

Flag function definitions that look like type piracy: adding a method to a
function imported from another module where no argument type is owned locally
(`TypePiracy`). Definitions of `!=` are flagged separately (`NotEqDef`).
"""
function check_for_pirates(x::EXPR)
    if CSTParser.defines_function(x)
        sig = CSTParser.rem_where_decl(CSTParser.get_sig(x))
        fname = CSTParser.get_name(sig)
        if fname_is_noteq(fname)
            seterror!(x, NotEqDef)
        elseif iscall(sig) && hasbinding(x) && overwrites_imported_function(refof(fname))
            # Not piracy if any argument type is bound/owned in this file.
            for i = 2:length(sig.args)
                if hasbinding(sig.args[i]) && bindingof(sig.args[i]).type isa Binding
                    return
                elseif refers_to_nonimported_type(sig.args[i])
                    return
                end
            end
            seterror!(x, TypePiracy)
        end
    end
end

# Does the (possibly module-qualified, e.g. `Base.!=`) function name resolve to `!=`?
function fname_is_noteq(x)
    if x isa EXPR
        if isoperator(x) && valof(x) == "!="
            return true
        elseif is_getfield_w_quotenode(x)
            # Recurse into the quoted name on the right of the dot.
            return fname_is_noteq(x.args[2].args[1])
        end
    end
    return false
end

# True if a signature argument mentions a type whose binding is local to this
# file (i.e. not an imported/SymbolServer type).
function refers_to_nonimported_type(arg::EXPR)
    arg = CSTParser.rem_wheres(arg)
    if hasref(arg) && refof(arg) isa Binding
        return true
    elseif isunarysyntax(arg) && (valof(headof(arg)) == "::" || valof(headof(arg)) == "<:")
        return refers_to_nonimported_type(arg.args[1])
    elseif isdeclaration(arg)
        return refers_to_nonimported_type(arg.args[2])
    elseif iscurly(arg)
        # Parametric type: any locally-owned component counts.
        for i = 1:length(arg.args)
            if refers_to_nonimported_type(arg.args[i])
                return true
            end
        end
        return false
    end
    return false
end

# Fallback: anything that is not a `Binding` cannot overwrite an imported function.
overwrites_imported_function(b) = false
# True when `b` rebinds a name brought in via `import`: either its value is a
# SymbolServer function/datatype store appearing inside an `import` expression,
# or its first recorded ref is such a store.
function overwrites_imported_function(b::Binding)
    if ((b.val isa SymbolServer.FunctionStore || b.val isa SymbolServer.DataTypeStore) &&
        (is_in_fexpr(b.name, x -> headof(x) === :import)) || (b.refs isa Vector && length(b.refs) > 0 && (first(b.refs) isa SymbolServer.FunctionStore || first(b.refs) isa SymbolServer.DataTypeStore)))
        return true
    end
    return false
end

# Now called from add_binding
# Should return true/false indicating whether the binding 
should actually be added?
"""
    check_const_decl(name::String, b::Binding, scope)

Called when `scope` already has a binding named `name` (see `add_binding`).
Flags `const` declarations of datatypes (`CannotDeclareConst`) and
redefinitions of existing const/datatype bindings (`InvalidRedefofConst`),
unless the two definitions live in mutually exclusive `if` branches.
"""
function check_const_decl(name::String, b::Binding, scope)
    # assumes `scopehasbinding(scope, name)`
    b.val isa Binding && return check_const_decl(name, b.val, scope)
    # FIX: previously `is_const(bind)` - `bind` is undefined in this scope; the
    # intended receiver is this binding's value expression.
    if b.val isa EXPR && (CSTParser.defines_datatype(b.val) || is_const(b.val))
        seterror!(b.val, CannotDeclareConst)
    else
        prev = scope.names[name]
        if (CoreTypes.isdatatype(prev.type) && !is_mask_binding_of_datatype(prev)) || is_const(prev)
            # Redefinitions in different branches of the same `if` are legal.
            if b.val isa EXPR && prev.val isa EXPR && !in_same_if_branch(b.val, prev.val)
                return
            end
            if b.val isa EXPR
                seterror!(b.val, InvalidRedefofConst)
            else
                # TODO check what's going on here
                seterror!(b.name, InvalidRedefofConst)
            end
        end
    end
end

# True when `b` merely re-binds a name whose rhs is (or defines) a datatype,
# e.g. `T = SomeType`; such masks should not trigger const-redefinition errors.
# FIX: the rhs checks previously read `rhsref.val isa EXPR && rhsref.val isa
# SymbolServer.DataTypeStore`, a contradiction that is always false and that
# dereferenced `.val` on values that may not be `Binding`s; guard with
# `rhsref isa Binding` and test the two alternatives separately.
function is_mask_binding_of_datatype(b::Binding)
    b.val isa EXPR && CSTParser.isassignment(b.val) && (rhsref = refof(b.val.args[2])) !== nothing &&
        (rhsref isa SymbolServer.DataTypeStore ||
         (rhsref isa Binding && (rhsref.val isa SymbolServer.DataTypeStore ||
                                 (rhsref.val isa EXPR && CSTParser.defines_datatype(rhsref.val)))))
end

# check whether a and b are in all the same :if blocks and in the same branches
in_same_if_branch(a::EXPR, b::EXPR) = in_same_if_branch(find_if_parents(a), find_if_parents(b))
in_same_if_branch(a::Dict, b::EXPR) = in_same_if_branch(a, find_if_parents(b))
function in_same_if_branch(a::Dict, b::Dict)
    return length(a) == length(b) && all(k in keys(b) for k in keys(a)) && all(a[k] == b[k] for k in keys(a))
end

# find any parent nodes that are :if blocks and a pseudo-index of which branch
# x is in
function find_if_parents(x::EXPR, current=Int[], list=Dict{EXPR,Vector{Int}}())
    if x.head in (:block, :elseif) && parentof(x) isa EXPR && headof(parentof(x)) in (:if, :elseif)
        i = 1
        while i <= length(parentof(x).args)
            if parentof(x).args[i] == x
                pushfirst!(current, i)
                break
            end
            i += 1
        end
        if headof(parentof(x)) == :if
            list[parentof(x)] = current
            # FIX: reset with a concretely-typed Int[] (was `[]`, a Vector{Any}
            # that forced a convert-copy on every later Dict insertion).
            current = Int[]
        end
    end
    return parentof(x) isa EXPR ? 
find_if_parents(parentof(x), current, list) : list
end

# `const`-ness: an EXPR is const iff its parent expression has head `:const`.
is_const(x) = false
is_const(b::Binding) = is_const(b.val)
is_const(x::EXPR) = is_const_expr(parentof(x))

is_const_expr(x) = false
is_const_expr(x::EXPR) = headof(x) === :const


"""
    check_kw_default(x::EXPR, env::ExternalEnv)

Check that the default value matches the type for keyword arguments. Following types are
checked: `String, Symbol, Int, Char, Bool, Float32, Float64, UInt8, UInt16, UInt32,
UInt64, UInt128` as well as the fixed-size signed integers. Flags `KwDefaultMismatch`.
"""
function check_kw_default(x::EXPR, env::ExternalEnv)
    # Only handle `arg::T = <literal>` keywords where `T` resolved to something known.
    if headof(x) == :kw && isdeclaration(x.args[1]) && CSTParser.isliteral(x.args[2]) && hasref(x.args[1].args[2])
        decl_T = get_eventual_datatype(refof(x.args[1].args[2]), env)
        rhs = x.args[2]
        rhsval = valof(rhs)
        if decl_T == getsymbols(env)[:Core][:String] && !CSTParser.isstringliteral(rhs)
            seterror!(rhs, KwDefaultMismatch)
        elseif decl_T == getsymbols(env)[:Core][:Symbol] && headof(rhs) !== :IDENTIFIER
            seterror!(rhs, KwDefaultMismatch)
        elseif decl_T == getsymbols(env)[:Core][:Int] && headof(rhs) !== :INTEGER
            seterror!(rhs, KwDefaultMismatch)
        elseif decl_T == getsymbols(env)[:Core][Sys.WORD_SIZE == 64 ? :Int64 : :Int32] && headof(rhs) !== :INTEGER
            # `Int` aliases the native-width signed integer; treat it identically.
            seterror!(rhs, KwDefaultMismatch)
        elseif decl_T == getsymbols(env)[:Core][:Bool] && !(headof(rhs) === :TRUE || headof(rhs) === :FALSE)
            seterror!(rhs, KwDefaultMismatch)
        elseif decl_T == getsymbols(env)[:Core][:Char] && headof(rhs) !== :CHAR
            seterror!(rhs, KwDefaultMismatch)
        elseif decl_T == getsymbols(env)[:Core][:Float64] && headof(rhs) !== :FLOAT
            seterror!(rhs, KwDefaultMismatch)
        elseif decl_T == getsymbols(env)[:Core][:Float32] && !(headof(rhs) === :FLOAT && occursin("f", rhsval))
            # Float32 literals must use the `1.0f0` suffix form.
            seterror!(rhs, KwDefaultMismatch)
        else
            for T in (UInt8, UInt16, UInt32, UInt64, UInt128)
                if decl_T == getsymbols(env)[:Core][Symbol(T)]
                    # count the digits without prefix (=0x, 0o, 0b) and make sure it fits
                    # between upper and lower literal boundaries for `T` where the boundaries
                    # depend on the type of literal (binary, octal, hex)
                    n = count(x -> x != '_', rhsval) - 2
                    ub = sizeof(T)
                    lb = ub ÷ 2
                    if headof(rhs) == :BININT
                        8lb < n <= 8ub || seterror!(rhs, KwDefaultMismatch)
                    elseif headof(rhs) == :OCTINT
                        3lb < n <= 3ub || seterror!(rhs, KwDefaultMismatch)
                    elseif headof(rhs) == :HEXINT
                        2lb < n <= 2ub || seterror!(rhs, KwDefaultMismatch)
                    else
                        # a plain decimal literal never produces an unsigned type
                        seterror!(rhs, KwDefaultMismatch)
                    end
                end
            end
            # signed integers of non native size can't be declared as literal
            for T in (Int8, Int16, Sys.WORD_SIZE == 64 ? Int32 : Int64, Int128)
                if decl_T == getsymbols(env)[:Core][Symbol(T)]
                    seterror!(rhs, KwDefaultMismatch)
                end
            end

        end
    end
end

"""
    check_use_of_literal(x::EXPR)

Flag literals used where a name is expected, e.g. `module 1 end`,
`struct "s" end`, `1 = x`, `x::1`, `x isa 1` (`InappropriateUseOfLiteral`).
"""
function check_use_of_literal(x::EXPR)
    if CSTParser.defines_module(x) && length(x.args) > 1 && isbadliteral(x.args[2])
        seterror!(x.args[2], InappropriateUseOfLiteral)
    elseif (CSTParser.defines_abstract(x) || CSTParser.defines_primitive(x)) && isbadliteral(x.args[1])
        seterror!(x.args[1], InappropriateUseOfLiteral)
    elseif CSTParser.defines_struct(x) && isbadliteral(x.args[2])
        seterror!(x.args[2], InappropriateUseOfLiteral)
    elseif (isassignment(x) || iskwarg(x)) && isbadliteral(x.args[1])
        seterror!(x.args[1], InappropriateUseOfLiteral)
    elseif isdeclaration(x) && isbadliteral(x.args[2])
        seterror!(x.args[2], InappropriateUseOfLiteral)
    elseif isbinarycall(x, "isa") && isbadliteral(x.args[3])
        seterror!(x.args[3], InappropriateUseOfLiteral)
    end
end

# Literal kinds that make no sense in a name position.
isbadliteral(x::EXPR) = CSTParser.isliteral(x) && (CSTParser.isstringliteral(x) || headof(x) === :INTEGER || headof(x) === :FLOAT || headof(x) === :CHAR || headof(x) === :TRUE || headof(x) === :FALSE)

# `break`/`continue` must appear lexically inside a `for`/`while` loop.
function check_break_continue(x::EXPR)
    if iskeyword(x) && (headof(x) === :CONTINUE || headof(x) === :BREAK) && !is_in_fexpr(x, x -> headof(x) in (:for, :while))
        seterror!(x, ShouldBeInALoop)
    end
end

function check_const(x::EXPR)
    # `const x::T = ...` on globals is only supported from Julia 1.8 on;
    # `const local` is never supported.
    if headof(x) === :const
        if VERSION < v"1.8.0-DEV.1500" && CSTParser.isassignment(x.args[1]) && CSTParser.isdeclaration(x.args[1].args[1])
            seterror!(x, TypeDeclOnGlobalVariable)
        elseif headof(x.args[1]) === :local
            seterror!(x, UnsupportedConstLocalVariable)
        end
    end
end

# Flag bindings that are never referenced (`UnusedBinding`). Struct fields,
# tuple destructuring targets and all-underscore names are exempt.
function check_unused_binding(b::Binding, scope::Scope)
    if headof(scope.expr) !== :struct && headof(scope.expr) !== :tuple && !all_underscore(valof(b.name))
        refs = loose_refs(b)
        if (isempty(refs) || length(refs) == 1 && refs[1] == b.name) &&
            !is_sig_arg(b.name) && !is_overwritten_in_loop(b.name) &&
            !is_overwritten_subsequently(b, 
scope) && !is_kw_of_macrocall(b)
            seterror!(b.name, UnusedBinding)
        end
    end
end

# True for names made up solely of underscores ('_' == 0x5f), which are
# conventionally throwaway and never flagged as unused.
all_underscore(s) = false
all_underscore(s::String) = all(==(0x5f), codeunits(s))

# Is `x` inside a call signature (so "unused" may just mean "part of the API")?
function is_sig_arg(x)
    is_in_fexpr(x, CSTParser.iscall)
end

# `name = val` keywords passed to a macro call look like bindings but are not.
function is_kw_of_macrocall(b::Binding)
    b.val isa EXPR && isassignment(b.val) && parentof(b.val) isa EXPR && CSTParser.ismacrocall(parentof(b.val))
end

function is_overwritten_in_loop(x)
    # Cuts out false positives for check_unused_binding - the linear nature of our
    # semantic passes mean a variable declared at the end of a loop's block but used at
    # the start won't appear to be referenced.

    # Cheap version:
    # is_in_fexpr(x, x -> x.head === :while || x.head === :for)

    # We really want to check whether the enclosing scope(s) of the loop has a binding
    # with matching name.
    # Is this too expensive?
    loop = maybe_get_parent_fexpr(x, x -> x.head === :while || x.head === :for)
    if loop !== nothing
        s = scopeof(loop)
        if s isa Scope && parentof(s) isa Scope
            s2 = check_parent_scopes_for(s, valof(x))
            if s2 isa Scope
                prev_binding = parentof(s2).names[valof(x)]
                if prev_binding isa Binding
                    # An outer binding with the same name exists: assume the loop
                    # rewrites it and suppress the unused warning.
                    return true
                    # s = ComesBefore(prev_binding.name, s2.expr, 0)
                    # traverse(parentof(s2).expr, s)
                    # return s.result == 1
                    # for r in prev_binding.refs
                    #     if r isa EXPR && is_in_fexpr(r, x -> x === loop)
                    #         return true
                    #     end
                    # end
                else
                    return false
                end
            end
        else
            return false
        end
    else
        false
    end
    # All fall-through paths (no enclosing loop, or no matching outer binding)
    # report "not overwritten".
    false
end

"""
    ComesBefore

Check whether x1 comes before x2. `result` is 0 until either is seen, then 1 if
`x1` was encountered first and 2 if `x2` was.
"""
mutable struct ComesBefore
    x1::EXPR
    x2::EXPR
    result::Int
end

# Traversal functor: walks the tree and records which of x1/x2 appears first.
function (state::ComesBefore)(x::EXPR)
    state.result > 0 && return
    if x == state.x1
        state.result = 1
        return
    elseif x == state.x2
        state.result = 2
        return
    end
    if !hasscope(x)
        # NOTE(review): only scope-free subtrees are descended into; confirm
        # skipping scoped subtrees is intended.
        traverse(x, state)
        state.result > 0 && return
    end
end

"""
    check_parent_scopes_for(s::Scope, name)

Checks whether the parent scope of `s` has the name `name`. 
+""" +function check_parent_scopes_for(s::Scope, name) + # This returns `s` rather than the parent so that s.expr can be used in the linear + # search (e.g. `bound_before`) + if s.expr.head !== :module && parentof(s) isa Scope && haskey(parentof(s).names, name) + s + elseif s.parent isa Scope + check_parent_scopes_for(parentof(s), name) + end +end + + + +function is_overwritten_subsequently(b::Binding, scope::Scope) + valof(b.name) === nothing && return false + s = BoundAfter(b.name, valof(b.name), 0) + traverse(scope.expr, s) + return s.result == 2 +end + +""" + ComesBefore + +Check whether x1 comes before x2 +""" +mutable struct BoundAfter + x1::EXPR + name::String + result::Int +end + +function (state::BoundAfter)(x::EXPR) + state.result > 1 && return + if x == state.x1 + state.result = 1 + return + end + if scopeof(x) isa Scope && haskey(scopeof(x).names, state.name) + state.result = 2 + return + end + traverse(x, state) +end diff --git a/src/StaticLint/macros.jl b/src/StaticLint/macros.jl new file mode 100644 index 0000000..b340e5f --- /dev/null +++ b/src/StaticLint/macros.jl @@ -0,0 +1,268 @@ +function handle_macro(@nospecialize(x), state) end +function handle_macro(x::EXPR, state) + !CSTParser.ismacrocall(x) && return + if headof(x.args[1]) === :globalrefdoc + if length(x.args) == 4 + if isidentifier(x.args[4]) && !resolve_ref(x.args[4], state) + if state isa Toplevel + push!(state.resolveonly, x) + end + elseif CSTParser.is_func_call(x.args[4]) + sig = (x.args[4]) + if sig isa EXPR + hasscope(sig) && return # We've already done this, don't repeat + setscope!(sig, Scope(sig)) + mark_sig_args!(sig) + end + if state isa Toplevel + push!(state.resolveonly, x) + end + end + end + elseif CSTParser.ismacroname(x.args[1]) + state(x.args[1]) + if _points_to_Base_macro(x.args[1], Symbol("@deprecate"), state) && length(x.args) == 4 + if bindingof(x.args[3]) !== nothing + return + elseif CSTParser.is_func_call(x.args[3]) + # add deprecated method + # add deprecated 
function binding and args in new scope + mark_binding!(x.args[3], x) + mark_sig_args!(x.args[3]) + s0 = state.scope # store previous scope + state.scope = Scope(s0, x, Dict(), nothing, nothing) + setscope!(x, state.scope) # tag new scope to generating expression + state(x.args[3]) + state(x.args[4]) + state.scope = s0 + elseif isidentifier(x.args[3]) + mark_binding!(x.args[3], x) + end + elseif _points_to_Base_macro(x.args[1], Symbol("@deprecate_binding"), state) && length(x.args) == 4 && isidentifier(x.args[3]) && isidentifier(x.args[4]) + setref!(x.args[3], refof(x.args[4])) + elseif _points_to_Base_macro(x.args[1], Symbol("@eval"), state) && length(x.args) == 3 && state isa Toplevel + # Create scope around eval'ed expression. This ensures anybindings are + # correctly hoisted to the top-level scope. + setscope!(x, Scope(x)) + setparent!(scopeof(x), state.scope) + s0 = state.scope + state.scope = scopeof(x) + interpret_eval(x.args[3], state) + state.scope = s0 + elseif _points_to_Base_macro(x.args[1], Symbol("@irrational"), state) && length(x.args) == 5 + mark_binding!(x.args[3], x) + elseif _points_to_Base_macro(x.args[1], Symbol("@enum"), state) + for i = 3:length(x.args) + if bindingof(x.args[i]) !== nothing + break + end + if i == 4 && headof(x.args[4]) === :block + for j in 1:length(x.args[4].args) + mark_binding!(x.args[4].args[j], x) + end + break + end + mark_binding!(x.args[i], x) + end + elseif _points_to_Base_macro(x.args[1], Symbol("@goto"), state) + if length(x.args) == 3 && isidentifier(x.args[3]) + setref!(x.args[3], Binding(noname, nothing, nothing, EXPR[])) + end + elseif _points_to_Base_macro(x.args[1], Symbol("@label"), state) + if length(x.args) == 3 && isidentifier(x.args[3]) + mark_binding!(x.args[3]) + end + elseif _points_to_Base_macro(x.args[1], Symbol("@NamedTuple"), state) && length(x.args) > 2 && headof(x.args[3]) == :braces + for a in x.args[3].args + if CSTParser.isdeclaration(a) && isidentifier(a.args[1]) && !hasref(a.args[1]) + 
setref!(a.args[1], Binding(noname, nothing, nothing, EXPR[]))
                end
            end
        elseif is_nospecialize(x.args[1])
            # `@nospecialize(a, b, ...)`: each argument is a binding site.
            for i = 2:length(x.args)
                if bindingof(x.args[i]) !== nothing
                    break
                end
                mark_binding!(x.args[i], x)
            end
        # elseif _points_to_arbitrary_macro(x.args[1], :Turing, :model, state) && length(x) == 3 &&
        #     isassignment(x.args[3]) &&
        #     headof(x.args[3].args[2]) === CSTParser.Begin && length(x.args[3].args[2]) == 3 && headof(x.args[3].args[2].args[2]) === :block
        #     for i = 1:length(x.args[3].args[2].args[2])
        #         ex = x.args[3].args[2].args[2].args[i]
        #         if isbinarycall(ex, "~")
        #             mark_binding!(ex)
        #         end
        #     end
        # elseif _points_to_arbitrary_macro(x.args[1], :JuMP, :variable, state)
        #     if length(x.args) < 3
        #         return
        #     elseif length(x) >= 5 && ispunctuation(x[2])
        #         _mark_JuMP_binding(x[5])
        #     else
        #         _mark_JuMP_binding(x[3])
        #     end
        # elseif (_points_to_arbitrary_macro(x[1], :JuMP, :expression, state) ||
        #         _points_to_arbitrary_macro(x[1], :JuMP, :NLexpression, state) ||
        #         _points_to_arbitrary_macro(x[1], :JuMP, :constraint, state) || _points_to_arbitrary_macro(x[1], :JuMP, :NLconstraint, state)) && length(x) > 1
        #     if ispunctuation(x[2])
        #         if length(x) == 8
        #             _mark_JuMP_binding(x[5])
        #         end
        #     else
        #         if length(x) == 4
        #             _mark_JuMP_binding(x[3])
        #         end
        #     end
        end
    end
end

# Strip an indexing expression: `x[i]` -> `x`; anything else is returned as-is.
function _rem_ref(x::EXPR)
    if headof(x) === :ref && length(x.args) > 0
        return x.args[1]
    end
    return x
end

# Is this macro name `@nospecialize`?
is_nospecialize(x) = isidentifier(x) && valofid(x) == "@nospecialize"

# Mark the variable introduced by a JuMP macro argument: a bare name / `x[i]`,
# one side of a (in)equality, or the middle of a 5-element comparison chain
# such as `lb <= x <= ub`.
function _mark_JuMP_binding(arg)
    if isidentifier(arg) || headof(arg) === :ref
        mark_binding!(_rem_ref(arg))
    elseif isbinarycall(arg, "==") || isbinarycall(arg, "<=") || isbinarycall(arg, ">=")
        if isidentifier(arg.args[1]) || headof(arg.args[1]) === :ref
            mark_binding!(_rem_ref(arg.args[1]))
        else
            mark_binding!(_rem_ref(arg.args[3]))
        end
    # FIX: head was misspelled `:comparision`, so chained comparisons never
    # matched; CSTParser/Julia use the `:comparison` head.
    elseif headof(arg) === :comparison && length(arg.args) == 5
        mark_binding!(_rem_ref(arg.args[3]))
    end
+end + +function _points_to_Base_macro(x::EXPR, name, state) + CSTParser.is_getfield_w_quotenode(x) && return _points_to_Base_macro(x.args[2].args[1], name, state) + haskey(getsymbols(state)[:Base], name) || return false + targetmacro = maybe_lookup(getsymbols(state)[:Base][name], state) + isidentifier(x) && Symbol(valofid(x)) == name && (ref = refof(x)) !== nothing && + (ref == targetmacro || (ref isa Binding && ref.val == targetmacro)) +end + +function _points_to_arbitrary_macro(x::EXPR, module_name, name, state) + length(x.args) == 2 && isidentifier(x.args[2]) && valof(x.args[2]) == name && haskey(getsymbols(state), Symbol(module_name)) && haskey(getsymbols(state)[Symbol(module_name)], Symbol("@", name)) && (refof(x.args[2]) == maybe_lookup(getsymbols(state)[Symbol(module_name)][Symbol("@", name)], state) || + (refof(x.args[2]) isa Binding && refof(x.args[2]).val == maybe_lookup(getsymbols(state)[Symbol(module_name)][Symbol("@", name)], state))) +end + +maybe_lookup(x, env::ExternalEnv) = x isa SymbolServer.VarRef ? SymbolServer._lookup(x, getsymbols(env), true) : x +maybe_lookup(x, state::State) = maybe_lookup(x, state.env) + +function maybe_eventually_get_id(x::EXPR) + if isidentifier(x) + return x + elseif isbracketed(x) + return maybe_eventually_get_id(x.args[1]) + end + return nothing +end + +is_eventually_interpolated(x::EXPR) = isbracketed(x) ? is_eventually_interpolated(x.args[1]) : isunarysyntax(x) && valof(headof(x)) == "\$" +isquoted(x::EXPR) = headof(x) === :quotenode && hastrivia(x) && isoperator(x.trivia[1]) && valof(x.trivia[1]) == ":" +maybeget_quotedsymbol(x::EXPR) = isquoted(x) ? maybe_eventually_get_id(x.args[1]) : nothing + +function is_loop_iterator(x::EXPR) + CSTParser.is_range(x) && + ((parentof(x) isa EXPR && headof(parentof(x)) === :for) || + (parentof(x) isa EXPR && parentof(parentof(x)) isa EXPR && headof(parentof(parentof(x))) === :for)) +end + +""" + maybe_quoted_list(x::EXPR) + +Try and get a list of quoted symbols from x. 
Return nothing if not possible. +""" +function maybe_quoted_list(x::EXPR) + names = EXPR[] + if headof(x) === :vect || headof(x) === :tuple + for i = 1:length(x.args) + name = maybeget_quotedsymbol(x.args[i]) + if name !== nothing + push!(names, name) + else + return nothing + end + end + return names + end +end + +""" +interpret_eval(x::EXPR, state) + +Naive attempt to interpret `x` as though it has been eval'ed. Lifts +any bindings made within the scope of `x` to the toplevel and replaces +(some) interpolated binding names with the value where possible. +""" +function interpret_eval(x::EXPR, state) + # make sure we have bindings etc + state(x) + tls = retrieve_toplevel_scope(x) + for ex in collect_expr_with_bindings(x) + b = bindingof(ex) + if isidentifier(b.name) + # The name of the binding is fixed + add_binding(ex, state, tls) + elseif isunarysyntax(b.name) && valof(headof(b.name)) == "\$" + # The name of the binding is variable, we need to work out what the + # interpolated symbol points to. + variable_name = b.name.args[1] + resolve_ref(variable_name, state.scope, state) + if (ref = refof(variable_name)) isa Binding + if isassignment(ref.val) && (rhs = maybeget_quotedsymbol(ref.val.args[2])) !== nothing + # `name = :something` + toplevel_binding = Binding(rhs, b.val, nothing, []) + settype!(toplevel_binding, b.type) + infer_type(toplevel_binding, tls, state) + if scopehasbinding(tls, valofid(toplevel_binding.name)) + tls.names[valofid(toplevel_binding.name)] = toplevel_binding # TODO: do we need to check whether this adds a method? 
+ else + tls.names[valofid(toplevel_binding.name)] = toplevel_binding + end + elseif is_loop_iterator(ref.val) && (names = maybe_quoted_list(rhs_of_iterator(ref.val))) !== nothing + # name is of a collection of quoted symbols + for name in names + toplevel_binding = Binding(name, b.val, nothing, []) + settype!(toplevel_binding, b.type) + infer_type(toplevel_binding, tls, state) + if scopehasbinding(tls, valofid(toplevel_binding.name)) + tls.names[valofid(toplevel_binding.name)] = toplevel_binding # TODO: do we need to check whether this adds a method? + else + tls.names[valofid(toplevel_binding.name)] = toplevel_binding + end + end + end + end + end + end +end + + +function rhs_of_iterator(x::EXPR) + if isassignment(x) + x.args[2] + else + x.args[3] + end +end + +function collect_expr_with_bindings(x, bound_exprs=EXPR[]) + if hasbinding(x) + push!(bound_exprs, x) + # Assuming here that if an expression has a binding we don't want anything bound to chlid nodes. + elseif x.args !== nothing && !((CSTParser.defines_function(x) && !is_eventually_interpolated(x.args[1])) || CSTParser.defines_macro(x) || headof(x) === :export) + for a in x.args + collect_expr_with_bindings(a, bound_exprs) + end + end + return bound_exprs +end diff --git a/src/StaticLint/methodmatching.jl b/src/StaticLint/methodmatching.jl new file mode 100644 index 0000000..341c73b --- /dev/null +++ b/src/StaticLint/methodmatching.jl @@ -0,0 +1,236 @@ +function arg_type(arg, ismethod) + if ismethod + if hasbinding(arg) + if bindingof(arg) isa Binding && bindingof(arg).type !== nothing + type = bindingof(arg).type + if type isa Binding && type.val isa SymbolServer.DataTypeStore + type = type.val + end + return type + end + end + else + if hasref(arg) + if refof(arg) isa Binding && refof(arg).type !== nothing + type = refof(arg).type + if type isa Binding && type.val isa SymbolServer.DataTypeStore + type = type.val + end + return type + end + elseif headof(arg) === :STRING + return CoreTypes.String + elseif 
headof(arg) === :CHAR + return CoreTypes.Char + elseif headof(arg) === :FLOAT + return CoreTypes.Float64 + elseif headof(arg) === :INT + return CoreTypes.Int + elseif headof(arg) === :HEXINT + if length(arg.val) < 5 + return CoreTypes.UInt8 + elseif length(arg.val) < 7 + return CoreTypes.UInt16 + elseif length(arg.val) < 11 + return CoreTypes.UInt32 + else + return CoreTypes.UInt64 + end + elseif headof(arg) === :TRUE || headof(arg) === :FALSE + return CoreTypes.Bool + elseif isquotedsymbol(arg) + return SymbolServer.stdlibs[:Core][:Symbol] + end + end + # VarRef(VarRef(nothing, :Core), :Any) + CoreTypes.Any +end + +isquotedsymbol(x) = x isa EXPR && x.head === :quotenode && length(x.args) == 1 && x.args[1].head === :IDENTIFIER && hastrivia(x) + +function call_arg_types(call::EXPR, ismethod) + types, kws = [], [] + call.args === nothing && return types, kws + if length(call.args) > 1 && headof(call.args[2]) === :parameters + for i = 1:length(call.args[2].args) + push!(kws, call.args[2].args[i].args[1]) + end + for i = 3:length(call.args) + push!(types, arg_type(call.args[i], ismethod)) + end + else + for i = 2:length(call.args) + push!(types, arg_type(call.args[i], ismethod)) + end + end + types, kws +end + +function method_arg_types(call::EXPR) + types, opts, kws = [], [], [] + call.args === nothing && return types, opts, kws + if length(call.args) > 1 && headof(call.args[2]) === :parameters + for i = 1:length(call.args[2].args) + push!(kws, call.args[2].args[i].args[1]) + end + for i = 3:length(call.args) + if CSTParser.iskwarg(call.args[i]) + push!(opts, arg_type(call.args[i].args[1], true)) + else + push!(types, arg_type(call.args[i], true)) + end + end + else + for i = 2:length(call.args) + if CSTParser.iskwarg(call.args[i]) + push!(opts, arg_type(call.args[i].args[1], true)) + else + push!(types, arg_type(call.args[i], true)) + end + end + end + types, opts, kws +end + +function find_methods(x::EXPR, store) + possibles = [] + if iscall(x) + length(x.args) === 
0 && return possibles + func_ref = refof_call_func(x) + func_ref === nothing && return possibles + args, kws = call_arg_types(x, false) + if func_ref isa Binding && func_ref.val isa SymbolServer.FunctionStore || + func_ref isa Binding && func_ref.val isa SymbolServer.DataTypeStore + func_ref = func_ref.val + end + if func_ref isa SymbolServer.FunctionStore || func_ref isa SymbolServer.DataTypeStore + for method in func_ref.methods + if match_method(args, kws, method, store) + push!(possibles, method) + end + end + elseif func_ref isa Binding + if (CoreTypes.isfunction(func_ref.type) || CoreTypes.isdatatype(func_ref.type)) && func_ref.val isa EXPR + for method in func_ref.refs + method = get_method(method) + if method !== nothing + if method isa SymbolServer.FunctionStore + for method1 in method.methods + if match_method(args, kws, method1, store) + push!(possibles, method1) + end + end + elseif match_method(args, kws, method, store) + push!(possibles, method) + end + end + end + elseif (method = method_of_callable_datatype(func_ref)) !== nothing + if match_method(args, kws, method, store) + push!(possibles, method) + end + end + end + end + possibles +end + +function match_method(args::Vector{Any}, kws::Vector{Any}, method::SymbolServer.MethodStore, store) + !isempty(kws) && isempty(method.kws) && return false + nmargs = length(method.sig) + varargval = nothing + if nmargs > 0 && last(method.sig)[2] isa SymbolServer.FakeTypeofVararg + if length(args) == nmargs - 1 + nmargs -= 1 + # vararg can be zero length + elseif length(args) >= nmargs + # set aside the type param of the Vararg for later use + varargval = last(method.sig)[2].T + end + end + if length(args) == nmargs + for i in 1:length(args) + if varargval !== nothing && i >= nmargs + !_issubtype(args[i], varargval, store) && !_issubtype(varargval, args[i], store) && return false + else + !_issubtype(args[i], method.sig[i][2], store) && !_issubtype(method.sig[i][2], args[i], store) && return false + end + + end 
+ return true + end + return false +end + +function match_method(args::Vector{Any}, kws::Vector{Any}, method::EXPR, store) + margs, mkws = [], [] + vararg = false + if CSTParser.defines_struct(method) + for i in 1:length(method.args[3].args) + arg = method.args[3].args[i] + if defines_function(arg) + # Hit an inner constructor so forget about the default one. + for arg in method.args[3].args + if defines_function(arg) + !match_method(args, kws, arg, store) && return false + end + end + return true + end + push!(margs, arg_type(arg, true)) + end + else + sig = CSTParser.rem_decl(CSTParser.get_sig(method)) + margs, mopts, mkws = method_arg_types(sig) + # vararg + if length(sig.args) > 0 + if CSTParser.issplat(last(sig.args)) + vararg = true + end + end + end + !isempty(kws) && isempty(mkws) && return false + + if length(margs) < length(args) + for i in 1:min(length(mopts), length(args) - length(margs)) + push!(margs, mopts[i]) + end + if vararg + for _ in 1:length(args) - length(margs) + push!(margs, CoreTypes.Any) + end + end + end + + if length(args) == length(margs) || (vararg && length(args) == length(margs) - 1) + for i in 1:length(args) + !_issubtype(args[i], margs[i], store) && !_issubtype(margs[i], args[i], store) && return false + end + return true + end + return false +end + +function refof_call_func(x) + if isidentifier(first(x.args)) && hasref(first(x.args)) + return refof(first(x.args)) + elseif is_getfield_w_quotenode(x.args[1]) && (rhs = rhs_of_getfield(x.args[1])) !== nothing && hasref(rhs) + return refof(rhs) + else + return + end +end + +function is_sig_of_method(sig::EXPR, method = maybe_get_parent_fexpr(sig, defines_function)) + method !== nothing && sig == CSTParser.get_sig(method) +end + +function method_of_callable_datatype(b::Binding) + if b.type isa Binding && b.type.type === CoreTypes.DataType + for ref in b.type.refs + if ref isa EXPR && ref.parent isa EXPR && isdeclaration(ref.parent) && is_in_fexpr(ref.parent, x -> x.parent isa EXPR && 
x.parent.head === :call && x == x.parent.args[1] && is_in_funcdef(x.parent)) + return get_parent_fexpr(ref, defines_function) + end + end + end +end diff --git a/src/StaticLint/references.jl b/src/StaticLint/references.jl new file mode 100644 index 0000000..390303a --- /dev/null +++ b/src/StaticLint/references.jl @@ -0,0 +1,316 @@ +function setref!(x::EXPR, binding::Binding) + if !hasmeta(x) + x.meta = Meta() + end + x.meta.ref = binding + push!(binding.refs, x) +end + +function setref!(x::EXPR, binding) + if !hasmeta(x) + x.meta = Meta() + end + x.meta.ref = binding +end + + +# Main function to be called. Given the `state` tries to determine what `x` +# refers to. If it remains unresolved and is in a delayed evaluation scope +# (i.e. a function) it gets pushed to list (.urefs) to be resolved after we've +# run over the entire top-level scope. +function resolve_ref(x, state) + if !(parentof(x) isa EXPR && headof(parentof(x)) === :quotenode) + resolve_ref(x, state.scope, state) + end +end + + +# The first method that is tried. Searches the current scope for local bindings +# that match `x`. Steps: +# 1. Check whether we've already checked this scope (inifinite loops are +# possible when traversing nested modules.) +# 2. Check what sort of EXPR we're dealing with, separate name from EXPR that +# binds. +# 3. Look in the scope's variable list for a binding matching the name. +# 4. If 3. is unsuccessful, check whether the scope imports any modules then check them. +# 5. If no match is found within this scope check the parent scope. +# The return value is a boolean that is false if x should point to something but +# can't be resolved. 
+ +function resolve_ref(x::EXPR, scope::Scope, state::State)::Bool + # if the current scope is a soft scope we should check the parent scope first + # before trying to resolve the ref locally + # if is_soft_scope(scope) && parentof(scope) isa Scope + # resolve_ref(x, parentof(scope), state) && return true + # end + + hasref(x) && return true + resolved = false + + if is_getfield(x) + return resolve_getfield(x, scope, state) + elseif iskwarg(x) + # Note to self: this seems wronge - Binding should be attached to entire Kw EXPR. + if isidentifier(x.args[1]) && !hasbinding(x.args[1]) + setref!(x.args[1], Binding(x.args[1], nothing, nothing, [])) + elseif isdeclaration(x.args[1]) && isidentifier(x.args[1].args[1]) && !hasbinding(x.args[1].args[1]) + if hasbinding(x.args[1]) + setref!(x.args[1].args[1], bindingof(x.args[1])) + else + setref!(x.args[1].args[1], Binding(x.args[1], nothing, nothing, [])) + end + end + return true + elseif is_special_macro_term(x) || new_within_struct(x) + setref!(x, Binding(noname, nothing, nothing, [])) + return true + end + mn = nameof_expr_to_resolve(x) + mn === nothing && return true + + if scopehasbinding(scope, mn) + if x.parent.head === :public + scope.names[mn].is_public = true + end + setref!(x, scope.names[mn]) + resolved = true + elseif scope.modules isa Dict && length(scope.modules) > 0 + for m in values(scope.modules) + resolved = resolve_ref_from_module(x, m, state) + resolved && return true + end + end + if !resolved && !CSTParser.defines_module(scope.expr) && parentof(scope) isa Scope + return resolve_ref(x, parentof(scope), state) + end + return resolved +end + +# Searches a module store for a binding/variable that matches the reference `x1`. +function resolve_ref_from_module(x1::EXPR, m::SymbolServer.ModuleStore, state::State)::Bool + hasref(x1) && return true + + if CSTParser.ismacroname(x1) + x = x1 + if valof(x) == "@." && m.name == VarRef(nothing, :Base) + # @. gets converted to @__dot__, probably during lowering. 
+ setref!(x, m[:Broadcast][Symbol("@__dot__")]) + return true + end + + mn = Symbol(valof(x)) + if isexportedby(mn, m) + setref!(x, maybe_lookup(m[mn], state)) + return true + end + elseif isidentifier(x1) + x = x1 + if Symbol(valof(x)) == m.name.name + setref!(x, m) + return true + elseif isexportedby(x, m) + setref!(x, maybe_lookup(m[Symbol(valof(x))], state)) + return true + end + end + return false +end + +function resolve_ref_from_module(x::EXPR, scope::Scope, state::State)::Bool + hasref(x) && return true + + mn = nameof_expr_to_resolve(x) + mn === nothing && return true + + # 1) If the scope is a module, allow resolving the module name itself + if CSTParser.defines_module(scope.expr) + n = CSTParser.get_name(scope.expr) + if CSTParser.isidentifier(n) && mn == CSTParser.valof(n) + b = bindingof(scope.expr) # moduleโ€™s binding + if b isa Binding + setref!(x, b) + return true + end + end + end + + # 2) Resolve exported names from this module scope + if scope_exports(scope, mn, state) + setref!(x, scope.names[mn]) + return true + end + + return false +end + +""" + scope_exports(scope::Scope, name::String) + +Does the scope export a variable called `name`? +""" +function scope_exports(scope::Scope, name::String, state) + if scopehasbinding(scope, name) && (b = scope.names[name]) isa Binding + initial_pass_on_exports(scope.expr, name, state) + for ref in b.refs + if ref isa EXPR && parentof(ref) isa EXPR && headof(parentof(ref)) === :export + return true + end + end + end + return false +end + +""" + initial_pass_on_exports(x::EXPR, server) + +Export statements need to be (pseudo) evaluated each time we consider +whether a variable is made available by an import statement. 
+""" + +function initial_pass_on_exports(x::EXPR, name, state) + for a in x.args[3] # module block expressions + if headof(a) === :export + for i = 1:length(a.args) + if isidentifier(a.args[i]) && valof(a.args[i]) == name && !hasref(a.args[i]) + Delayed(scopeof(x), state.env, state.server)(a.args[i]) + end + end + end + end +end + +# Fallback method +function resolve_ref(x::EXPR, m, state::State)::Bool + return hasref(x)::Bool +end + +rhs_of_getfield(x::EXPR) = CSTParser.is_getfield_w_quotenode(x) ? x.args[2].args[1] : x +lhs_of_getfield(x::EXPR) = rhs_of_getfield(x.args[1]) + +""" + resolve_getfield(x::EXPR, parent::Union{EXPR,Scope,ModuleStore,Binding}, state::State)::Bool + +Given an expression of the form `parent.x` try to resolve `x`. The method +called with `parent::EXPR` resolves the reference for `parent`, other methods +then check whether the Binding/Scope/ModuleStore to which `parent` points has +a field matching `x`. +""" +function resolve_getfield(x::EXPR, scope::Scope, state::State)::Bool + hasref(x) && return true + resolved = resolve_ref(x.args[1], scope, state) + if isidentifier(x.args[1]) + lhs = x.args[1] + elseif CSTParser.is_getfield_w_quotenode(x.args[1]) + lhs = lhs_of_getfield(x) + else + return resolved + end + if resolved && (rhs = rhs_of_getfield(x)) !== nothing + resolved = resolve_getfield(rhs, refof(lhs), state) + end + return resolved +end + + +function resolve_getfield(x::EXPR, parent_type::EXPR, state::State)::Bool + hasref(x) && return true + resolved = false + if isidentifier(x) + if CSTParser.defines_module(parent_type) && scopeof(parent_type) isa Scope + resolved = resolve_ref(x, scopeof(parent_type), state) + elseif CSTParser.defines_struct(parent_type) + if scopehasbinding(scopeof(parent_type), valofid(x)) + setref!(x, scopeof(parent_type).names[valofid(x)]) + resolved = true + end + end + end + return resolved +end + + +function resolve_getfield(x::EXPR, b::Binding, state::State)::Bool + hasref(x) && return true + resolved = 
false + if b.val isa Binding + resolved = resolve_getfield(x, b.val, state) + elseif b.val isa SymbolServer.ModuleStore || (b.val isa EXPR && CSTParser.defines_module(b.val)) + resolved = resolve_getfield(x, b.val, state) + elseif b.type isa Binding + resolved = resolve_getfield(x, b.type.val, state) + elseif b.type isa SymbolServer.DataTypeStore + resolved = resolve_getfield(x, b.type, state) + end + return resolved +end + +function resolve_getfield(x::EXPR, parent_type, state::State)::Bool + hasref(x) +end + +function is_overloaded(val::SymbolServer.SymStore, scope::Scope) + vr = val.name isa SymbolServer.FakeTypeName ? val.name.name : val.name + haskey(scope.overloaded, vr) +end + +function resolve_getfield(x::EXPR, m::SymbolServer.ModuleStore, state::State)::Bool + hasref(x) && return true + resolved = false + if CSTParser.ismacroname(x) && (val = maybe_lookup(SymbolServer.maybe_getfield(Symbol(valofid(x)), m, getsymbols(state)), state)) !== nothing + setref!(x, val) + resolved = true + elseif isidentifier(x) && (val = maybe_lookup(SymbolServer.maybe_getfield(Symbol(valofid(x)), m, getsymbols(state)), state)) !== nothing + # Check whether variable is overloaded in top-level scope + tls = retrieve_toplevel_scope(state.scope) + # if tls.overloaded !== nothing && (vr = val.name isa SymbolServer.FakeTypeName ? val.name.name : val.name; haskey(tls.overloaded, vr)) + # @info 1 + # setref!(x, tls.overloaded[vr]) + # return true + # end + vr = val.name isa SymbolServer.FakeTypeName ? 
val.name.name : val.name + if haskey(tls.names, valof(x)) && tls.names[valof(x)] isa Binding && tls.names[valof(x)].val isa SymbolServer.FunctionStore + setref!(x, tls.names[valof(x)]) + return true + elseif tls.overloaded !== nothing && haskey(tls.overloaded, vr) + setref!(x, tls.overloaded[vr]) + return true + end + setref!(x, val) + resolved = true + end + return resolved +end + +function resolve_getfield(x::EXPR, parent::SymbolServer.DataTypeStore, state::State)::Bool + hasref(x) && return true + resolved = false + if isidentifier(x) && Symbol(valof(x)) in parent.fieldnames + fi = findfirst(f -> Symbol(valof(x)) == f, parent.fieldnames) + ft = parent.types[fi] + val = SymbolServer._lookup(ft, getsymbols(state), true) + # TODO: Need to handle the case where we get back a FakeUnion, etc. + setref!(x, Binding(noname, nothing, val, [])) + resolved = true + end + return resolved +end + +resolvable_macroname(x::EXPR) = isidentifier(x) && CSTParser.ismacroname(x) && refof(x) === nothing + +nameof_expr_to_resolve(x) = isidentifier(x) ? valofid(x) : nothing + +""" + valofid(x) + +Returns the string value of an expression for which `isidentifier` is true, +i.e. handles NONSTDIDENTIFIERs. +""" +valofid(x::EXPR) = headof(x) === :IDENTIFIER ? valof(x) : valof(x.args[2]) + +""" +new_within_struct(x::EXPR) + +Checks whether x is a reference to `new` within a datatype constructor. 
+"""
+new_within_struct(x::EXPR) = isidentifier(x) && valofid(x) == "new" && is_in_fexpr(x, CSTParser.defines_struct)
+is_special_macro_term(x::EXPR) = isidentifier(x) && (valofid(x) == "__source__" || valofid(x) == "__module__") && is_in_fexpr(x, CSTParser.defines_macro)
diff --git a/src/StaticLint/scope.jl b/src/StaticLint/scope.jl
new file mode 100644
index 0000000..b096b30
--- /dev/null
+++ b/src/StaticLint/scope.jl
@@ -0,0 +1,157 @@
+mutable struct Scope
+    parent::Union{Scope,Nothing}
+    expr::EXPR
+    names::Dict{String,Binding}
+    modules::Union{Nothing,Dict{Symbol,Any}}
+    overloaded::Union{Dict,Nothing}
+end
+Scope(expr) = Scope(nothing, expr, Dict{String,Binding}(), nothing, nothing) # keys are Strings, matching the `names` field type
+function Base.show(io::IO, s::Scope)
+    printstyled(io, headof(s.expr))
+    printstyled(io, " ", join(keys(s.names), ","), color=:yellow)
+    s.modules isa Dict && printstyled(io, " ", join(keys(s.modules), ","), color=:blue)
+end
+
+function overload_method(scope::Scope, b::Binding, vr::SymbolServer.VarRef)
+    if scope.overloaded === nothing
+        scope.overloaded = Dict()
+    end
+    if haskey(scope.overloaded, vr)
+        # TODO: need to check this hasn't already been done
+        push!(scope.overloaded[vr].refs, b.val)
+    else
+        scope.overloaded[vr] = b
+    end
+end
+
+"""
+scopehasmodule(s::Scope, mname::Symbol)::Bool
+
+Checks whether the module `mname` has been `using`ed in `s`.
+"""
+scopehasmodule(s::Scope, mname::Symbol) = s.modules !== nothing && haskey(s.modules, mname)
+
+"""
+    addmoduletoscope!(s, m, [mname::Symbol])
+
+Adds module `m` to the list of used modules in scope `s`. 
+""" +function addmoduletoscope!(s::Scope, m, mname::Symbol) + if s.modules === nothing + s.modules = Dict{Symbol,Any}() + end + s.modules[mname] = m +end +addmoduletoscope!(s::Scope, m::SymbolServer.ModuleStore) = addmoduletoscope!(s, m, m.name.name) +addmoduletoscope!(s::Scope, m::EXPR) = CSTParser.defines_module(m) && addmoduletoscope!(s, scopeof(m), Symbol(valof(CSTParser.get_name(m)))) +addmoduletoscope!(s::Scope, s1::Scope) = CSTParser.defines_module(s1.expr) && addmoduletoscope!(s, s1, Symbol(valof(CSTParser.get_name(s1.expr)))) + + +getscopemodule(s::Scope, m::Symbol) = s.modules[m] + +""" + scopehasbinding(s::Scope, n::String) + +Checks whether s has a binding for variable named `n`. +""" +scopehasbinding(s::Scope, n::String) = haskey(s.names, n) + +is_soft_scope(scope::Scope) = scope.expr.head == :for || scope.expr.head == :while || scope.expr.head == :try + +""" + introduces_scope(x::EXPR, state) + +Does this expression introduce a new scope? +""" +function introduces_scope(x::EXPR, state) + # TODO: remove unused 2nd argument. + if CSTParser.isassignment(x) && (CSTParser.is_func_call(x.args[1]) || CSTParser.iscurly(x.args[1])) + return true + elseif CSTParser.defines_anon_function(x) + return true + elseif CSTParser.iswhere(x) + # unless in func def signature + return !_in_func_or_struct_def(x) + elseif CSTParser.istuple(x) && CSTParser.hastrivia(x) && ispunctuation(x.trivia[1]) && length(x.args) > 0 && isassignment(x.args[1]) + # named tuple + return true + elseif headof(x) === :function || + headof(x) === :macro || + headof(x) === :for || + headof(x) === :while || + headof(x) === :let || + headof(x) === :generator || # and Flatten? 
+        headof(x) === :try ||
+        headof(x) === :do ||
+        headof(x) === :module ||
+        headof(x) === :abstract ||
+        headof(x) === :primitive ||
+        headof(x) === :struct
+        return true
+    end
+    return false
+end
+
+
+hasscope(x::EXPR) = hasmeta(x) && hasscope(x.meta)
+scopeof(x) = nothing
+scopeof(x::EXPR) = scopeof(x.meta)
+CSTParser.parentof(s::Scope) = s.parent
+
+function setscope!(x::EXPR, s)
+    if !hasmeta(x)
+        x.meta = Meta()
+    end
+    x.meta.scope = s
+end
+
+"""
+    scopes(x::EXPR, state)
+
+Called when traversing the syntax tree and handles the association of
+scopes with expressions. On the first pass this will add scopes as
+necessary, on following passes it empties it.
+"""
+function scopes(x::EXPR, state)
+    clear_scope(x)
+    if scopeof(x) === nothing && introduces_scope(x, state)
+        setscope!(x, Scope(x))
+    end
+    s0 = state.scope
+    if headof(x) === :file
+        setscope!(x, state.scope)
+        add_eval_method(x, state)
+    elseif scopeof(x) isa Scope
+        scopeof(x) != s0 && setparent!(scopeof(x), s0)
+        state.scope = scopeof(x)
+        if headof(x) === :module && headof(x.args[1]) === :TRUE # Add default modules to a new module
+            state.scope.modules = Dict{Symbol,Any}() # TODO: only create new Dict if not assigned?
+            state.scope.modules[:Base] = getsymbols(state)[:Base]
+            state.scope.modules[:Core] = getsymbols(state)[:Core]
+            add_eval_method(x, state)
+        elseif headof(x) === :module && headof(x.args[1]) === :FALSE
+            state.scope.modules = Dict{Symbol,Any}() # Symbol keys: field is Union{Nothing,Dict{Symbol,Any}} and indexed with :Core below
+            state.scope.modules[:Core] = getsymbols(state)[:Core]
+            add_eval_method(x, state)
+        end
+        if headof(x) === :module && bindingof(x) !== nothing # Add reference to out of scope binding (i.e. 
itself) + # state.scope.names[bindingof(x).name] = bindingof(x) + # TODO: move this to the binding stage + add_binding(x, state) + # elseif headof(x) === :flatten && headof(x[1]) === CSTParser.Generator && length(x[1]) > 0 && headof(x[1][1]) === CSTParser.Generator + # setscope!(x[1][1], nothing) + end + end + return s0 +end + +# Add an `eval` method +function add_eval_method(x, state) + mod = if x.head === :module + CSTParser.isidentifier(x.args[3]) ? Symbol(valof(x.args[3])) : :unknown + else + Symbol("top-level") + end + meth = SymbolServer.MethodStore(:eval, mod, "", 0, [:expr => SymbolServer.FakeTypeName(SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Core), :Any), [])], [], Any) + state.scope.names["eval"] = Binding(x, SymbolServer.FunctionStore(SymbolServer.VarRef(nothing, :nothing), SymbolServer.MethodStore[meth],"", SymbolServer.VarRef(nothing, :nothing), false), getsymbols(state)[:Core][:DataType], []) +end diff --git a/src/StaticLint/server.jl b/src/StaticLint/server.jl new file mode 100644 index 0000000..2293aa8 --- /dev/null +++ b/src/StaticLint/server.jl @@ -0,0 +1,101 @@ +#= +Project trees are usually made up of multiple files. An AbstractServer holds the AbstractFiles that represent this tree. FileServer is the basic implementation and assumes files are available and readable from disc. (LanguageServer illustrates another implementaiton). The accompanying functions summarised below are required for making an alternative implementation. + +Interface spec. 
+AbstractServer :-> (has/canload/load/set/get)file, getsymbols, getsymbolextends +AbstractFile :-> (get/set)path, (get/set)root, (get/set)cst, semantic_pass, (get/set)server +=# +abstract type AbstractServer end +abstract type AbstractFile end + +mutable struct File + path::String + source::String + cst::EXPR + root::Union{Nothing,File} + server +end + +mutable struct FileServer <: AbstractServer + files::Dict{String,File} + roots::Set{File} + workspacepackages::Dict{String,File} # list of files that may represent within-workspace packages + external_env::ExternalEnv +end +FileServer() = FileServer(Dict{String,File}(), Set{File}(), Dict{String,File}(), ExternalEnv(Dict{Symbol,SymbolServer.ModuleStore}(:Base => SymbolServer.stdlibs[:Base], :Core => SymbolServer.stdlibs[:Core]), SymbolServer.collect_extended_methods(SymbolServer.stdlibs), Symbol[])) + + +hasfile(server::FileServer, path::String) = haskey(server.files, path) +canloadfile(server, path) = isfile(path) +function setfile(server::FileServer, path::String, file::File) + server.files[path] = file +end +getfile(server::FileServer, path::String) = server.files[path] +function loadfile(server::FileServer, path::String) + try + source = read(path, String) + cst = CSTParser.parse(source, true) + f = File(path, source, cst, nothing, server) + setroot(f, f) + setfile(server, path, f) + return getfile(server, path) + catch + @info "Could not load $(path) from disk." + rethrow() + end +end + +getsymbols(env::ExternalEnv) = env.symbols +getsymbols(state::State) = getsymbols(state.env) + +getsymbolextendeds(env::ExternalEnv) = env.extended_methods +getsymbolextendeds(state::State) = getsymbolextendeds(state.env) + + +""" + getenv(file::File, server::FileServer) + +Get the relevant `ExternalEnv` for a given file. +""" +function getenv(file::File, server::FileServer) + # For FileServer this approach is equivalent to the previous behaviour. Other AbstractServers + # (e.g. 
LanguageServerInstance) can use this function to associate different files (or trees of + # files) with different environments. + server.external_env +end + + +getpath(file::File) = file.path + +getroot(file::File) = file.root +function setroot(file::File, root::File) + file.root = root + return file +end + +getcst(file::File) = file.cst +function setcst(file::File, cst::EXPR) + file.cst = cst + return file +end + +getserver(file::File) = file.server +function setserver(file::File, server::FileServer) + file.server = server + return file +end + +function Base.display(f::File) + println(f.path) +end + +function Base.display(s::FileServer) + n = length(s.files) + println(n, "-file Server") + cnt = 0 + for p in keys(s.files) + cnt += 1 + println(" ", p) + cnt > 10 && break + end +end diff --git a/src/StaticLint/subtypes.jl b/src/StaticLint/subtypes.jl new file mode 100644 index 0000000..1c2a478 --- /dev/null +++ b/src/StaticLint/subtypes.jl @@ -0,0 +1,70 @@ +function _issubtype(a, b, store) + _isany(b) && return true + _type_compare(a, b) && return true + sup_a = _super(a, store) + _type_compare(sup_a, b) && return true + !_isany(sup_a) && return _issubtype(sup_a, b, store) + return false +end + +_isany(x::SymbolServer.FakeTypeName) = x.name == VarRef(VarRef(nothing, :Core), :Any) +_isany(x::SymbolServer.DataTypeStore) = x.name.name == VarRef(VarRef(nothing, :Core), :Any) +_isany(x) = false + +_type_compare(a::SymbolServer.DataTypeStore, b::SymbolServer.DataTypeStore) = a.name == b.name +_type_compare(a::SymbolServer.FakeTypeName, b::SymbolServer.FakeTypeName) = a == b +_type_compare(a::SymbolServer.FakeTypeName, b::SymbolServer.DataTypeStore) = a == b.name +_type_compare(a::SymbolServer.DataTypeStore, b::SymbolServer.FakeTypeName) = a.name == b +_type_compare(a::SymbolServer.DataTypeStore, b::SymbolServer.FakeUnion) = _type_compare(a, b.a) || +_type_compare(a, b.b) + +function _type_compare(a::SymbolServer.DataTypeStore, b::SymbolServer.FakeTypeVar) + if b.ub isa 
SymbolServer.FakeUnion
+        return _type_compare(a, b.ub)
+    end
+    a == b
+end
+
+_type_compare(a, b) = a == b
+
+_super(a::SymbolServer.DataTypeStore, store) = SymbolServer._lookup(a.super.name, store)
+_super(a::SymbolServer.FakeTypeVar, store) = a.ub
+_super(a::SymbolServer.FakeUnionAll, store) = a.body
+_super(a::SymbolServer.FakeTypeName, store) = _super(SymbolServer._lookup(a.name, store), store)
+@static if !(Vararg isa Type)
+    _super(a::SymbolServer.FakeTypeofVararg, store) = CoreTypes.Any
+end
+
+function _super(b::Binding, store)
+    StaticLint.CoreTypes.isdatatype(b.type) || error()
+    b.val isa Binding && return _super(b.val, store)
+    sup = _super(b.val, store)
+    if sup isa EXPR && StaticLint.hasref(sup)
+        StaticLint.refof(sup)
+    else
+        store[:Core][:Any]
+    end
+end
+
+function _super(x::EXPR, store)::Union{EXPR,Nothing}
+    if x.head === :struct
+        _super(x.args[2], store)
+    elseif x.head === :abstract || x.head === :primitive # fixed typo (:primtive) so primitive type declarations are handled
+        _super(x.args[1], store)
+    elseif CSTParser.issubtypedecl(x)
+        x.args[2]
+    elseif CSTParser.isbracketed(x)
+        _super(x.args[1], store)
+    end
+end
+
+function subtypes(T::Binding)
+    @assert CSTParser.defines_abstract(T.val)
+    subTs = []
+    for r in T.refs
+        if r isa EXPR && r.parent isa EXPR && CSTParser.issubtypedecl(r.parent) && r.parent.parent isa EXPR && CSTParser.defines_datatype(r.parent.parent)
+            push!(subTs, r.parent.parent)
+        end
+    end
+    subTs
+end
diff --git a/src/StaticLint/type_inf.jl b/src/StaticLint/type_inf.jl
new file mode 100644
index 0000000..fb2e345
--- /dev/null
+++ b/src/StaticLint/type_inf.jl
@@ -0,0 +1,335 @@
+function settype!(b::Binding, type::Binding)
+    push!(type.refs, b)
+    b.type = type
+end
+
+function settype!(b::Binding, type)
+    b.type = type
+end
+
+function infer_type(binding::Binding, scope, state)
+    if binding isa Binding
+        binding.type !== nothing && return
+        if binding.val isa EXPR && CSTParser.defines_module(binding.val)
+            settype!(binding, CoreTypes.Module)
+        elseif binding.val isa EXPR && 
CSTParser.defines_function(binding.val) + settype!(binding, CoreTypes.Function) + elseif binding.val isa EXPR && CSTParser.defines_datatype(binding.val) + settype!(binding, CoreTypes.DataType) + elseif binding.val isa EXPR + if isassignment(binding.val) + if CSTParser.is_func_call(binding.val.args[1]) + settype!(binding, CoreTypes.Function) + else + infer_type_assignment_rhs(binding, state, scope) + end + elseif binding.val.head isa EXPR && valof(binding.val.head) == "::" + infer_type_decl(binding, state, scope) + elseif iswhere(parentof(binding.val)) + settype!(binding, CoreTypes.DataType) + end + end + end +end + +function infer_type_assignment_rhs(binding, state, scope) + is_destructuring = false + lhs = binding.val.args[1] + rhs = binding.val.args[2] + if is_loop_iter_assignment(binding.val) + settype!(binding, infer_eltype(rhs, state)) + elseif headof(rhs) === :ref && length(rhs.args) > 1 + ref = refof_maybe_getfield(rhs.args[1]) + if ref isa Binding && ref.val isa EXPR + settype!(binding, infer_eltype(ref.val, state)) + end + else + if CSTParser.is_func_call(rhs) + if CSTParser.istuple(lhs) + if CSTParser.isparameters(lhs.args[1]) + is_destructuring = true + else + return + end + end + callname = CSTParser.get_name(rhs) + if isidentifier(callname) + resolve_ref(callname, scope, state) + if hasref(callname) + rb = get_root_method(refof(callname), state.server) + if (rb isa Binding && (CoreTypes.isdatatype(rb.type) || rb.val isa SymbolServer.DataTypeStore)) || rb isa SymbolServer.DataTypeStore + if is_destructuring + infer_destructuring_type(binding, rb) + else + settype!(binding, rb) + end + end + end + end + elseif headof(rhs) === :INTEGER + settype!(binding, CoreTypes.Int) + elseif headof(rhs) === :HEXINT + if length(rhs.val) < 5 + settype!(binding, CoreTypes.UInt8) + elseif length(rhs.val) < 7 + settype!(binding, CoreTypes.UInt16) + elseif length(rhs.val) < 11 + settype!(binding, CoreTypes.UInt32) + else + settype!(binding, CoreTypes.UInt64) + end + elseif 
headof(rhs) === :FLOAT + settype!(binding, CoreTypes.Float64) + elseif CSTParser.isstringliteral(rhs) + settype!(binding, CoreTypes.String) + elseif headof(rhs) === :TRUE || headof(rhs) === :FALSE + settype!(binding, CoreTypes.Bool) + elseif isidentifier(rhs) || is_getfield_w_quotenode(rhs) + refof_rhs = isidentifier(rhs) ? refof(rhs) : refof_maybe_getfield(rhs) + if refof_rhs isa Binding + if refof_rhs.val isa SymbolServer.GenericStore && refof_rhs.val.typ isa SymbolServer.FakeTypeName + settype!(binding, maybe_lookup(refof_rhs.val.typ.name, state)) + elseif refof_rhs.val isa SymbolServer.FunctionStore + settype!(binding, CoreTypes.Function) + elseif refof_rhs.val isa SymbolServer.DataTypeStore + settype!(binding, CoreTypes.DataType) + else + settype!(binding, refof_rhs.type) + end + elseif refof_rhs isa SymbolServer.GenericStore && refof_rhs.typ isa SymbolServer.FakeTypeName + settype!(binding, maybe_lookup(refof_rhs.typ.name, state)) + elseif refof_rhs isa SymbolServer.FunctionStore + settype!(binding, CoreTypes.Function) + elseif refof_rhs isa SymbolServer.DataTypeStore + settype!(binding, CoreTypes.DataType) + end + end + end +end + +function infer_destructuring_type(binding, rb::SymbolServer.DataTypeStore) + assigned_name = CSTParser.get_name(binding.val) + for (fieldname, fieldtype) in zip(rb.fieldnames, rb.types) + if fieldname == assigned_name + settype!(binding, fieldtype) + return + end + end +end +function infer_destructuring_type(binding::Binding, rb::EXPR) + assigned_name = string(to_codeobject(binding.name)) + scope = scopeof(rb) + names = scope.names + if haskey(names, assigned_name) + b = names[assigned_name] + settype!(binding, b.type) + end +end +infer_destructuring_type(binding, rb::Binding) = infer_destructuring_type(binding, rb.val) + +function infer_type_decl(binding, state, scope) + t = binding.val.args[2] + if isidentifier(t) + resolve_ref(t, scope, state) + end + if iscurly(t) + t = t.args[1] + resolve_ref(t, scope, state) + end + if 
CSTParser.is_getfield_w_quotenode(t) + resolve_getfield(t, scope, state) + t = t.args[2].args[1] + end + if refof(t) isa Binding + rb = get_root_method(refof(t), state.server) + if rb isa Binding && CoreTypes.isdatatype(rb.type) + settype!(binding, rb) + else + settype!(binding, refof(t)) + end + else + edt = get_eventual_datatype(refof(t), state.env) + if edt !== nothing + settype!(binding, edt) + end + end +end + +get_eventual_datatype(_, _::ExternalEnv) = nothing +get_eventual_datatype(b::SymbolServer.DataTypeStore, _::ExternalEnv) = b +function get_eventual_datatype(b::SymbolServer.FunctionStore, env::ExternalEnv) + return SymbolServer._lookup(b.extends, getsymbols(env)) +end + +# Work out what type a bound variable has by functions that are called on it. +function infer_type_by_use(b::Binding, env::ExternalEnv) + b.type !== nothing && return # b already has a type + possibletypes = [] + visitedmethods = [] + ifbranch = nothing + for ref in b.refs + new_possibles = [] + ref isa EXPR || continue # skip non-EXPR (i.e. used for handling of globals) + # Some simple handling for :if blocks + if ifbranch === nothing + ifbranch = find_if_parents(ref) + else + newbranch = find_if_parents(ref) + if !in_same_if_branch(ifbranch, newbranch) + return + end + ifbranch = newbranch + end + check_ref_against_calls(ref, visitedmethods, new_possibles, env) + if !isempty(new_possibles) + if isempty(possibletypes) + possibletypes = new_possibles + else + possibletypes = intersect(possibletypes, new_possibles) + end + if isempty(possibletypes) + return + end + end + end + # Only do something if we're left with a singleton set at the end. 
+ if length(possibletypes) == 1 + type = first(possibletypes) + if type isa Binding + settype!(b, type) + elseif type isa SymbolServer.DataTypeStore + settype!(b, type) + elseif type isa SymbolServer.VarRef + settype!(b, SymbolServer._lookup(type, getsymbols(env))) # could be nothing + elseif type isa SymbolServer.FakeTypeName && isempty(type.parameters) + settype!(b, SymbolServer._lookup(type.name, getsymbols(env))) # could be nothing + end + end +end + +function check_ref_against_calls(x, visitedmethods, new_possibles, env::ExternalEnv) + if is_arg_of_resolved_call(x) && !call_is_func_sig(x.parent) + sig = parentof(x) + # x is argument of function call (func) and we know what that function is + if CSTParser.isidentifier(sig.args[1]) + func = refof(sig.args[1]) + else + func = refof(sig.args[1].args[2].args[1]) + end + argi = get_arg_position_in_call(sig, x) # what slot does ref sit in? + tls = retrieve_toplevel_scope(x) + if func isa Binding + for method in func.refs + method = get_method(method) + method === nothing && continue + if method isa EXPR + if defines_function(method) + get_arg_type_at_position(method, argi, new_possibles) + # elseif CSTParser.defines_struct(method) + # Can we ignore this? Default constructor gives us no type info? + end + else # elseif what? 
+ iterate_over_ss_methods(method, tls, env, m -> (get_arg_type_at_position(m, argi, new_possibles);false)) + end + end + else + iterate_over_ss_methods(func, tls, env, m -> (get_arg_type_at_position(m, argi, new_possibles);false)) + end + end +end + +function call_is_func_sig(call::EXPR) + # assume initially called on a :call + if call.parent isa EXPR + if call.parent.head === :function || CSTParser.is_eq(call.parent.head) + true + elseif isdeclaration(call.parent) || iswhere(call.parent) + call_is_func_sig(call.parent) + else + false + end + else + false + end +end + +function is_arg_of_resolved_call(x::EXPR) + parentof(x) isa EXPR && headof(parentof(x)) === :call && # check we're in a call signature + (caller = parentof(x).args[1]) !== x && # and that x is not the caller + ((CSTParser.isidentifier(caller) && hasref(caller)) || (is_getfield(caller) && headof(caller.args[2]) === :quotenode && hasref(caller.args[2].args[1]))) +end + +function get_arg_position_in_call(sig::EXPR, arg) + for i in 1:length(sig.args) + sig.args[i] == arg && return i + end +end + +function get_arg_type_at_position(method, argi, types) + if method isa EXPR + sig = CSTParser.get_sig(method) + if sig !== nothing && + sig.args !== nothing && argi <= length(sig.args) && + hasbinding(sig.args[argi]) && + (argb = bindingof(sig.args[argi]); argb isa Binding && argb.type !== nothing) && + !(argb.type in types) + push!(types, argb.type) + return + end + elseif method isa SymbolServer.DataTypeStore || method isa SymbolServer.FunctionStore + for m in method.methods + get_arg_type_at_position(m, argi, types) + end + end + return +end + +function get_arg_type_at_position(m::SymbolServer.MethodStore, argi, types) + if length(m.sig) >= argi && m.sig[argi][2] != SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Core), :Any) && !(m.sig[argi][2] in types) + push!(types, m.sig[argi][2]) + end +end + +# Assumes x.head.val == "=" +is_loop_iter_assignment(x::EXPR) = x.parent isa EXPR && ((x.parent.head == :for 
|| x.parent.head == :generator) || (x.parent.head == :block && x.parent.parent isa EXPR && (x.parent.parent.head == :for || x.parent.parent.head == :generator))) + +function infer_eltype(x::EXPR, state) + if isidentifier(x) && hasref(x) # assume is IDENT + r = refof(x) + if r isa Binding && r.val isa EXPR + if isassignment(r.val) && r.val.args[2] != x + return infer_eltype(r.val.args[2], state) + end + end + elseif headof(x) === :ref && hasref(x.args[1]) + r = refof(x.args[1]) + if r isa Binding && CoreTypes.isdatatype(r.type) + return r + end + edt = get_eventual_datatype(r, state.env) + if edt isa SymbolServer.DataTypeStore + return edt + end + elseif headof(x) === :STRING + return CoreTypes.Char + elseif headof(x) === :call && length(x.args) > 2 && CSTParser.is_colon(x.args[1]) + if headof(x.args[2]) === :INTEGER && headof(x.args[3]) === :INTEGER + return CoreTypes.Int + elseif headof(x.args[2]) === :FLOAT && headof(x.args[3]) === :FLOAT + return CoreTypes.Float64 + elseif headof(x.args[2]) === :CHAR && headof(x.args[3]) === :CHAR + return CoreTypes.Char + end + elseif hasbinding(x) && isdeclaration(x) && length(x.args) == 2 + return maybe_get_vec_eltype(x.args[2]) + end +end + +function maybe_get_vec_eltype(t) + if iscurly(t) + lhs_ref = refof_maybe_getfield(t.args[1]) + if lhs_ref isa SymbolServer.DataTypeStore && CoreTypes.isarray(lhs_ref) && length(t.args) > 1 + refof(t.args[2]) + end + end +end diff --git a/src/StaticLint/utils.jl b/src/StaticLint/utils.jl new file mode 100644 index 0000000..fb187cc --- /dev/null +++ b/src/StaticLint/utils.jl @@ -0,0 +1,337 @@ +quoted(x) = headof(x) === :quote || headof(x) === :quotenode +unquoted(x) = isunarycall(x) && valof(x.args[1]) == "\$" + +function remove_ref(x::EXPR) + if hasref(x) && refof(x) isa Binding && refof(x).refs isa Vector + for ia in enumerate(refof(x).refs) + if ia[2] == x + deleteat!(refof(x).refs, ia[1]) + setref!(x, nothing) + return + end + end + error() + end +end + +function clear_binding(x::EXPR) 
+ if bindingof(x) isa Binding + for r in bindingof(x).refs + if r isa EXPR + setref!(r, nothing) + elseif r isa Binding + if r.type == bindingof(x) + r.type = nothing + else + clear_binding(r) + end + end + end + x.meta.binding = nothing + end +end +function clear_scope(x::EXPR) + if hasmeta(x) && scopeof(x) isa Scope + setparent!(scopeof(x), nothing) + empty!(scopeof(x).names) + if headof(x) === :file && scopeof(x).modules isa Dict && scopehasmodule(scopeof(x), :Base) && scopehasmodule(scopeof(x), :Core) + m1, m2 = getscopemodule(scopeof(x), :Base), getscopemodule(scopeof(x), :Core) + empty!(scopeof(x).modules) + addmoduletoscope!(scopeof(x), m1) + addmoduletoscope!(scopeof(x), m2) + else + scopeof(x).modules = nothing + end + if scopeof(x).overloaded !== nothing + empty!(scopeof(x).overloaded) + end + end +end + +function clear_ref(x::EXPR) + if refof(x) isa Binding + if refof(x).refs isa Vector + for i in 1:length(refof(x).refs) + if refof(x).refs[i] == x + deleteat!(refof(x).refs, i) + break + end + end + end + setref!(x, nothing) + elseif refof(x) !== nothing + setref!(x, nothing) + end +end +function clear_error(x::EXPR) + if hasmeta(x) && x.meta.error !== nothing + x.meta.error = nothing + end +end +function clear_meta(x::EXPR) + clear_binding(x) + clear_ref(x) + clear_scope(x) + clear_error(x) + if x.args !== nothing + for a in x.args + clear_meta(a) + end + end + # if x.trivia !== nothing + # for a in x.trivia + # clear_meta(a) + # end + # end +end + +function get_root_method(b, server) + return b +end + +function get_root_method(b::Binding, server) + if CoreTypes.isfunction(b.type) && !isempty(b.refs) + first(b.refs) + else + b + end +end + +function retrieve_delayed_scope(x) + if (CSTParser.defines_function(x) || CSTParser.defines_macro(x)) && scopeof(x) !== nothing + if parentof(scopeof(x)) !== nothing + return parentof(scopeof(x)) + else + return scopeof(x) + end + else + return retrieve_scope(x) + end + return nothing +end + +function 
retrieve_scope(x) + if scopeof(x) !== nothing + return scopeof(x) + elseif parentof(x) isa EXPR + return retrieve_scope(parentof(x)) + end + return +end + + +# function find_return_statements(x::EXPR) +# rets = EXPR[] +# if CSTParser.defines_function(x) +# find_return_statements(x.args[2], true, rets) +# end +# return rets +# end + +# function find_return_statements(x::EXPR, last_stmt, rets) +# if last_stmt && !(headof(x) === :block || headof(x) === :if || iskw(x)) +# push!(rets, x) +# return rets, false +# end + +# if headof(x) === :return +# push!(rets, x) +# return rets, true +# end + + +# for i = 1:length(x) +# _, stop_iter = find_return_statements(x[i], last_stmt && (i == length(x) || (headof(x) === CSTParser.If && headof(x[i]) === CSTParser.Block)), rets) +# stop_iter && break +# end +# return rets, false +# end + +function find_exported_names(x::EXPR) + exported_vars = EXPR[] + for i in 1:length(x.args[3].args) + expr = x.args[3].args[i] + if headof(expr) === :export + for j = 2:length(expr.args) + if isidentifier(expr.args[j]) && hasref(expr.args[j]) + push!(exported_vars, expr.args[j]) + end + end + end + end + return exported_vars +end + +hasreadperm(p::String) = (uperm(p) & 0x04) == 0x04 + +# check whether a path is in (including subfolders) the julia base dir. Returns "" if not, and the path to the base dir if so. 
+function _is_in_basedir(path::String) + i = findfirst(r".*base", path) + i === nothing && return "" + path1 = path[i]::String + !hasreadperm(path1) && return "" + !isdir(path1) && return "" + files = readdir(path1) + if all(f -> f in files, ["Base.jl", "coreio.jl", "essentials.jl", "exports.jl"]) + return path1 + end + return "" +end + +_is_macrocall_to_BaseDIR(arg) = headof(arg) === :macrocall && length(arg.args) == 2 && valof(arg.args[1]) == "@__DIR__" + + +isexportedby(k::Symbol, m::SymbolServer.ModuleStore) = haskey(m, k) && k in m.exportednames +isexportedby(k::String, m::SymbolServer.ModuleStore) = isexportedby(Symbol(k), m) +isexportedby(x::EXPR, m::SymbolServer.ModuleStore) = isexportedby(valof(x), m) +isexportedby(k, m::SymbolServer.ModuleStore) = false + +function retrieve_toplevel_scope(x::EXPR) + if scopeof(x) !== nothing && is_toplevel_scope(x) + return scopeof(x) + elseif parentof(x) isa EXPR + return retrieve_toplevel_scope(parentof(x)) + else + @info "Tried to reach toplevel scope, no scope found. Final expression $(headof(x))" + return nothing + end +end +retrieve_toplevel_scope(s::Scope) = (is_toplevel_scope(s) || !(parentof(s) isa Scope)) ? s : retrieve_toplevel_scope(parentof(s)) +retrieve_toplevel_or_func_scope(s::Scope) = (is_toplevel_scope(s) || defines_function(s.expr) || !(parentof(s) isa Scope)) ? 
s : retrieve_toplevel_or_func_scope(parentof(s)) + +is_toplevel_scope(s::Scope) = is_toplevel_scope(s.expr) +is_toplevel_scope(x::EXPR) = CSTParser.defines_module(x) || headof(x) === :file + +# b::SymbolServer.FunctionStore or DataTypeStore +# tls is a top-level Scope (expected to contain loaded modules) +# for a FunctionStore b, checks whether additional methods are provided by other packages +# f is a function that returns `true` if we want to break early from the loop + +iterate_over_ss_methods(b, tls, env, f) = false +function iterate_over_ss_methods(b::SymbolServer.FunctionStore, tls::Scope, env::ExternalEnv, f) + for m in b.methods + ret = f(m) + ret && return true + end + if b.extends in keys(getsymbolextendeds(env)) && tls.modules !== nothing + # above should be modified, + rootmod = SymbolServer._lookup(b.extends.parent, getsymbols(env)) # points to the module containing the initial function declaration + if rootmod !== nothing && haskey(rootmod, b.extends.name) # check rootmod exists, and that it has the variable + # find extensoions + if haskey(getsymbolextendeds(env), b.extends) # method extensions listed + for vr in getsymbolextendeds(env)[b.extends] # iterate over packages with extensions + !(SymbolServer.get_top_module(vr) in keys(tls.modules)) && continue + rootmod = SymbolServer._lookup(vr, getsymbols(env)) + !(rootmod isa SymbolServer.ModuleStore) && continue + if haskey(rootmod.vals, b.extends.name) && (rootmod.vals[b.extends.name] isa SymbolServer.FunctionStore || rootmod.vals[b.extends.name] isa SymbolServer.DataTypeStore)# check package is available and has ref + for m in rootmod.vals[b.extends.name].methods # + ret = f(m) + ret && return true + end + end + end + end + end + end + return false +end + +function iterate_over_ss_methods(b::SymbolServer.DataTypeStore, tls::Scope, env::ExternalEnv, f) + if b.name isa SymbolServer.VarRef + bname = b.name + elseif b.name isa SymbolServer.FakeTypeName + bname = b.name.name + end + for m in b.methods + 
ret = f(m) + ret && return true + end + if (bname in keys(getsymbolextendeds(env))) && tls.modules !== nothing + # above should be modified, + rootmod = SymbolServer._lookup(bname.parent, getsymbols(env), true) # points to the module containing the initial function declaration + if rootmod !== nothing && haskey(rootmod, bname.name) # check rootmod exists, and that it has the variable + # find extensoions + if haskey(getsymbolextendeds(env), bname) # method extensions listed + for vr in getsymbolextendeds(env)[bname] # iterate over packages with extensions + !(SymbolServer.get_top_module(vr) in keys(tls.modules)) && continue + rootmod = SymbolServer._lookup(vr, getsymbols(env)) + !(rootmod isa SymbolServer.ModuleStore) && continue + if haskey(rootmod.vals, bname.name) && (rootmod.vals[bname.name] isa SymbolServer.FunctionStore || rootmod.vals[bname.name] isa SymbolServer.DataTypeStore)# check package is available and has ref + for m in rootmod.vals[bname.name].methods # + ret = f(m) + ret && return true + end + end + end + end + end + end + return false +end + + +""" + is_in_fexpr(x::EXPR, f) +Check whether `x` isa the child of an expression for which `f(parent) == true`. +""" +is_in_fexpr(x::EXPR, f) = f(x) || (parentof(x) isa EXPR && is_in_fexpr(parentof(x), f)) + +""" + get_in_fexpr(x::EXPR, f) +Get the `parent` of `x` for which `f(parent) == true`. (is_in_fexpr should be called first.) +""" +get_parent_fexpr(x::EXPR, f) = f(x) ? x : get_parent_fexpr(parentof(x), f) + +maybe_get_parent_fexpr(x::Nothing, f) = nothing +maybe_get_parent_fexpr(x::EXPR, f) = f(x) ? x : maybe_get_parent_fexpr(parentof(x), f) + +issigoffuncdecl(x::EXPR) = parentof(x) isa EXPR ? 
issigoffuncdecl(x, parentof(x)) : false +function issigoffuncdecl(x::EXPR, p::EXPR) + if CSTParser.iswhere(p) || CSTParser.isdeclaration(p) + return issigoffuncdecl(parentof(p)) + elseif CSTParser.defines_function(p) + return true + else + return false + end +end +issigoffuncdecl(x::EXPR, p) = false + +function is_nameof_func(name) + f = get_parent_fexpr(name, CSTParser.defines_function) + f !== nothing && CSTParser.get_name(f) == name +end + +function loose_refs(b::Binding) + b.val isa EXPR || return b.refs # to account for `#global` binding which doesn't have a val + scope = retrieve_scope(b.val) + scope isa Scope && isidentifier(b.name) || return b.refs + name_str = valofid(b.name) + name_str isa String || return b.refs + + if is_soft_scope(scope) && parentof(scope) isa Scope && scopehasbinding(parentof(scope), name_str) && !scopehasbinding(scope, name_str) + scope = parentof(scope) + end + state = LooseRefs(scope.expr, name_str, scope, []) + state(scope.expr) + vcat([r.refs for r in state.result]...) 
+end + +mutable struct LooseRefs + x::EXPR + name::String + scope::Scope + result::Vector{Binding} +end + +function (state::LooseRefs)(x::EXPR) + if hasbinding(x) + ex = bindingof(x).name + if isidentifier(ex) && valofid(ex) == state.name + push!(state.result, bindingof(x)) + end + end + if !hasscope(x) || (hasscope(x) && ((is_soft_scope(scopeof(x)) && !scopehasbinding(scopeof(x), state.name)) || scopeof(x) == state.scope)) + traverse(x, state) + end +end From fbd9e7b45f8302afbd14d01a5b9638d783c80465 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Mon, 24 Nov 2025 19:13:48 -0800 Subject: [PATCH 02/24] Add original SymbolServer files --- src/SymbolServer/SymbolServer.jl | 403 ++++++++++++++++ src/SymbolServer/faketypes.jl | 179 +++++++ src/SymbolServer/indexbasestdlib.jl | 56 +++ src/SymbolServer/indexpackage.jl | 65 +++ src/SymbolServer/serialize.jl | 290 ++++++++++++ src/SymbolServer/server.jl | 138 ++++++ src/SymbolServer/symbols.jl | 689 +++++++++++++++++++++++++++ src/SymbolServer/utils.jl | 704 ++++++++++++++++++++++++++++ 8 files changed, 2524 insertions(+) create mode 100644 src/SymbolServer/SymbolServer.jl create mode 100644 src/SymbolServer/faketypes.jl create mode 100644 src/SymbolServer/indexbasestdlib.jl create mode 100644 src/SymbolServer/indexpackage.jl create mode 100644 src/SymbolServer/serialize.jl create mode 100644 src/SymbolServer/server.jl create mode 100644 src/SymbolServer/symbols.jl create mode 100644 src/SymbolServer/utils.jl diff --git a/src/SymbolServer/SymbolServer.jl b/src/SymbolServer/SymbolServer.jl new file mode 100644 index 0000000..78bd0c5 --- /dev/null +++ b/src/SymbolServer/SymbolServer.jl @@ -0,0 +1,403 @@ +module SymbolServer + +export SymbolServerInstance, getstore + +using Pkg, SHA +using Base: UUID, Process +import Sockets, UUIDs + +# this is required to get parsedocs to work on Julia 1.11 and newer, since the implementation +# moved there +using REPL + +include("faketypes.jl") +include("symbols.jl") +include("utils.jl") 
+include("serialize.jl") +using .CacheStore + +mutable struct SymbolServerInstance + process::Union{Nothing,Base.Process} + depot_path::String + julia_exe_path::String + julia_exe_version::VersionNumber + canceled_processes::Set{Process} + store_path::String + symbolcache_upstream::String + + function SymbolServerInstance(depot_path::String="", store_path::Union{String,Nothing}=nothing, julia_exe::Union{NamedTuple{(:path,:version),Tuple{String,VersionNumber}},Nothing}=nothing; symbolcache_upstream = nothing) + if symbolcache_upstream === nothing + symbolcache_upstream = "https://www.julia-vscode.org/symbolcache" + end + return new(nothing, depot_path, julia_exe === nothing ? joinpath(Sys.BINDIR, Base.julia_exename()) : julia_exe.path, julia_exe === nothing ? VERSION : julia_exe.version, Set{Process}(), store_path === nothing ? abspath(joinpath(@__DIR__, "..", "store")) : store_path, symbolcache_upstream) + end +end + +const GENERAL_REGISTRY_UUID = UUID("23338594-aafe-5451-b93e-139f81909106") +function get_general_pkgs() + dp_before = copy(Base.DEPOT_PATH) + try + # because the env var JULIA_DEPOT_PATH is overritten this is probably the best + # guess depot location + push!(empty!(Base.DEPOT_PATH), joinpath(homedir(), ".julia")) + @static if VERSION >= v"1.7-" + regs = Pkg.Types.Context().registries + i = findfirst(r -> r.name == "General" && r.uuid == GENERAL_REGISTRY_UUID, regs) + i === nothing && return Dict() + return regs[i].pkgs + else + for r in Pkg.Types.collect_registries() + (r.name == "General" && r.uuid == GENERAL_REGISTRY_UUID) || continue + reg = Pkg.Types.read_registry(joinpath(r.path, "Registry.toml")) + return reg["packages"] + end + return Dict() + end + finally + append!(empty!(Base.DEPOT_PATH), dp_before) + end +end + +""" + remove_non_general_pkgs!(pkgs) + +Removes packages that aren't going to be on the symbol cache server because they aren't in the General registry. 
+This avoids leaking private package name & uuid pairs via the url requests to the symbol server. + +If the General registry cannot be found packages cannot be checked, so all packages will be removed. +""" +function remove_non_general_pkgs!(pkgs) + general_pkgs = get_general_pkgs() + if isempty(general_pkgs) + @warn """ + Could not find the General registry when checking for whether packages are public. + All package symbol caches will be generated locally""" + return empty!(pkgs) + end + filter!(pkgs) do pkg + packageuuid(pkg) === nothing && return false + packagename(pkg) === nothing && return false + tree_hash(pkg) === nothing && return false # stdlibs and dev-ed packages don't have tree_hash and aren't cached + @static if VERSION >= v"1.7-" + uuid_match = get(general_pkgs, packageuuid(pkg), nothing) + uuid_match === nothing && return false + uuid_match.name != packagename(pkg) && return false + return true + else + uuid_match = get(general_pkgs, string(packageuuid(pkg)), nothing) + uuid_match === nothing && return false + uuid_match["name"] != packagename(pkg) && return false + return true + end + end + return pkgs +end + +function download_cache_files(ssi, environment_path, progress_callback) + download_dir_parent = joinpath(ssi.store_path, "_downloads") + mkpath(download_dir_parent) + + mktempdir(download_dir_parent) do download_dir + candidates = [ + joinpath(environment_path, "JuliaManifest.toml"), + joinpath(environment_path, "Manifest.toml") + ] + + for manifest_filename in candidates + !isfile(manifest_filename) && continue + + manifest = read_manifest(manifest_filename) + manifest === nothing && continue + + @debug "Downloading cache files for manifest at $(manifest_filename)." 
+ to_download = collect(validate_disc_store(ssi.store_path, manifest)) + try + remove_non_general_pkgs!(to_download) + catch err + # if any errors, err on the side of caution and mark all as private, and continue + @error """ + Symbol cache downloading: Failed to identify which packages to omit based on the General registry. + All packages will be processsed locally""" err + empty!(to_download) + end + isempty(to_download) && continue + + n_done = 0 + n_total = length(to_download) + progress_callback("Downloading cache files...", 0) + t0 = time() + for batch in Iterators.partition(to_download, 100) # 100 connections at a time + @sync for pkg in batch + @async begin + yield() + uuid = packageuuid(pkg) + get_file_from_cloud(manifest, uuid, environment_path, ssi.depot_path, ssi.store_path, download_dir, ssi.symbolcache_upstream) + yield() + n_done += 1 + percentage = round(Int, 100*(n_done/n_total)) + if percentage < 100 + progress_callback("Downloading cache files...", percentage) + end + end + end + end + took = round(time() - t0, sigdigits = 2) + progress_callback("All cache files downloaded (took $(took)s).", 100) + end + end +end + +function getstore(ssi::SymbolServerInstance, environment_path::AbstractString, progress_callback=nothing, error_handler=nothing; download = false) + !ispath(environment_path) && return :success, recursive_copy(stdlibs) + _progress_callback = (msg, p) -> progress_callback === nothing ? + println(lpad(p, 4), "% - ", msg) : progress_callback(msg, p) + + # see if we can download any package caches before local indexing + if download + download_cache_files(ssi, environment_path, _progress_callback) + end + + server_script = joinpath(@__DIR__, "server.jl") + + env_to_use = copy(ENV) + env_to_use["JULIA_REVISE"] = "manual" # Try to make sure Revise isn't enabled. 
+ + if ssi.depot_path == "" + delete!(env_to_use, "JULIA_DEPOT_PATH") + else + env_to_use["JULIA_DEPOT_PATH"] = ssi.depot_path + end + + stderr_for_client_process = VERSION < v"1.1.0" ? nothing : IOBuffer() + + if ssi.process !== nothing + to_cancel_p = ssi.process + ssi.process = nothing + push!(ssi.canceled_processes, to_cancel_p) + kill(to_cancel_p) + end + + use_code_coverage = Base.JLOptions().code_coverage + + currently_loading_a_package = false + current_package_name = "" + + pipename = pipe_name() + + server_is_ready = Channel(1) + + @async try + server = Sockets.listen(pipename) + + put!(server_is_ready, nothing) + conn = Sockets.accept(server) + + while isopen(conn) + s = readline(conn) + if isempty(s) + continue + end + parts = split(s, ';') + if parts[1] == "STARTLOAD" + currently_loading_a_package = true + current_package_name = parts[2] + current_package_uuid = parts[3] + current_package_version = parts[4] + percentage = parts[5] == "missing" ? missing : parse(Int, parts[5]) + _progress_callback("Indexing $current_package_name...", percentage) + elseif parts[1] == "STOPLOAD" + currently_loading_a_package = false + elseif parts[1] == "PROCESSPKG" + current_package_name = parts[2] + percentage = parts[5] == "missing" ? 
missing : parse(Int, parts[5]) + _progress_callback("Processing $current_package_name...", percentage) + elseif parts[1] == "DONE" + break + else + error("Unknown command.") + end + end + catch err + bt = catch_backtrace() + if error_handler !== nothing + error_handler(err, bt) + else + Base.display_error(stderr, err, bt) + end + end + take!(server_is_ready) + + # 1.11 introduces the --compiled-modules=existing option, which should be much faster than no + # as of 2023-11-09, loading Pkg with --compiled-modules=no also changes something with the + # active project, which breaks the server.jl script + p = try + path = ssi.julia_exe_path + spath = split(path) + # is julia up spec + if length(spath) == 2 && occursin(r"^\+\d", spath[2]) + path = spath + else + path = [path] + end + cmd = Cmd(path) + if ssi.julia_exe_version > v"1.11-" + open(pipeline(Cmd(`$(cmd) --code-coverage=$(use_code_coverage==0 ? "none" : "user") --startup-file=no --compiled-modules=existing --history-file=no --project=$environment_path $server_script $(ssi.store_path) $pipename`, env=env_to_use), stderr=stderr), read=true, write=true) + else + open(pipeline(Cmd(`$(cmd) --code-coverage=$(use_code_coverage==0 ? 
"none" : "user") --startup-file=no --compiled-modules=no --history-file=no --project=$environment_path $server_script $(ssi.store_path) $pipename`, env=env_to_use), stderr=stderr), read=true, write=true) + end + catch err + if stderr_for_client_process !== nothing + showerror(stderr_for_client_process, err) + end + return :failure, stderr_for_client_process + end + + ssi.process = p + + yield() + + if success(p) + # Now we create a new symbol store and load everything into that + # from disc + new_store = recursive_copy(stdlibs) + load_project_packages_into_store!(ssi, environment_path, new_store, _progress_callback) + @debug "SymbolStore: store success" + return :success, new_store + elseif p in ssi.canceled_processes + delete!(ssi.canceled_processes, p) + @debug "SymbolStore: store canceled" + return :canceled, nothing + else + @debug "SymbolStore: store failure" + if currently_loading_a_package + return :package_load_crash, (package_name = current_package_name, stderr = stderr_for_client_process) + else + return :failure, stderr_for_client_process + end + end +end + +function pipe_name() + if Sys.iswindows() + return "\\\\.\\pipe\\vscjlsymserv-$(UUIDs.uuid4())" + end + # Pipe names on unix may only be 92 chars (JuliaLang/julia#43281), and since + # tempdir can be arbitrary long (in particular on macos) we try to keep the name + # within bounds here. + prefix = "vscjlsymserv-" + uuid = string(UUIDs.uuid4()) + pipename = joinpath(tempdir(), prefix * uuid[1:13]) + if length(pipename) >= 92 + # Try to use /tmp and if that fails, hope the long pipe name works anyway + maybe = "/tmp/" * prefix * uuid + try + touch(maybe); rm(maybe) # Check permissions on this path + pipename = maybe + catch + end + end + return pipename +end + +function load_project_packages_into_store!(ssi::SymbolServerInstance, environment_path, store, progress_callback = nothing) + project_filename = isfile(joinpath(environment_path, "JuliaProject.toml")) ? 
joinpath(environment_path, "JuliaProject.toml") : joinpath(environment_path, "Project.toml") + project = try + Pkg.API.read_project(project_filename) + catch err + @warn "Could not load project." + return + end + + manifest_filename = isfile(joinpath(environment_path, "JuliaManifest.toml")) ? joinpath(environment_path, "JuliaManifest.toml") : joinpath(environment_path, "Manifest.toml") + manifest = read_manifest(manifest_filename) + manifest === nothing && return + uuids = values(deps(project)) + num_uuids = length(values(deps(project))) + t0 = time() + for (i, uuid) in enumerate(uuids) + load_package_from_cache_into_store!(ssi, uuid isa UUID ? uuid : UUID(uuid), environment_path, manifest, store, progress_callback, round(Int, 100 * (i - 1) / num_uuids)) + end + took = round(time() - t0, sigdigits = 2) + progress_callback("Loaded all packages into cache in $(took)s", 100) +end + +""" + load_package_from_cache_into_store!(ssp::SymbolServerInstance, uuid, store) + +Tries to load the on-disc stored cache for a package (uuid). Attempts to generate (and save to disc) a new cache if the file does not exist or is unopenable. +""" +function load_package_from_cache_into_store!(ssi::SymbolServerInstance, uuid::UUID, environment_path, manifest, store, progress_callback = nothing, percentage = missing) + yield() + isinmanifest(manifest, uuid) || return + pe = frommanifest(manifest, uuid) + pe_name = packagename(manifest, uuid) + haskey(store, Symbol(pe_name)) && return + + + # further existence checks needed? + cache_path = joinpath(ssi.store_path, get_cache_path(manifest, uuid)...) 
+ if isfile(cache_path) + t0 = time() + progress_callback("Loading $pe_name from cache...", percentage) + try + package_data = open(cache_path) do io + CacheStore.read(io) + end + + pkg_path = Base.locate_package(Base.PkgId(uuid, pe_name)) + if pkg_path === nothing || !isfile(pkg_path) + pkg_path = get_pkg_path(Base.PkgId(uuid, pe_name), environment_path, ssi.depot_path) + end + if pkg_path !== nothing + modify_dirs(package_data.val, f -> modify_dir(f, r"^PLACEHOLDER", joinpath(pkg_path, "src"))) + end + + store[Symbol(pe_name)] = package_data.val + took = round(time() - t0, sigdigits = 2) + msg = "Done loading $pe_name from cache..." + if took > 0.01 + msg *= " (took $(took)s)" + end + progress_callback(msg, percentage) + t0 = time() + for dep in deps(pe) + load_package_from_cache_into_store!(ssi, packageuuid(dep), environment_path, manifest, store, progress_callback, percentage) + end + catch err + Base.display_error(stderr, err, catch_backtrace()) + @warn "Tried to load $pe_name but failed to load from disc, re-caching." + try + rm(cache_path) + catch err2 + # There could have been a race condition that the file has been deleted in the meantime, + # we don't want to crash then. 
+ err2 isa Base.IOError || rethrow(err2) + end + end + else + @warn "$(pe_name) not stored on disc" + store[Symbol(pe_name)] = ModuleStore(VarRef(nothing, Symbol(pe_name)), Dict{Symbol,Any}(), "$pe_name could not be indexed.", true, Symbol[], Symbol[]) + end +end + +function clear_disc_store(ssi::SymbolServerInstance) + for f in readdir(ssi.store_path) + if occursin(f, "ABCDEFGHIJKLMNOPQRSTUVWXYZ") + rm(joinpath(ssi.store_path, f), recursive = true) + end + end +end + +const stdlibs = load_core() + +function _precompile_() + ccall(:jl_generating_output, Cint, ()) == 1 || return nothing + Base.precompile(Tuple{Type{SymbolServer.DataTypeStore},SymbolServer.FakeTypeName,SymbolServer.FakeTypeName,Array{Any,1},Array{Any,1},Array{Symbol,1},Array{Any,1},String,Bool}) + Base.precompile(Tuple{typeof(SymbolServer.cache_methods),Any,Dict{Symbol,SymbolServer.ModuleStore}}) + Base.precompile(Tuple{typeof(SymbolServer.getenvtree)}) + Base.precompile(Tuple{typeof(SymbolServer.symbols),Dict{Symbol,SymbolServer.ModuleStore}}) + Base.precompile(Tuple{typeof(copy),Base.Broadcast.Broadcasted{Base.Broadcast.Style{Tuple},Nothing,typeof(SymbolServer._parameter),Tuple{NTuple{4,Symbol}}}}) +end +VERSION >= v"1.4.2" && _precompile_() + +end # module diff --git a/src/SymbolServer/faketypes.jl b/src/SymbolServer/faketypes.jl new file mode 100644 index 0000000..077289c --- /dev/null +++ b/src/SymbolServer/faketypes.jl @@ -0,0 +1,179 @@ +########## Fake type-system + + +# Used to label all objects +struct VarRef + parent::Union{VarRef,Nothing} + name::Symbol +end +VarRef(m::Module) = VarRef((parentmodule(m) == Main || parentmodule(m) == m) ? 
nothing : VarRef(parentmodule(m)), nameof(m)) + +# These mirror Julia types (w/o the Fake prefix) +struct FakeTypeName + name::VarRef + parameters::Vector{Any} +end + +function FakeTypeName(@nospecialize(x); justname=false) + @static if !(Vararg isa Type) + x isa typeof(Vararg) && return FakeTypeofVararg(x) + end + if x isa DataType + xname = x.name + xnamename = xname.name # necessary but unclear why. + if justname + FakeTypeName(VarRef(VarRef(x.name.module), x.name.name), []) + else + # FakeTypeName(VarRef(VarRef(x.name.module), x.name.name), _parameter.(x.parameters)) + ft = FakeTypeName(VarRef(VarRef(x.name.module), x.name.name), []) + for p in x.parameters + push!(ft.parameters, _parameter(p)) + end + ft + end + elseif x isa Union + FakeUnion(x) + elseif x isa UnionAll + FakeUnionAll(x) + elseif x isa TypeVar + FakeTypeVar(x) + elseif x isa Core.TypeofBottom + FakeTypeofBottom() + elseif x isa Module + VarRef(x) + else + error((x, typeof(x))) + end +end + +struct FakeTypeofBottom end +struct FakeUnion + a + b +end +FakeUnion(u::Union) = FakeUnion(FakeTypeName(u.a, justname=true), FakeTypeName(u.b, justname=true)) +struct FakeTypeVar + name::Symbol + lb + ub +end +FakeTypeVar(tv::TypeVar) = FakeTypeVar(tv.name, FakeTypeName(tv.lb, justname=true), FakeTypeName(tv.ub, justname=true)) +struct FakeUnionAll + var::FakeTypeVar + body::Any +end +FakeUnionAll(ua::UnionAll) = FakeUnionAll(FakeTypeVar(ua.var), FakeTypeName(ua.body, justname=true)) + +function _parameter(@nospecialize(p)) + if p isa Union{Int,Symbol,Bool,Char} + p + elseif !(p isa Type) && isbitstype(typeof(p)) + 0 + elseif p isa Tuple + _parameter.(p) + else + FakeTypeName(p, justname=true) + end +end + +Base.show(io::IO, vr::VarRef) = vr.parent === nothing ? 
print(io, vr.name) : print(io, vr.parent, ".", vr.name) +function Base.show(io::IO, tn::FakeTypeName) + print(io, tn.name) + if !isempty(tn.parameters) + print(io, "{") + for i = 1:length(tn.parameters) + print(io, tn.parameters[i]) + i != length(tn.parameters) && print(io, ",") + end + print(io, "}") + end +end +Base.show(io::IO, x::FakeUnionAll) = print(io, x.body, " where ", x.var) +function Base.show(io::IO, x::FakeUnion; inunion=false) + !inunion && print(io, "Union{") + print(io, x.a, ",") + if x.b isa FakeUnion + Base.show(io, x.b; inunion=true) + else + print(io, x.b, "}") + end +end +function Base.show(io::IO, x::FakeTypeVar) + if isfakebottom(x.lb) + if isfakeany(x.ub) + print(io, x.name) + else + print(io, x.name, "<:", x.ub) + end + elseif isfakeany(x.ub) + print(io, x.lb, "<:", x.name) + else + print(io, x.lb, "<:", x.name, "<:", x.ub) + end +end + +isfakeany(t) = false +isfakeany(t::FakeTypeName) = isfakeany(t.name) +isfakeany(vr::VarRef) = vr.name === :Any && vr.parent isa VarRef && vr.parent.name === :Core && vr.parent.parent === nothing + +isfakebottom(t) = false +isfakebottom(t::FakeTypeofBottom) = true + +Base.:(==)(a::FakeTypeName, b::FakeTypeName) = a.name == b.name && a.parameters == b.parameters +Base.:(==)(a::VarRef, b::VarRef) = a.parent == b.parent && a.name == b.name +Base.:(==)(a::FakeTypeVar, b::FakeTypeVar) = a.lb == b.lb && a.name == b.name && a.ub == b.ub +Base.:(==)(a::FakeUnionAll, b::FakeUnionAll) = a.var == b.var && a.body == b.body +Base.:(==)(a::FakeUnion, b::FakeUnion) = a.a == b.a && a.b == b.b +Base.:(==)(a::FakeTypeofBottom, b::FakeTypeofBottom) = true + +Base.hash(a::FakeTypeName, h::UInt) = hash(a.name, hash(a.parameters, hash(:FakeTypeName, h))) +Base.hash(a::VarRef, h::UInt) = hash(a.name, hash(a.parent, hash(:VarRef, h))) +Base.hash(a::FakeTypeVar, h::UInt) = hash(a.name, hash(a.lb, hash(a.ub, hash(:FakeTypeVar, h)))) +Base.hash(a::FakeUnionAll, h::UInt) = hash(a.var, hash(a.body, hash(:FakeUnionAll, h))) 
+Base.hash(a::FakeUnion, h::UInt) = hash(a.a, hash(a.b, hash(:FakeUnion, h))) +Base.hash(::FakeTypeofBottom, h::UInt) = hash(:FakeTypeofBottom, h) + +@static if !(Vararg isa Type) + struct FakeTypeofVararg + T + N + FakeTypeofVararg() = new() + FakeTypeofVararg(T) = (new(T)) + FakeTypeofVararg(T, N) = new(T, N) + end + function FakeTypeofVararg(va::typeof(Vararg)) + if isdefined(va, :N) + vaN = va.N isa TypeVar ? FakeTypeVar(va.N) : va.N + FakeTypeofVararg(FakeTypeName(va.T; justname=true), vaN) # This should be FakeTypeName(va.N) but seems to crash inference. + elseif isdefined(va, :T) + FakeTypeofVararg(FakeTypeName(va.T; justname=true)) + else + FakeTypeofVararg() + end + end + function Base.print(io::IO, va::FakeTypeofVararg) + print(io, "Vararg") + if isdefined(va, :T) + print(io, "{", va.T) + if isdefined(va, :N) + print(io, ",", va.N) + end + print(io, "}") + end + end + function Base.:(==)(a::FakeTypeofVararg, b::FakeTypeofVararg) + if isdefined(a, :T) + if isdefined(b, :T) && a.T == b.T + if isdefined(a, :N) + isdefined(b, :N) && a.N == b.N + else + !isdefined(b, :N) + end + else + false + end + else + !isdefined(b, :T) + end + end +end diff --git a/src/SymbolServer/indexbasestdlib.jl b/src/SymbolServer/indexbasestdlib.jl new file mode 100644 index 0000000..c598c05 --- /dev/null +++ b/src/SymbolServer/indexbasestdlib.jl @@ -0,0 +1,56 @@ +module SymbolServer + +using Pkg, SHA +using Base: UUID + +@info "Indexing Julia $VERSION..." 
+ +# This path will always be mounted in the docker container in which we are running +store_path = "/symcache" + +cache_package_folder_path = joinpath(store_path, "v1", "stdlib") + +mkpath(cache_package_folder_path) + +module LoadingBay end + +include("faketypes.jl") +include("symbols.jl") +include("utils.jl") +include("serialize.jl") +using .CacheStore + +# TODO Make this load all the stdlibs and save them + +# m = try +# LoadingBay.eval(:(import $current_package_name)) +# getfield(LoadingBay, current_package_name) +# catch e +# @info "Could not load package, exiting." +# exit(10) +# end + +# # Get the symbols +# env = getenvtree([current_package_name]) +# symbols(env, m) + +# # Strip out paths +# modify_dirs(env[current_package_name], f -> modify_dir(f, pkg_src_dir(Base.loaded_modules[Base.PkgId(current_package_uuid, string(current_package_name))]), "PLACEHOLDER")) + +# # There's an issue here - @enum used within CSTParser seems to add a method that is introduced from Enums.jl... + +# Pkg.PlatformEngines.probe_platform_engines!() + +# mktempdir() do path +# # Write them to a file +# open(joinpath(path, filename_with_extension), "w") do io +# CacheStore.write(io, Package(string(current_package_name), env[current_package_name], current_package_uuid, nothing)) +# end + +# # cp(joinpath(path, filename_with_extension), cache_path) +# Pkg.PlatformEngines.package(path, cache_path_compressed) +# end + +@info "Finished indexing." + +end diff --git a/src/SymbolServer/indexpackage.jl b/src/SymbolServer/indexpackage.jl new file mode 100644 index 0000000..421a0bf --- /dev/null +++ b/src/SymbolServer/indexpackage.jl @@ -0,0 +1,65 @@ +module SymbolServer + +using Pkg, SHA +using Base: UUID + +current_package_name = Symbol(ARGS[1]) +current_package_version = VersionNumber(ARGS[2]) +current_package_uuid = UUID(ARGS[3]) +current_package_treehash = ARGS[4] + +@info "Indexing package $current_package_name $current_package_version..." 
+ +# This path will always be mounted in the docker container in which we are running +store_path = "/symcache" + +current_package_versionwithoutplus = replace(string(current_package_version), '+'=>'_') +filename_with_extension = "v$(current_package_versionwithoutplus)_$current_package_treehash.jstore" + +module LoadingBay end + +try + Pkg.add(name=string(current_package_name), version=current_package_version) +catch err + @info "Could not install package, exiting" + exit(20) +end + +# TODO Make the code below ONLY write a cache file for the package we just added here. +include("faketypes.jl") +include("symbols.jl") +include("utils.jl") +include("serialize.jl") +using .CacheStore + +# Load package +m = try + LoadingBay.eval(:(import $current_package_name)) + getfield(LoadingBay, current_package_name) +catch e + @info "Could not load package, exiting." + exit(10) +end + +# Get the symbols +env = getenvtree([current_package_name]) +symbols(env, m, get_return_type=true) + + # Strip out paths +modify_dirs(env[current_package_name], f -> modify_dir(f, pkg_src_dir(Base.loaded_modules[Base.PkgId(current_package_uuid, string(current_package_name))]), "PLACEHOLDER")) + +# There's an issue here - @enum used within CSTParser seems to add a method that is introduced from Enums.jl... + +# Write them to a file +open(joinpath(store_path, filename_with_extension), "w") do io + CacheStore.write(io, Package(string(current_package_name), env[current_package_name], current_package_uuid, nothing)) +end + +@info "Finished indexing." + +# We are exiting with a custom error code to indicate success. This allows +# the parent process to distinguish between a successful run and one +# where the package exited the process. 
+exit(37) + +end diff --git a/src/SymbolServer/serialize.jl b/src/SymbolServer/serialize.jl new file mode 100644 index 0000000..5568a9f --- /dev/null +++ b/src/SymbolServer/serialize.jl @@ -0,0 +1,290 @@ +module CacheStore +using ..SymbolServer: VarRef, FakeTypeName, FakeTypeofBottom, FakeTypeVar, FakeUnion, FakeUnionAll +using ..SymbolServer: ModuleStore, Package, FunctionStore, MethodStore, DataTypeStore, GenericStore +@static if !(Vararg isa Type) + using ..SymbolServer: FakeTypeofVararg +end + +const NothingHeader = 0x01 +const SymbolHeader = 0x02 +const CharHeader = 0x03 +const IntegerHeader = 0x04 +const StringHeader = 0x05 +const VarRefHeader = 0x06 +const FakeTypeNameHeader = 0x07 +const FakeTypeofBottomHeader = 0x08 +const FakeTypeVarHeader = 0x09 +const FakeUnionHeader = 0x0a +const FakeUnionAllHeader = 0xb +const ModuleStoreHeader = 0x0c +const MethodStoreHeader = 0x0d +const FunctionStoreHeader = 0x0e +const DataTypeStoreHeader = 0x0f +const GenericStoreHeader = 0x10 +const PackageHeader = 0x11 +const TrueHeader = 0x12 +const FalseHeader = 0x13 +const TupleHeader = 0x14 +const FakeTypeofVarargHeader = 0x15 +const UndefHeader = 0x16 + + +function write(io, x::VarRef) + Base.write(io, VarRefHeader) + write(io, x.parent) + write(io, x.name) +end +function write(io, x::Nothing) + Base.write(io, NothingHeader) +end +function write(io, x::Char) + Base.write(io, CharHeader) + Base.write(io, UInt32(x)) +end +function write(io, x::Bool) + x ? 
Base.write(io, TrueHeader) : Base.write(io, FalseHeader) +end +function write(io, x::Int) + Base.write(io, IntegerHeader) + Base.write(io, x) +end +function write(io, x::Symbol) + Base.write(io, SymbolHeader) + Base.write(io, sizeof(x)) + Base.write(io, String(x)) +end +function write(io, x::NTuple{N,Any}) where N + Base.write(io, TupleHeader) + Base.write(io, N) + for i = 1:N + write(io, x[i]) + end +end +function write(io, x::String) + Base.write(io, StringHeader) + Base.write(io, sizeof(x)) + Base.write(io, x) +end +function write(io, x::FakeTypeName) + Base.write(io, FakeTypeNameHeader) + write(io, x.name) + write_vector(io, x.parameters) +end +write(io, x::FakeTypeofBottom) = Base.write(io, FakeTypeofBottomHeader) +function write(io, x::FakeTypeVar) + Base.write(io, FakeTypeVarHeader) + write(io, x.name) + write(io, x.lb) + write(io, x.ub) +end +function write(io, x::FakeUnion) + Base.write(io, FakeUnionHeader) + write(io, x.a) + write(io, x.b) +end +function write(io, x::FakeUnionAll) + Base.write(io, FakeUnionAllHeader) + write(io, x.var) + write(io, x.body) +end + +@static if !(Vararg isa Type) + function write(io, x::FakeTypeofVararg) + Base.write(io, FakeTypeofVarargHeader) + isdefined(x, :T) ? write(io, x.T) : Base.write(io, UndefHeader) + isdefined(x, :N) ? 
write(io, x.N) : Base.write(io, UndefHeader) + end +end + +function write(io, x::MethodStore) + Base.write(io, MethodStoreHeader) + write(io, x.name) + write(io, x.mod) + write(io, x.file) + Base.write(io, x.line) + Base.write(io, length(x.sig)) + for p in x.sig + write(io, p[1]) + write(io, p[2]) + end + write_vector(io, x.kws) + write(io, x.rt) +end + +function write(io, x::FunctionStore) + Base.write(io, FunctionStoreHeader) + write(io, x.name) + write_vector(io, x.methods) + write(io, x.doc) + write(io, x.extends) + write(io, x.exported) +end + +function write(io, x::DataTypeStore) + Base.write(io, DataTypeStoreHeader) + write(io, x.name) + write(io, x.super) + write_vector(io, x.parameters) + write_vector(io, x.types) + write_vector(io, x.fieldnames) + write_vector(io, x.methods) + write(io, x.doc) + write(io, x.exported) +end + +function write(io, x::GenericStore) + Base.write(io, GenericStoreHeader) + write(io, x.name) + write(io, x.typ) + write(io, x.doc) + write(io, x.exported) +end + +function write(io, x::ModuleStore) + Base.write(io, ModuleStoreHeader) + write(io, x.name) + Base.write(io, length(x.vals)) + for p in x.vals + write(io, p[1]) + write(io, p[2]) + end + write(io, x.doc) + write(io, x.exported) + write_vector(io, x.exportednames) + write_vector(io, x.used_modules) +end + +function write(io, x::Package) + Base.write(io, PackageHeader) + write(io, x.name) + write(io, x.val) + Base.write(io, UInt128(x.uuid)) + Base.write(io, x.sha === nothing ? zeros(UInt8, 32) : x.sha) +end + +function write_vector(io, x) + Base.write(io, length(x)) + for p in x + write(io, p) + end +end + +function read(io, t = Base.read(io, UInt8)) + # There are a bunch of `yield`s in potentially expensive code paths. + # One top-level `yield` would probably increase responsiveness in the + # LS, but increases runtime by 3x. This seems like a good compromise. 
+ + if t === VarRefHeader + VarRef(read(io), read(io)) + elseif t === NothingHeader + nothing + elseif t === SymbolHeader + n = Base.read(io, Int) + out = Vector{UInt8}(undef, n) + readbytes!(io, out, n) + Symbol(String(out)) + elseif t === StringHeader + yield() + n = Base.read(io, Int) + out = Vector{UInt8}(undef, n) + readbytes!(io, out, n) + String(out) + elseif t === CharHeader + Char(Base.read(io, UInt32)) + elseif t === IntegerHeader + Base.read(io, Int) + elseif t === FakeTypeNameHeader + FakeTypeName(read(io), read_vector(io, Any)) + elseif t === FakeTypeofBottomHeader + FakeTypeofBottom() + elseif t === FakeTypeVarHeader + FakeTypeVar(read(io), read(io), read(io)) + elseif t === FakeUnionHeader + FakeUnion(read(io), read(io)) + elseif t === FakeUnionAllHeader + FakeUnionAll(read(io), read(io)) + elseif t === FakeTypeofVarargHeader + T, N = read(io), read(io) + if T === nothing + FakeTypeofVararg() + elseif N === nothing + FakeTypeofVararg(T) + else + FakeTypeofVararg(T, N) + end + elseif t === UndefHeader + nothing + elseif t === MethodStoreHeader + yield() + name = read(io) + mod = read(io) + file = read(io) + line = Base.read(io, UInt32) + nsig = Base.read(io, Int) + sig = Vector{Pair{Any, Any}}(undef, nsig) + for i in 1:nsig + sig[i] = read(io) => read(io) + end + kws = read_vector(io, Symbol) + rt = read(io) + MethodStore(name, mod, file, line, sig, kws, rt) + elseif t === FunctionStoreHeader + yield() + FunctionStore(read(io), read_vector(io, MethodStore), read(io), read(io), read(io)) + elseif t === DataTypeStoreHeader + yield() + DataTypeStore(read(io), read(io), read_vector(io, Any), read_vector(io, Any), read_vector(io, Any), read_vector(io, MethodStore), read(io), read(io)) + elseif t === GenericStoreHeader + yield() + GenericStore(read(io), read(io), read(io), read(io)) + elseif t === ModuleStoreHeader + yield() + name = read(io) + n = Base.read(io, Int) + vals = Dict{Symbol,Any}() + sizehint!(vals, n) + for _ = 1:n + k = read(io) + v = 
read(io) + vals[k] = v + end + doc = read(io) + exported = read(io) + exportednames = read_vector(io, Symbol) + used_modules = read_vector(io, Symbol) + ModuleStore(name, vals, doc, exported, exportednames, used_modules) + elseif t === TrueHeader + true + elseif t === FalseHeader + false + elseif t === TupleHeader + N = Base.read(io, Int) + ntuple(i->read(io), N) + elseif t === PackageHeader + yield() + name = read(io) + val = read(io) + uuid = Base.UUID(Base.read(io, UInt128)) + sha = Base.read(io, 32) + Package(name, val, uuid, all(x == 0x00 for x in sha) ? nothing : sha) + else + error("Unknown type: $t") + end +end + +function read_vector(io, T) + n = Base.read(io, Int) + v = Vector{T}(undef, n) + for i in 1:n + v[i] = read(io) + end + v +end + +function storeunstore(x) + io = IOBuffer() + write(io, x) + bs = take!(io) + read(IOBuffer(bs)) +end +end diff --git a/src/SymbolServer/server.jl b/src/SymbolServer/server.jl new file mode 100644 index 0000000..d892b1f --- /dev/null +++ b/src/SymbolServer/server.jl @@ -0,0 +1,138 @@ +module SymbolServer + +!in("@stdlib", LOAD_PATH) && push!(LOAD_PATH, "@stdlib") # Make sure we can load stdlibs + +import Sockets +pipename = length(ARGS) > 1 ? ARGS[2] : nothing +conn = pipename !== nothing ? Sockets.connect(pipename) : nothing + +start_time = time_ns() + +# Try to lower the priority of this process so that it doesn't block the +# user system. +@static if Sys.iswindows() + # Get process handle + p_handle = ccall(:GetCurrentProcess, stdcall, Ptr{Cvoid}, ()) + + # Set BELOW_NORMAL_PRIORITY_CLASS + ret = ccall(:SetPriorityClass, stdcall, Cint, (Ptr{Cvoid}, Culong), p_handle, 0x00004000) + ret != 1 && @warn "Something went wrong when setting BELOW_NORMAL_PRIORITY_CLASS." 
+else + ret = ccall(:nice, Cint, (Cint,), 1) + # We don't check the return value because it doesn't really matter +end + +module LoadingBay +end + +using Pkg, SHA +using Base: UUID + +include("faketypes.jl") +include("symbols.jl") +include("utils.jl") +include("serialize.jl") +using .CacheStore + +store_path = length(ARGS) > 0 ? ARGS[1] : abspath(joinpath(@__DIR__, "..", "store")) + +ctx = try + Pkg.Types.Context() +catch err + @info "Package environment can't be read." + exit() +end +# Add some methods to check whether a package is part of the standard library and so +# won't need recaching. +if isdefined(Pkg.Types, :is_stdlib) + is_stdlib(uuid::UUID) = Pkg.Types.is_stdlib(uuid) +else + is_stdlib(uuid::UUID) = uuid in keys(ctx.stdlibs) +end + +server = Server(store_path, ctx, Dict{UUID,Package}()) + +written_caches = String[] # List of caches that have already been written +toplevel_pkgs = deps(project(ctx)) # First get a list of all package UUIds that we want to cache +packages_to_load = [] + +# Obtain the directory containing the active Manifest.toml. Any 'develop'ed dependencies +# will contain a path that is relative to this directory. +manifest_dir = dirname(ctx.env.manifest_file) + +# Next make sure the cache is up-to-date for all of these. +for (pk_name, uuid) in toplevel_pkgs + uuid isa UUID || (uuid = UUID(uuid)) + if !isinmanifest(ctx, uuid) + @info "$pk_name not in manifest, skipping." + continue + end + pe = frommanifest(manifest(ctx), uuid) + cache_path = joinpath(server.storedir, SymbolServer.get_cache_path(manifest(ctx), uuid)...) + + if isfile(cache_path) + if is_package_deved(manifest(ctx), uuid) + try + cached_version = open(cache_path) do io + CacheStore.read(io) + end + if sha_pkg(manifest_dir, frommanifest(manifest(ctx), uuid)) != cached_version.sha + @info "Outdated sha, will recache package $pk_name ($uuid)" + push!(packages_to_load, uuid) + else + @info "Package $pk_name ($uuid) is cached." 
+ end + catch err + @info "Couldn't load $pk_name ($uuid) from file, will recache." + end + else + @info "Package $pk_name ($uuid) is cached." + end + else + @info "Will cache package $pk_name ($uuid)" + push!(packages_to_load, uuid) + end +end + +# Load all packages together +# This is important, or methods added to functions in other packages that are loaded earlier would not be in the cache +for (i, uuid) in enumerate(packages_to_load) + load_package(ctx, uuid, conn, LoadingBay, round(Int, 100*(i - 1)/length(packages_to_load))) +end + +# Create image of whole package env. This creates the module structure only. +env_symbols = getenvtree() + +# Populate the above with symbols, skipping modules that don't need caching. +# symbols (env_symbols) +visited = Base.IdSet{Module}([Base, Core]) + +for (pid, m) in Base.loaded_modules + if pid.uuid !== nothing && is_stdlib(pid.uuid) && + isinmanifest(ctx, pid.uuid) && + isfile(joinpath(server.storedir, SymbolServer.get_cache_path(manifest(ctx), pid.uuid)...)) + push!(visited, m) + delete!(env_symbols, Symbol(pid.name)) + end +end + +symbols(env_symbols, nothing, getallns(), visited) + +# Wrap the `ModuleStore`s as `Package`s. +for (pkg_name, cache) in env_symbols + !isinmanifest(ctx, String(pkg_name)) && continue + uuid = packageuuid(ctx, String(pkg_name)) + pe = frommanifest(ctx, uuid) + server.depot[uuid] = Package(String(pkg_name), cache, uuid, sha_pkg(manifest_dir, pe)) +end + +write_depot(server, server.context, written_caches) + +@info "Symbol server indexing took $((time_ns() - start_time) / 1e9) seconds." 
+ +if conn !== nothing + println(conn, "DONE") + close(conn) +end + +end diff --git a/src/SymbolServer/symbols.jl b/src/SymbolServer/symbols.jl new file mode 100644 index 0000000..b0c9169 --- /dev/null +++ b/src/SymbolServer/symbols.jl @@ -0,0 +1,689 @@ +using LibGit2, InteractiveUtils + +mutable struct Server + storedir::String + context::Pkg.Types.Context + depot::Dict +end + +abstract type SymStore end +struct ModuleStore <: SymStore + name::VarRef + vals::Dict{Symbol,Any} + doc::String + exported::Bool + exportednames::Vector{Symbol} + used_modules::Vector{Symbol} +end + +ModuleStore(m) = ModuleStore(VarRef(m), Dict{Symbol,Any}(), _doc(m, nameof(m)), true, unsorted_names(m), Symbol[]) +Base.getindex(m::ModuleStore, k) = m.vals[k] +Base.setindex!(m::ModuleStore, v, k) = (m.vals[k] = v) +Base.haskey(m::ModuleStore, k) = haskey(m.vals, k) + +Base.show(io::IO, ms::ModuleStore) = print(io, "ModuleStore($(ms.name)) with $(length(ms.vals)) entries") + +const EnvStore = Dict{Symbol,ModuleStore} + +struct Package + name::String + val::ModuleStore + uuid::Base.UUID + sha::Union{Vector{UInt8},Nothing} +end +Package(name::String, val::ModuleStore, uuid::String, sha) = Package(name, val, Base.UUID(uuid), sha) + +struct MethodStore + name::Symbol + mod::Symbol + file::String + line::Int32 + sig::Vector{Pair{Any,Any}} + kws::Vector{Symbol} + rt::Any +end + +function Base.show(io::IO, ms::MethodStore) + print(io, ms.mod, ".", ms.name, "(") + for (a,b) in ms.sig + print(io, a, "::", b) + end + print(io, ") at ", ms.file, ":", ms.line, ) +end + +struct DataTypeStore <: SymStore + name::FakeTypeName + super::FakeTypeName + parameters::Vector{Any} + types::Vector{Any} + fieldnames::Vector{Any} + methods::Vector{MethodStore} + doc::String + exported::Bool + function DataTypeStore(names, super, parameters, fieldtypes, fieldnames, methods, doc, exported) + if length(fieldtypes) < length(fieldnames) + append!(fieldtypes, [Any for _ in 1:(length(fieldnames)-length(fieldtypes))]) + end 
# (tail of the DataTypeStore inner constructor begun in the previous hunk)
        new(names, super, parameters, fieldtypes, fieldnames, methods, doc, exported)
    end
end

# Build a DataTypeStore from a live type `t` bound to `symbol` in `parent_mod`.
# `exported` records whether `symbol` is reachable without qualification.
function DataTypeStore(@nospecialize(t), symbol, parent_mod, exported)
    ur_t = Base.unwrap_unionall(t)
    # `parameters`/`types` only exist on proper DataTypes; guard with isdefined
    # so e.g. Union-wrapped inputs degrade to empty lists.
    parameters = if isdefined(ur_t, :parameters)
        map(ur_t.parameters) do p
            _parameter(p)
        end
    else
        []
    end
    types = if isdefined(ur_t, :types)
        map(ur_t.types) do p
            FakeTypeName(p)
        end
    else
        []
    end
    # Field names are only recorded for concrete types with at least one field;
    # methods start empty and are filled in later by `cache_methods`.
    DataTypeStore(FakeTypeName(ur_t), FakeTypeName(ur_t.super), parameters, types, isconcretetype(ur_t) && fieldcount(ur_t) > 0 ? collect(fieldnames(ur_t)) : Symbol[], MethodStore[], _doc(parent_mod, symbol), exported)
end

function Base.show(io::IO, dts::DataTypeStore)
    print(io, dts.name, " <: ", dts.super, " with $(length(dts.methods)) methods")
end

# Cached representation of a function: its methods, docstring, the binding it
# extends, and whether it is exported from its module.
struct FunctionStore <: SymStore
    name::VarRef
    methods::Vector{MethodStore}
    doc::String
    extends::VarRef
    exported::Bool
end

# Build a FunctionStore for function `f` bound to `symbol` in `parent_mod`.
# Intrinsic functions are owned by Core.Intrinsics rather than `parent_mod`;
# everything else about the two cases is identical, so compute the owner once.
function FunctionStore(@nospecialize(f), symbol, parent_mod, exported)
    owner = f isa Core.IntrinsicFunction ? VarRef(Core.Intrinsics) : VarRef(parent_mod)
    FunctionStore(VarRef(owner, nameof(f)), MethodStore[], _doc(parent_mod, symbol), VarRef(VarRef(parentmodule(f)), nameof(f)), exported)
end

function Base.show(io::IO, fs::FunctionStore)
    print(io, fs.name, " with $(length(fs.methods)) methods")
end

# Fallback store for any binding that is neither a module, type nor function.
struct GenericStore <: SymStore
    name::VarRef
    typ::Any
    doc::String
    exported::Bool
end

# adapted from https://github.com/timholy/CodeTracking.jl/blob/afc73a957f5034cc7f02e084a91283c47882f92b/src/utils.jl#L87-L122

"""
    path = maybe_fix_path(path)

Return a normalized, absolute path for a source file `path`.
"""
function maybe_fix_path(file)
    if !isabspath(file)
        # This may be a Base or Core method; resolve it against the julia
        # source tree before attempting the stdlib-path fixup below.
        newfile = Base.find_source_file(file)
        if isa(newfile, AbstractString)
            file = normpath(newfile)
        end
    end
    return maybe_fixup_stdlib_path(file)
end

# `isfile` that never throws (e.g. on malformed path strings) — best effort.
safe_isfile(x) = try isfile(x); catch; false end

# Absolute stdlib prefix baked into this julia binary by the machine that
# built it (derived from where `versioninfo`'s method claims to live).
const BUILDBOT_STDLIB_PATH = dirname(abspath(joinpath(String((@which versioninfo()).file), "..", "..", "..")))

# Rewrite a baked-in buildbot stdlib prefix to the local stdlib location.
replace_buildbot_stdlibpath(str::String) = replace(str, BUILDBOT_STDLIB_PATH => Sys.STDLIB)

"""
    path = maybe_fixup_stdlib_path(path::String)

Return `path` corrected for julia issue [#26314](https://github.com/JuliaLang/julia/issues/26314) if applicable.
Otherwise, return the input `path` unchanged.

Due to the issue mentioned above, location info for methods defined in one of Julia's standard
libraries is, for non-source Julia builds, given as absolute paths on the worker that built the
`julia` executable. This function corrects such a path to instead refer to the local path on the
user's drive.
"""
function maybe_fixup_stdlib_path(path)
    if !safe_isfile(path)
        maybe_stdlib_path = replace_buildbot_stdlibpath(path)
        safe_isfile(maybe_stdlib_path) && return maybe_stdlib_path
    end
    return path
end

# Current world age, falling back to "newest" on very old julia versions that
# lack `Base.get_world_counter`.
_default_world_age() =
    if isdefined(Base, :get_world_counter)
        Base.get_world_counter()
    else
        typemax(UInt)
    end

# Method-lookup results keyed by (function, types, world). Lookup is expensive
# (`Base._methods`), so results are memoized for the life of the process.
const _global_method_cache = IdDict{Any,Vector{Any}}()

# Return the raw `Base._methods` triples for `f`, memoized in
# `_global_method_cache`. `get!` performs a single hash lookup where the
# original haskey/getindex pair did two.
function methodinfo(@nospecialize(f); types = Tuple, world = _default_world_age())
    key = (f, types, world)
    return get!(_global_method_cache, key) do
        ms = Base._methods(f, types, -1, world)
        # `Base._methods` can return `false`/`nothing` on failure; normalize
        # to an empty vector so callers can always iterate.
        ms isa Vector ? ms : []
    end
end

# All `Method`s of `f` (third element of each `Base._methods` triple).
function methodlist(@nospecialize(f))
    ms = methodinfo(f)
    Method[x[3]::Method for x in ms]
end

# Names of the static (where-clause) parameters of `meth`, outermost first.
function sparam_syms(meth::Method)
    s = Symbol[]
    sig = meth.sig
    while sig isa UnionAll
        push!(s, Symbol(sig.var.name))
        sig = sig.body
    end
    return s
end

# Record `MethodStore`s for every method of `f` under binding `name`, filing
# each one into the module (within `env`) that defines it. Returns the list of
# (module, MethodStore) pairs. When `get_return_type` is set the inferred
# return type is computed per method (slow).
function cache_methods(@nospecialize(f), name, env, get_return_type)
    if isa(f, Core.Builtin)
        # Builtins have no regular method table entries worth caching here.
        return MethodStore[]
    end
    types = Tuple
    world = _default_world_age()
    ms = Tuple{Module,MethodStore}[]
    methods0 = try
        methodinfo(f; types = types, world = world)
    catch err
        @debug "Error in method lookup for $f" ex=(err, catch_backtrace())
        return ms
    end
    ind_of_method_w_kws = Int[] # stores the index of methods with kws.
    i = 1
    for m in methods0
        # Get inferred method return type
        if get_return_type
            sparams = Core.svec(sparam_syms(m[3])...)
+ rt = try + @static if isdefined(Core.Compiler, :NativeInterpreter) + Core.Compiler.typeinf_type(Core.Compiler.NativeInterpreter(), m[3], m[3].sig, sparams) + else + Core.Compiler.typeinf_type(m[3], m[3].sig, sparams, Core.Compiler.Params(world)) + end + catch e + Any + end + else + rt = Any + end + file = maybe_fix_path(String(m[3].file)) + MS = MethodStore(m[3].name, nameof(m[3].module), file, m[3].line, [], Symbol[], FakeTypeName(rt)) + # Get signature + sig = Base.unwrap_unionall(m[1]) + argnames = getargnames(m[3]) + for i = 2:m[3].nargs + push!(MS.sig, argnames[i] => FakeTypeName(sig.parameters[i])) + end + kws = getkws(m[3]) + if !isempty(kws) + push!(ind_of_method_w_kws, i) + end + for kw in kws + push!(MS.kws, kw) + end + push!(ms, (m[3].module, MS)) + i += 1 + end + + # Go back and add kws to methods defined in the same place as others with kws. + for i in ind_of_method_w_kws + for mj in ms + if mj[2].file == ms[i][2].file && mj[2].line == ms[i][2].line && isempty(mj[2].kws) + for kw in ms[i][2].kws + push!(mj[2].kws, kw) + end + end + end + end + + func_vr = VarRef(VarRef(parentmodule(f)), name) + for m in ms + mvr = VarRef(m[1]) + modstore = _lookup(mvr, env) + modstore === nothing && continue + + if !haskey(modstore, name) + modstore[name] = FunctionStore(VarRef(mvr, name), MethodStore[m[2]], "", func_vr, false) + elseif !(modstore[name] isa DataTypeStore || modstore[name] isa FunctionStore) + modstore[name] = FunctionStore(VarRef(mvr, name), MethodStore[m[2]], "", func_vr, false) + else + if !(m[2] in modstore[name].methods) + push!(modstore[name].methods, m[2]) + end + end + end + return ms +end + +getargnames(m::Method) = Base.method_argnames(m) +@static if length(first(methods(Base.kwarg_decl)).sig.parameters) == 2 + getkws = Base.kwarg_decl +else + function getkws(m::Method) + sig = Base.unwrap_unionall(m.sig) + length(sig.parameters) == 0 && return [] + sig.parameters[1] isa Union && return [] + 
# (tail of the pre-1.x `getkws` fallback begun in the previous hunk: locate the
# function's kwsorter, if any, to recover declared keyword names)
        !isdefined(Base.unwrap_unionall(sig.parameters[1]), :name) && return []
        fname = Base.unwrap_unionall(sig.parameters[1]).name
        if isdefined(fname.mt, :kwsorter)
            Base.kwarg_decl(m, typeof(fname.mt.kwsorter))
        else
            []
        end
    end
end

# Apply `f` to every defined binding of module `m` (or of every loaded module
# when `m === nothing`), recursing into submodules. `visited` prevents cycles.
function apply_to_everything(f, m = nothing, visited = Base.IdSet{Module}())
    if m isa Module
        push!(visited, m)
        for s in unsorted_names(m, all = true, imported = true, usings = true)
            # Skip undefined bindings and the module's self-binding.
            (!isdefined(m, s) || s == nameof(m)) && continue
            x = getfield(m, s)
            f(x)
            if x isa Module && !in(x, visited)
                apply_to_everything(f, x, visited)
            end
        end
    else
        for m in Base.loaded_modules_array()
            in(m, visited) || apply_to_everything(f, m, visited)
        end
    end
end

# Fold `f(mod, name, value, state)` over every binding of `m` (or all loaded
# modules), recursing into submodules. Note: `state` is threaded only within a
# single module's bindings; each recursive descent starts with `state = nothing`.
function oneverything(f, m = nothing, visited = Base.IdSet{Module}())
    if m isa Module
        push!(visited, m)
        state = nothing
        for s in unsorted_names(m, all = true, imported = true, usings = true)
            !isdefined(m, s) && continue
            x = getfield(m, s)
            state = f(m, s, x, state)
            if x isa Module && !in(x, visited)
                oneverything(f, x, visited)
            end
        end
    else
        for m in Base.loaded_modules_array()
            in(m, visited) || oneverything(f, m, visited)
        end
    end
end

# Per-module name sets accumulated by `build_namecache` / read by `getnames`.
const _global_symbol_cache_by_mod = IdDict{Module,Base.IdSet{Symbol}}()

# `oneverything` folder: record name `s` of module `m` in the global cache.
# `get!` creates the module's set on first sight with a single hash lookup.
function build_namecache(m, s, @nospecialize(x), state::Union{Base.IdSet{Symbol},Nothing} = nothing)
    if state === nothing
        state = get!(Base.IdSet{Symbol}, _global_symbol_cache_by_mod, m)
    end
    push!(state, s)
end

# All binding names of `m`, computed lazily via `oneverything` and cached.
function getnames(m::Module)
    cache = get(_global_symbol_cache_by_mod, m, nothing)
    if cache === nothing
        oneverything(build_namecache, m)
        cache = _global_symbol_cache_by_mod[m]
    end
    return cache
end

# Names (across all loaded modules) that are bound to a Module.
function allmodulenames()
    symbols = Base.IdSet{Symbol}()
    oneverything((m, s, x, state) -> (x isa Module && push!(symbols, s); return state))
    return symbols
end

# (head of `allthingswithmethods`, completed in the next hunk)
function allthingswithmethods()
    symbols =
Base.IdSet{Any}() + oneverything(function (m, s, x, state) + if !Base.isvarargtype(x) && !isempty(methodlist(x)) + push!(symbols, x) + end + return state + end) + return symbols +end + +function allmethods() + ms = Method[] + oneverything(function (m, s, x, state) + if !Base.isvarargtype(x) && !isempty(methodlist(x)) + append!(ms, methodlist(x)) + end + return state + end) + return ms +end + +usedby(outer, inner) = outer !== inner && isdefined(outer, nameof(inner)) && getproperty(outer, nameof(inner)) === inner && all(isdefined(outer, name) || !isdefined(inner, name) for name in unsorted_names(inner)) +istoplevelmodule(m) = parentmodule(m) === m || parentmodule(m) === Main + +function getmoduletree(m::Module, amn, visited = Base.IdSet{Module}()) + push!(visited, m) + cache = ModuleStore(m) + for s in unsorted_names(m, all = true, imported = true, usings = true) + !isdefined(m, s) && continue + x = getfield(m, s) + if x isa Module + if istoplevelmodule(x) + cache[s] = VarRef(x) + elseif m === parentmodule(x) + cache[s] = getmoduletree(x, amn, visited) + else + cache[s] = VarRef(x) + end + end + end + for n in amn + if n !== nameof(m) && isdefined(m, n) + x = getfield(m, n) + if x isa Module + if !haskey(cache, n) + cache[n] = VarRef(x) + end + if x !== Main && usedby(m, x) + push!(cache.used_modules, n) + end + end + end + end + cache +end + +function getenvtree(names = nothing) + amn = allmodulenames() + EnvStore(nameof(m) => getmoduletree(m, amn) for m in Base.loaded_modules_array() if names === nothing || nameof(m) in names) +end + +# faster and more correct split_module_names +all_names(m) = all_names(m, x -> isdefined(m, x)) +function all_names(m, pred, symbols = Set(Symbol[]), seen = Set(Module[])) + push!(seen, m) + ns = unsorted_names(m; all = true, imported = false, usings = false) + for n in ns + isdefined(m, n) || continue + Base.isdeprecated(m, n) && continue + val = getfield(m, n) + if val isa Module && !(val in seen) + all_names(val, pred, symbols, 
seen) + end + if pred(n) + push!(symbols, n) + end + end + symbols +end + +# On 1.12, names() includes bindings from Core in Base even not requested, +# so we filter those out below. This could also be a version check, but doing it +# this way should be more robust +const CORE_BASE_NAMES_CONFUSION = :Bool in names(Base) + +function symbols(env::EnvStore, m::Union{Module,Nothing} = nothing, allnames::Base.IdSet{Symbol} = getallns(), visited = Base.IdSet{Module}(); get_return_type = false) + if m isa Module + cache = _lookup(VarRef(m), env, true) + cache === nothing && return + push!(visited, m) + ns = all_names(m) + for s in ns + !isdefined(m, s) && continue + x = getfield(m, s) + + if CORE_BASE_NAMES_CONFUSION && m === Base && isdefined(Core, s) && getfield(Core, s) === x + continue + end + + if Base.unwrap_unionall(x) isa DataType # Unions aren't handled here. + if parentmodule(x) === m + cache[s] = DataTypeStore(x, s, m, s in getnames(m)) + cache_methods(x, s, env, get_return_type) + elseif nameof(x) !== s + # This needs some finessing. + cache[s] = DataTypeStore(x, s, m, s in getnames(m)) + ms = cache_methods(x, s, env, get_return_type) + # A slightly difficult case. `s` is probably a shadow binding of `x` but we should store the methods nonetheless. + # Example: DataFrames.Not points to InvertedIndices.InvertedIndex + for m in ms + push!(cache[s].methods, m[2]) + end + else + # These are imported variables that are reexported. + cache[s] = VarRef(VarRef(parentmodule(x)), nameof(x)) + end + elseif x isa Function + if parentmodule(x) === m || (x isa Core.IntrinsicFunction && m === Core.Intrinsics) + cache[s] = FunctionStore(x, s, m, s in getnames(m)) + cache_methods(x, s, env, get_return_type) + elseif !haskey(cache, s) + # This will be replaced at a later point by a FunctionStore if methods for `x` are defined within `m`. 
+ if x isa Core.IntrinsicFunction + cache[s] = VarRef(VarRef(Core.Intrinsics), nameof(x)) + else + cache[s] = VarRef(VarRef(parentmodule(x)), nameof(x)) + end + elseif !((cache[s] isa FunctionStore || cache[s] isa DataTypeStore) && !isempty(cache[s].methods)) + # These are imported variables that are reexported. + # We don't want to remove Func/DT stores that have methods (these will be specific to the module) + if x isa Core.IntrinsicFunction + cache[s] = VarRef(VarRef(Core.Intrinsics), nameof(x)) + else + cache[s] = VarRef(VarRef(parentmodule(x)), nameof(x)) + end + end + elseif x isa Module + if x === m + cache[s] = VarRef(x) + elseif parentmodule(x) === m + symbols(env, x, allnames, visited, get_return_type = get_return_type) + else + cache[s] = VarRef(x) + end + else + cache[s] = GenericStore(VarRef(VarRef(m), s), FakeTypeName(typeof(x)), _doc(m, s), s in getnames(m)) + end + end + else + for m in Base.loaded_modules_array() + in(m, visited) || symbols(env, m, allnames, visited, get_return_type = get_return_type) + end + end +end + + +function load_core(; get_return_type = false) + c = Pkg.Types.Context() + cache = getenvtree([:Core,:Base]) + symbols(cache, get_return_type = get_return_type) + cache[:Main] = ModuleStore(VarRef(nothing, :Main), Dict(), "", true, [], []) + + # This is wrong. Every module contains it's own include function. 
+ push!(cache[:Base].exportednames, :include) + let f = cache[:Base][:include] + if haskey(cache[:Base][:MainInclude], :include) + cache[:Base][:include] = FunctionStore(f.name, cache[:Base][:MainInclude][:include].methods, f.doc, f.extends, true) + else + m1 = first(f.methods) + push!(f.methods, MethodStore( + m1.name, + m1.mod, + m1.file, + m1.line, + Pair{Any,Any}[ + :x => SymbolServer.FakeTypeName(SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Core), :AbstractString), Any[]) + ], + [], + m1.rt + )) + end + end + + cache[:Base][Symbol("@.")] = cache[:Base][Symbol("@__dot__")] + cache[:Core][:Main] = GenericStore(VarRef(nothing, :Main), FakeTypeName(Module), _doc(Main, :Main), true) + # Add built-ins + builtins = Symbol[nameof(getfield(Core, n).instance) for n in unsorted_names(Core, all = true) if isdefined(Core, n) && getfield(Core, n) isa DataType && isdefined(getfield(Core, n), :instance) && getfield(Core, n).instance isa Core.Builtin] + cnames = unsorted_names(Core) + for f in builtins + if !haskey(cache[:Core], f) + cache[:Core][f] = FunctionStore(getfield(Core, Symbol(f)), Symbol(f), Core, Symbol(f) in cnames) + end + end + haskey(cache[:Core], :_typevar) && push!(cache[:Core][:_typevar].methods, MethodStore(:_typevar, :Core, "built-in", 0, [:n => FakeTypeName(Symbol), :lb => FakeTypeName(Any), :ub => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:_apply].methods, MethodStore(:_apply, :Core, "built-in", 0, [:f => FakeTypeName(Function), :args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + haskey(cache[:Core].vals, :_apply_iterate) && push!(cache[:Core][:_apply_iterate].methods, MethodStore(:_apply_iterate, :Core, "built-in", 0, [:f => FakeTypeName(Function), :args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + if isdefined(Core, :_call_latest) + push!(cache[:Core][:_call_latest].methods, MethodStore(:_call_latest, :Core, "built-in", 0, [:f => FakeTypeName(Function), :args => 
FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:_call_in_world].methods, MethodStore(:_call_in_world, :Core, "built-in", 0, [:world => FakeTypeName(UInt), :f => FakeTypeName(Function), :args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + else + if isdefined(Core, :_apply_in_world) + push!(cache[:Core][:_apply_in_world].methods, MethodStore(:_apply_in_world, :Core, "built-in", 0, [:world => FakeTypeName(UInt), :f => FakeTypeName(Function), :args => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + end + push!(cache[:Core][:_apply_latest].methods, MethodStore(:_apply_latest, :Core, "built-in", 0, [:f => FakeTypeName(Function), :args => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + end + push!(cache[:Core][:_apply_pure].methods, MethodStore(:_apply_pure, :Core, "built-in", 0, [:f => FakeTypeName(Function), :args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:_expr].methods, MethodStore(:_expr, :Core, "built-in", 0, [:head => FakeTypeName(Symbol), :args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Expr))) + haskey(cache[:Core].vals, :_typevar) && push!(cache[:Core][:_typevar].methods, MethodStore(:_typevar, :Core, "built-in", 0, [:name => FakeTypeName(Symbol), :lb => FakeTypeName(Any), :ub => FakeTypeName(Any)], Symbol[], FakeTypeName(TypeVar))) + push!(cache[:Core][:applicable].methods, MethodStore(:applicable, :Core, "built-in", 0, [:f => FakeTypeName(Function), :args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Bool))) + push!(cache[:Core][:apply_type].methods, MethodStore(:apply_type, :Core, "built-in", 0, [:T => FakeTypeName(UnionAll), :types => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(UnionAll))) + push!(cache[:Core][:arrayref].methods, MethodStore(:arrayref, :Core, "built-in", 0, [:a => FakeTypeName(Any), :b => FakeTypeName(Any), :c => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:arrayset].methods, 
MethodStore(:arrayset, :Core, "built-in", 0, [:a => FakeTypeName(Any), :b => FakeTypeName(Any), :c => FakeTypeName(Any), :d => FakeTypeName(Any)], Symbol[], FakeTypeName(Any)))
    push!(cache[:Core][:arraysize].methods, MethodStore(:arraysize, :Core, "built-in", 0, [:a => FakeTypeName(Array), :i => FakeTypeName(Int)], Symbol[], FakeTypeName(Int)))
    haskey(cache[:Core], :const_arrayref) && push!(cache[:Core][:const_arrayref].methods, MethodStore(:const_arrayref, :Core, "built-in", 0, [:args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any)))
    push!(cache[:Core][:fieldtype].methods, MethodStore(:fieldtype, :Core, "built-in", 0, [:t => FakeTypeName(DataType), :field => FakeTypeName(Symbol)], Symbol[], FakeTypeName(Type{T} where T)))
    # BUGFIX: this entry previously carried the name :setfield (copy-paste);
    # it is the hand-written signature for Core.getfield.
    push!(cache[:Core][:getfield].methods, MethodStore(:getfield, :Core, "built-in", 0, [:object => FakeTypeName(Any), :item => FakeTypeName(Any)], Symbol[], FakeTypeName(Any)))
    push!(cache[:Core][:ifelse].methods, MethodStore(:ifelse, :Core, "built-in", 0, [:condition => FakeTypeName(Bool), :x => FakeTypeName(Any), :y => FakeTypeName(Any)], Symbol[], FakeTypeName(Any)))
    push!(cache[:Core][:invoke].methods, MethodStore(:invoke, :Core, "built-in", 0, [:f => FakeTypeName(Function), :x => FakeTypeName(Any), :argtypes => FakeTypeName(Type{T} where T), :args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any)))
    push!(cache[:Core][:isa].methods, MethodStore(:isa, :Core, "built-in", 0, [:a => FakeTypeName(Any), :T => FakeTypeName(Type{T} where T)], Symbol[], FakeTypeName(Bool)))
    # BUGFIX: this entry previously carried the name :getproperty (copy-paste);
    # it is the hand-written signature for Core.isdefined.
    push!(cache[:Core][:isdefined].methods, MethodStore(:isdefined, :Core, "built-in", 0, [:value => FakeTypeName(Any), :field => FakeTypeName(Any)], Symbol[], FakeTypeName(Any)))
    push!(cache[:Core][:nfields].methods, MethodStore(:nfields, :Core, "built-in", 0, [:x => FakeTypeName(Any)], Symbol[], FakeTypeName(Int)))
    push!(cache[:Core][:setfield!].methods, MethodStore(:setfield!, :Core, "built-in", 0, [:value => FakeTypeName(Any), :name =>
FakeTypeName(Symbol), :x => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:sizeof].methods, MethodStore(:sizeof, :Core, "built-in", 0, [:obj => FakeTypeName(Any)], Symbol[], FakeTypeName(Int))) + push!(cache[:Core][:svec].methods, MethodStore(:svec, :Core, "built-in", 0, [:args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:throw].methods, MethodStore(:throw, :Core, "built-in", 0, [:e => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:tuple].methods, MethodStore(:tuple, :Core, "built-in", 0, [:args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:typeassert].methods, MethodStore(:typeassert, :Core, "built-in", 0, [:x => FakeTypeName(Any), :T => FakeTypeName(Type{T} where T)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:typeof].methods, MethodStore(:typeof, :Core, "built-in", 0, [:x => FakeTypeName(Any)], Symbol[], FakeTypeName(Type{T} where T))) + + push!(cache[:Core][:getproperty].methods, MethodStore(:getproperty, :Core, "built-in", 0, [:value => FakeTypeName(Any), :name => FakeTypeName(Symbol)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:setproperty!].methods, MethodStore(:setproperty!, :Core, "built-in", 0, [:value => FakeTypeName(Any), :name => FakeTypeName(Symbol), :x => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:setproperty!].methods, MethodStore(:setproperty!, :Core, "built-in", 0, [:value => FakeTypeName(Any), :name => FakeTypeName(Symbol), :x => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + haskey(cache[:Core], :_abstracttype) && push!(cache[:Core][:_abstracttype].methods, MethodStore(:_abstracttype, :Core, "built-in", 0, [:m => FakeTypeName(Module), :x => FakeTypeName(Symbol), :p => FakeTypeName(Core.SimpleVector)], Symbol[], FakeTypeName(Any))) + haskey(cache[:Core], :_primitivetype) && push!(cache[:Core][:_primitivetype].methods, MethodStore(:_primitivetype, :Core, "built-in", 0, [:m 
=> FakeTypeName(Module), :x => FakeTypeName(Symbol), :p => FakeTypeName(Core.SimpleVector), :n => FakeTypeName(Core.Int)], Symbol[], FakeTypeName(Any))) + haskey(cache[:Core], :_equiv_typedef) && push!(cache[:Core][:_equiv_typedef].methods, MethodStore(:_equiv_typedef, :Core, "built-in", 0, [:a => FakeTypeName(Any), :b => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + haskey(cache[:Core], :_setsuper!) && push!(cache[:Core][:_setsuper!].methods, MethodStore(:_setsuper!, :Core, "built-in", 0, [:a => FakeTypeName(Any), :b => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + haskey(cache[:Core], :_structtype) && push!(cache[:Core][:_structtype].methods, MethodStore(:_structtype, :Core, "built-in", 0, [:m => FakeTypeName(Module), :x => FakeTypeName(Symbol), :p => FakeTypeName(Core.SimpleVector), :fields => FakeTypeName(Core.SimpleVector), :mut => FakeTypeName(Bool), :z => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + haskey(cache[:Core], :_typebody!) && push!(cache[:Core][:_typebody!].methods, MethodStore(:_typebody!, :Core, "built-in", 0, [:a => FakeTypeName(Any), :b => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:(===)].methods, MethodStore(:(===), :Core, "built-in", 0, [:a => FakeTypeName(Any), :b => FakeTypeName(Any)], Symbol[], FakeTypeName(Any))) + push!(cache[:Core][:(<:)].methods, MethodStore(:(<:), :Core, "built-in", 0, [:a => FakeTypeName(Type{T} where T), :b => FakeTypeName(Type{T} where T)], Symbol[], FakeTypeName(Any))) + # Add unspecified methods for Intrinsics, working out the actual methods will need to be done by hand? 
+ for n in names(Core.Intrinsics) + if getfield(Core.Intrinsics, n) isa Core.IntrinsicFunction + push!(cache[:Core][:Intrinsics][n].methods, MethodStore(n, :Intrinsics, "built-in", 0, [:args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + :args => FakeTypeName(Vararg{Any}) + end + end + + for bi in builtins + if haskey(cache[:Core], bi) && isempty(cache[:Core][bi].methods) + # Add at least one arbitrary method for anything left over + push!(cache[:Core][bi].methods, MethodStore(bi, :none, "built-in", 0, [:x => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any))) + end + end + + cache[:Core][:ccall] = FunctionStore(VarRef(VarRef(Core), :ccall), + MethodStore[ + MethodStore(:ccall, :Core, "built-in", 0, [:args => FakeTypeName(Vararg{Any})], Symbol[], FakeTypeName(Any)) # General method - should be fixed + ], + "`ccall((function_name, library), returntype, (argtype1, ...), argvalue1, ...)`\n`ccall(function_name, returntype, (argtype1, ...), argvalue1, ...)`\n`ccall(function_pointer, returntype, (argtype1, ...), argvalue1, ...)`\n\nCall a function in a C-exported shared library, specified by the tuple (`function_name`, `library`), where each component is either a string or symbol. Instead of specifying a library, one\ncan also use a `function_name` symbol or string, which is resolved in the current process. Alternatively, `ccall` may also be used to call a function pointer `function_pointer`, such as one\nreturned by `dlsym`.\n\nNote that the argument type tuple must be a literal tuple, and not a tuple-valued variable or expression.\n\nEach `argvalue` to the `ccall` will be converted to the corresponding `argtype`, by automatic insertion of calls to `unsafe_convert(argtype, cconvert(argtype, argvalue))`. (See also the documentation for `unsafe_convert` and `cconvert` for further details.) 
In most cases, this simply results in a call to `convert(argtype, argvalue)`.", + VarRef(VarRef(Core), :ccall), + true) + push!(cache[:Core].exportednames, :ccall) + cache[:Core][Symbol("@__doc__")] = FunctionStore(VarRef(VarRef(Core), Symbol("@__doc__")), [], "", VarRef(VarRef(Core), Symbol("@__doc__")), true) + cache_methods(getfield(Core, Symbol("@__doc__")), Symbol("@__doc__"), cache, false) + # Accounts for the dd situation where Base.rand only has methods from Random which doesn't appear to be explicitly used. + # append!(cache[:Base][:rand].methods, cache_methods(Base.rand, cache)) + for m in cache_methods(Base.rand, :rand, cache, get_return_type) + push!(cache[:Base][:rand].methods, m[2]) + end + for m in cache_methods(Base.randn, :randn, cache, get_return_type) + push!(cache[:Base][:randn].methods, m[2]) + end + + # Intrinsics + cache[:Core][:add_int] = VarRef(VarRef(VarRef(nothing, :Core), :Intrinsics), :add_int) + cache[:Core][:sle_int] = VarRef(VarRef(VarRef(nothing, :Core), :Intrinsics), :sle_int) + return cache +end + + +function collect_extended_methods(depot::EnvStore, extendeds = Dict{VarRef,Vector{VarRef}}()) + for m in depot + collect_extended_methods(m[2], extendeds, m[2].name) + end + extendeds +end + +function collect_extended_methods(mod::ModuleStore, extendeds, mname) + for (n, v) in mod.vals + if (v isa FunctionStore) && v.extends != v.name + haskey(extendeds, v.extends) ? push!(extendeds[v.extends], mname) : (extendeds[v.extends] = VarRef[v.extends.parent, mname]) + elseif v isa ModuleStore + collect_extended_methods(v, extendeds, v.name) + end + end +end + +getallns() = let allns = Base.IdSet{Symbol}(); oneverything((m, s, x, state) -> push!(allns, s)); allns end + +""" + split_module_names(m::Module, allns) + +Return two lists of names accessible from calling `getfield(m, somename)`. The first +contains those symbols returned by `Base.names(m, all = true)`. 
The second contains +all others, including imported symbols and those introduced by the `using` of modules. +""" +function split_module_names(m::Module, allns) + internal_names = getnames(m) + availablenames = Set{Symbol}([s for s in allns if isdefined(m, s)]) + # usinged_names = Set{Symbol}() + + for n in availablenames + if (n in internal_names) + pop!(availablenames, n) + end + end + allms = get_all_modules() + for u in get_used_modules(m, allms) + for n in unsorted_names(u) + if n in availablenames + pop!(availablenames, n) + # push!(usinged_names, pop!(availablenames, n)) + end + end + end + internal_names, availablenames +end + +get_all_modules() = let allms = Base.IdSet{Module}(); apply_to_everything(x -> if x isa Module push!(allms, x) end); allms end +get_used_modules(M, allms = get_all_modules()) = [m for m in allms if usedby(M, m)] diff --git a/src/SymbolServer/utils.jl b/src/SymbolServer/utils.jl new file mode 100644 index 0000000..bdcb47a --- /dev/null +++ b/src/SymbolServer/utils.jl @@ -0,0 +1,704 @@ +using Pkg + +@static if VERSION < v"1.1" + const PackageEntry = Vector{Dict{String,Any}} +else + using Pkg.Types: PackageEntry +end + +@static if isdefined(Base, :parsed_toml) + parsed_toml(args...) = Base.parsed_toml(args...) +else + parsed_toml(file) = Pkg.TOML.parsefile(file) +end + +""" + manifest(c::Pkg.Types.Context) +Retrieves the UUID -> PackageEntry map from the manifest of a Context. +""" +function manifest(c::Pkg.Types.Context) + m = c.env.manifest + if m isa Dict + return m + else + return m.deps + end +end + +""" + read_manifest(manifest_filename) + +Read the manifest from the path and return the UUID -> PackageEntry map. +If the file can't be read, return `nothing`. +""" +function read_manifest(manifest_filename) + try + m = Pkg.API.read_manifest(manifest_filename) + if m isa Dict + return m + else + return m.deps + end + catch err + @warn "Could not load manifest." 
exception=(err, catch_backtrace()) + return nothing + end +end + +""" + project(c::Pkg.Types.Context) +Retrieves the project of a Context. +""" +project(c::Pkg.Types.Context) = c.env.project + +""" + isinproject(context, package::Union{String,UUID}) +Checks whether a package is in the dependencies of a given context, e.g. is directly loadable. +""" +function isinproject end + +""" + isinmanifest(context, package::Union{String,UUID}) +Checks whether a package is in the manifest of a given context, e.g. is either directly loadable or is a dependency of an loadable package. +""" +function isinmanifest end + +@static if VERSION < v"1.1" + isinmanifest(context::Pkg.Types.Context, module_name::String) = module_name in keys(manifest(context)) + isinmanifest(context::Pkg.Types.Context, uuid::UUID) = any(get(p[1], "uuid", "") == string(uuid) for (u, p) in manifest(context)) + isinmanifest(manifest::Dict{String,Any}, uuid::AbstractString) = any(get(p[1], "uuid", "") == uuid for (u, p) in manifest) + isinmanifest(manifest::Dict{String,Any}, uuid::UUID) = isinmanifest(manifest, string(uuid)) + + isinproject(context::Pkg.Types.Context, package_name::String) = haskey(deps(project(context)), package_name) + isinproject(context::Pkg.Types.Context, package_uuid::UUID) = any(u == package_uuid for (n, u) in deps(project(context))) + + function packageuuid(c::Pkg.Types.Context, name::String) + for pkg in manifest(c) + if first(pkg) == name + return UUID(last(pkg)[1]["uuid"]) + end + end + end + packageuuid(pkg::Pair{Any,Any}) = last(pkg) isa String ? UUID(last(pkg)) : UUID(first(last(pkg))["uuid"]) + packageuuid(pkg::Pair{String,Any}) = last(pkg) isa String ? 
UUID(last(pkg)) : UUID(first(last(pkg))["uuid"]) + + packagename(pkg::Pair{String,Any})::String = first(pkg) + function packagename(c::Pkg.Types.Context, uuid) + for (n, p) in manifest(c) + if get(first(p), "uuid", "") == string(uuid) + return n + end + end + return nothing + end + function packagename(manifest::Dict{String,Any}, uuid::String) + for (n, p) in manifest + if get(first(p), "uuid", "") == string(uuid) + return n + end + end + return nothing + end + packagename(manifest::Dict{String,Any}, uuid::UUID) = packagename(manifest, string(uuid)) + + function deps(uuid::UUID, c::Pkg.Types.Context) + if any(p[1]["uuid"] == string(uuid) for (n, p) in manifest(c)) + return manifest(c)[string(uuid)][1].deps + else + return Dict{Any,Any}() + end + end + deps(d::Dict{String,Any}) = get(d, "deps", Dict{String,Any}()) + deps(pe::PackageEntry) = get(pe[1], "deps", Dict{String,Any}()) + path(pe::PackageEntry) = get(pe[1], "path", nothing) + version(pe::PackageEntry) = get(pe[1], "version", nothing) + tree_hash(pe) = get(pe[1], "git-tree-sha1", nothing) + + frommanifest(c::Pkg.Types.Context, uuid) = frommanifest(manifest(c), uuid) + + function frommanifest(manifest::Dict{String,Any}, uuid) + for p in values(manifest) + if get(first(p), "uuid", "") == string(uuid) + return (p) + end + end + return nothing + end + is_package_deved(manifest, uuid) = get(first([p[2][1] for p in manifest if get(p[2][1], "uuid", "") == string(uuid)]), "path", "") != "" +else + isinmanifest(context::Pkg.Types.Context, module_name::String) = any(p.name == module_name for (u, p) in manifest(context)) + isinmanifest(context::Pkg.Types.Context, uuid::UUID) = haskey(manifest(context), uuid) + isinmanifest(manifest::Dict{UUID,PackageEntry}, uuid::UUID) = haskey(manifest, uuid) + + isinproject(context::Pkg.Types.Context, package_name::String) = haskey(deps(project(context)), package_name) + isinproject(context::Pkg.Types.Context, package_uuid::UUID) = any(u == package_uuid for (n, u) in 
deps(project(context))) + + function packageuuid(c::Pkg.Types.Context, name::String) + for pkg in manifest(c) + if last(pkg).name == name + return first(pkg) + end + end + end + packageuuid(pkg::Pair{String,UUID}) = last(pkg) + packageuuid(pkg::Pair{UUID,PackageEntry}) = first(pkg) + + packagename(pkg::Pair{UUID,PackageEntry})::Union{Nothing,String} = last(pkg).name + packagename(c::Pkg.Types.Context, uuid::UUID) = manifest(c)[uuid].name + packagename(manifest::Dict{UUID,PackageEntry}, uuid::UUID) = manifest[uuid].name + + function deps(uuid::UUID, c::Pkg.Types.Context) + if haskey(manifest(c), uuid) + return deps(manifest(c)[uuid]) + else + return Dict{String,Base.UUID}() + end + end + deps(pe::PackageEntry) = pe.deps + deps(proj::Pkg.Types.Project) = proj.deps + deps(pkg::Pair{String,UUID}, c::Pkg.Types.Context) = deps(packageuuid(pkg), c) + path(pe::PackageEntry) = pe.path + version(pe::PackageEntry) = pe.version + version(pe::Pair{UUID,PackageEntry}) = last(pe).version + frommanifest(c::Pkg.Types.Context, uuid) = manifest(c)[uuid] + frommanifest(manifest::Dict{UUID,PackageEntry}, uuid) = manifest[uuid] + tree_hash(pkg::Pair{UUID,PackageEntry}) = tree_hash(last(pkg)) + + @static if VERSION >= v"1.3" + tree_hash(pe::PackageEntry) = pe.tree_hash + else + tree_hash(pe::PackageEntry) = (pe.other === nothing ? 
nothing : get(pe.other, "git-tree-sha1", nothing)) + end + + is_package_deved(manifest, uuid) = manifest[uuid].path !== nothing +end + +function sha2_256_dir(path, sha=zeros(UInt8, 32)) + (uperm(path) & 0x04) != 0x04 && return + startswith(path, ".") && return + if isfile(path) && endswith(path, ".jl") + s1 = open(path) do f + sha2_256(f) + end + sha .+= s1 + elseif isdir(path) + for f in readdir(path) + sha = sha2_256_dir(joinpath(path, f), sha) + end + end + return sha +end + +function sha_pkg(manifest_dir::AbstractString, pe::PackageEntry) + relpath = path(pe) + isa(relpath, String) || return nothing + src_path = normpath(joinpath(manifest_dir, relpath, "src")) + return isdir(src_path) ? sha2_256_dir(src_path) : nothing +end + +function _doc(mod::Module, sym::Symbol) + try + # constructing the binding may fail with e.g. "Constant binding was imported from multiple modules", + # so we just wrap this in yet another try-catch + _doc(Base.Docs.Binding(mod, sym)) + catch err + @debug "Error computing docs for binding ($mod, $sym)" ex=(err, catch_backtrace()) + return "" + end +end + +function _doc(binding::Base.Docs.Binding) + try + sig = Union{} + if Base.Docs.defined(binding) + result = Base.Docs.getdoc(Base.Docs.resolve(binding), sig) + result === nothing || return string(result) + end + results, groups = Base.Docs.DocStr[], Base.Docs.MultiDoc[] + # Lookup `binding` and `sig` for matches in all modules of the docsystem. + for mod in Base.Docs.modules + dict = Base.Docs.meta(mod)::IdDict{Any,Any} + if haskey(dict, binding) + multidoc = dict[binding] + push!(groups, multidoc) + for msig in multidoc.order + sig <: msig && push!(results, multidoc.docs[msig]) + end + end + end + if isempty(results) + for group in groups, each in group.order + push!(results, group.docs[each]) + end + end + md = try + Base.Docs.catdoc(map(Base.Docs.parsedoc, results)...) + catch + nothing + end + return md === nothing ? 
"" : string(md) + catch + return "" + end +end + +_lookup(vr::FakeUnion, depot::EnvStore, cont=false) = nothing +_lookup(vr::FakeTypeName, depot::EnvStore, cont=false) = _lookup(vr.name, depot, cont) +_lookup(vr::FakeUnionAll, depot::EnvStore, cont=false) = _lookup(vr.body, depot, cont) +function _lookup(vr::VarRef, depot::EnvStore, cont=false) + if vr.parent === nothing + if haskey(depot, vr.name) + val = depot[vr.name] + if cont && val isa VarRef + return _lookup(val, depot, cont) + else + return val + end + else + return nothing + end + else + par = _lookup(vr.parent, depot, cont) + if par !== nothing && par isa ModuleStore && haskey(par, vr.name) + val = par[vr.name] + if cont && val isa VarRef + return _lookup(val, depot, cont) + else + return val + end + else + return nothing + end + end +end + +maybe_lookup(x, env) = x isa VarRef ? _lookup(x, env, true) : x + +""" + maybe_getfield(k::Symbol, m::ModuleStore, envstore) + +Try to get `k` from `m`. This includes: unexported variables, and variables +exported by modules used within `m`. +""" +function maybe_getfield(k::Symbol, m::ModuleStore, envstore) + if haskey(m.vals, k) + return m.vals[k] + else + for v in m.used_modules + !haskey(m.vals, v) && continue + submod = m.vals[v] + if submod isa ModuleStore && k in submod.exportednames && haskey(submod.vals, k) + return submod.vals[k] + elseif submod isa VarRef + submod = _lookup(submod, envstore, true) + if submod isa ModuleStore && k in submod.exportednames && haskey(submod.vals, k) + return submod.vals[k] + end + end + end + end +end + +function issubmodof(m::Module, M::Module) + if m == M + return true + elseif parentmodule(m) === m + return false + elseif parentmodule(m) == M + return true + else + return issubmodof(parentmodule(m), M) + end +end + +function Base.print(io::IO, f::FunctionStore) + println(io, f.name, " is a Function.") + nm = length(f.methods) + println(io, "# $nm method", nm == 1 ? 
"" : "s", " for function ", f.name) + for i = 1:nm + print(io, "[$i] ") + println(io, f.methods[i]) + end +end + +const JULIA_DIR = normpath(joinpath(Sys.BINDIR, Base.DATAROOTDIR, "julia")) + +function Base.print(io::IO, m::MethodStore) + print(io, m.name, "(") + for i = 1:length(m.sig) + if m.sig[i][1] != Symbol("#unused#") + print(io, m.sig[i][1]) + end + print(io, "::", m.sig[i][2]) + i != length(m.sig) && print(io, ", ") + end + print(io, ")") + path = replace(m.file, JULIA_DIR => "") + print(io, " in ", m.mod, " at ", path, ':', m.line) +end + +function Base.print(io::IO, t::DataTypeStore) + print(io, t.name, " <: ", t.super) + for i = 1:length(t.fieldnames) + print(io, "\n ", t.fieldnames[i], "::", t.types[i]) + end +end + +Base.print(io::IO, m::ModuleStore) = print(io, m.name) +Base.print(io::IO, x::GenericStore) = print(io, x.name, "::", x.typ) + +extends_methods(f) = false +extends_methods(f::FunctionStore) = f.name != f.extends +get_top_module(vr::VarRef) = vr.parent === nothing ? vr.name : get_top_module(vr.parent) + +# Sorting is the main performance of calling `names` +@static if VERSION < v"1.12-" + unsorted_names(m::Module; all::Bool=false, imported::Bool=false, usings=false) = + ccall(:jl_module_names, Array{Symbol,1}, (Any, Cint, Cint), m, all, imported) +else + unsorted_names(m::Module; all::Bool=false, imported::Bool=false, usings=false) = + ccall(:jl_module_names, Array{Symbol,1}, (Any, Cint, Cint, Cint), m, all, imported, usings) +end + +## recursive_copy +# +# `deepcopy` is reliable but incredibly slow. Its slowness comes from two factors: +# - generically iterating over, e.g., `fieldnames(typeof(x))` rather than having a method +# optimized for each struct type +# - its care to protect against circular depenency graphs +# When you don't need to worry about cycles, you can do much better by defining your own function. 
+ +recursive_copy(x) = deepcopy(x) + +recursive_copy(::Nothing) = nothing + +recursive_copy(s::Symbol) = s + +recursive_copy(c::Char) = c + +recursive_copy(str::String) = str + +recursive_copy(x::Number) = x + +recursive_copy(p::Pair) = typeof(p)(recursive_copy(p.first), recursive_copy(p.second)) + +recursive_copy(A::Array) = eltype(A)[recursive_copy(a) for a in A] + +recursive_copy(d::Dict) = typeof(d)(recursive_copy(p) for p in d) + + +recursive_copy(ref::VarRef) = VarRef(recursive_copy(ref.parent), ref.name) + +recursive_copy(tn::FakeTypeName) = FakeTypeName(recursive_copy(tn.name), recursive_copy(tn.parameters)) + +recursive_copy(tb::FakeTypeofBottom) = tb + +recursive_copy(u::FakeUnion) = FakeUnion(recursive_copy(u.a), recursive_copy(u.b)) + +recursive_copy(tv::FakeTypeVar) = FakeTypeVar(tv.name, recursive_copy(tv.lb), recursive_copy(tv.ub)) + +recursive_copy(ua::FakeUnionAll) = FakeUnionAll(recursive_copy(ua.var), recursive_copy(ua.body)) + +@static if !(Vararg isa Type) + function recursive_copy(va::FakeTypeofVararg) + if isdefined(va, :N) + FakeTypeofVararg(recursive_copy(va.T), va.N) + elseif isdefined(va, :T) + FakeTypeofVararg(recursive_copy(va.T)) + else + FakeTypeofVararg() + end + end +end + +recursive_copy(m::ModuleStore) = ModuleStore(recursive_copy(m.name), recursive_copy(m.vals), m.doc, + m.exported, copy(m.exportednames), copy(m.used_modules)) + +recursive_copy(p::Package) = Package(p.name, + recursive_copy(p.val), + p.uuid, + recursive_copy(p.sha)) + +recursive_copy(ms::MethodStore) = MethodStore(ms.name, + ms.mod, + ms.file, + ms.line, + recursive_copy(ms.sig), + copy(ms.kws), + recursive_copy(ms.rt)) + +recursive_copy(dts::DataTypeStore) = DataTypeStore(recursive_copy(dts.name), + recursive_copy(dts.super), + recursive_copy(dts.parameters), + recursive_copy(dts.types), + recursive_copy(dts.fieldnames), + recursive_copy(dts.methods), + dts.doc, + dts.exported) + +recursive_copy(fs::FunctionStore) = FunctionStore(recursive_copy(fs.name), + 
recursive_copy(fs.methods), + fs.doc, + recursive_copy(fs.extends), + fs.exported) + +recursive_copy(gs::GenericStore) = GenericStore(recursive_copy(gs.name), + recursive_copy(gs.typ), + gs.doc, + gs.exported) + + +# Tools for modifying source location +# env = getenvtree([:somepackage]) +# symbols(env, somepackage) +# m = env[:somepackage] +# To strip actual src path: +# modify_dirs(m, f -> modify_dir(f, pkg_src_dir(somepackage), "PLACEHOLDER")) +# To replace the placeholder: +# modify_dirs(m, f -> modify_dir(f, "PLACEHOLDER", new_src_dir)) +function modify_dirs(m::ModuleStore, f) + for (k, v) in m.vals + if v isa FunctionStore + m.vals[k] = FunctionStore(v.name, MethodStore[MethodStore(m.name, m.mod, f(m.file), m.line, m.sig, m.kws, m.rt) for m in v.methods], v.doc, v.extends, v.exported) + elseif v isa DataTypeStore + m.vals[k] = DataTypeStore(v.name, v.super, v.parameters, v.types, v.fieldnames, MethodStore[MethodStore(m.name, m.mod, f(m.file), m.line, m.sig, m.kws, m.rt) for m in v.methods], v.doc, v.exported) + elseif v isa ModuleStore + modify_dirs(v, f) + end + end +end + +pkg_src_dir(m::Module) = dirname(pathof(m)) + +# replace s1 with s2 at the start of a string +function modify_dir(f, s1, s2) + # @assert startswith(f, s1) + # Removed assertion because of Enums issue + replace(f, s1 => s2) +end + + +# tools to retrieve cache from the cloud + +function get_file_from_cloud(manifest, uuid, environment_path, depot_dir, cache_dir="../cache", download_dir="../downloads/", symbolcache_upstream="https://www.julia-vscode.org/symbolcache") + paths = get_cache_path(manifest, uuid) + name = packagename(manifest, uuid) + link = string(first(splitext(join([symbolcache_upstream, "store/v1/packages", paths...], '/'))), ".tar.gz") + + dest_filepath = joinpath(cache_dir, paths...) 
+ dest_filepath_unavailable = string(first(splitext(dest_filepath)), ".unavailable") + + download_dir = joinpath(download_dir, first(splitext(last(paths)))) + download_filepath = joinpath(download_dir, last(paths)) + download_filepath_unavailable = string(first(splitext(download_filepath)), ".unavailable") + + @debug "Downloading cache file for $name." + if isfile(dest_filepath_unavailable) + @debug "Cloud was unable to cache $name in the past, we won't try to retrieve it again." + return false + end + file = try + if Pkg.PlatformEngines.download_verify_unpack(link, nothing, download_dir) + mkpath(dirname(dest_filepath)) + if !isfile(download_filepath) && isfile(download_filepath_unavailable) + mv(download_filepath_unavailable, dest_filepath_unavailable) + @info "Cloud is unable to cache $name, we won't try to retrieve it again." + return false + end + mv(download_filepath, dest_filepath) + dest_filepath + else + @debug "Couldn't retrieve cache file for $name." + return false + end + catch err + @debug "Couldn't retrieve cache file for $name." exception = (err, catch_backtrace()) + return false + end + + cache = try + open(file, "r") do io + CacheStore.read(io) + end + catch + @warn "Couldn't read cache file for $name, deleting." 
+ rm(file) + return false + end + + pkg_entry = Base.locate_package(Base.PkgId(uuid, name)) + if pkg_entry !== nothing && isfile(pkg_entry) + pkg_src = dirname(pkg_entry) + else + pkg_root = get_pkg_path(Base.PkgId(uuid, name), environment_path, depot_dir) + if pkg_root === nothing + @debug "Successfully downloaded and saved $(name), but with placeholder paths" + return false + end + pkg_src = joinpath(pkg_root, "src") + end + + # TODO: it would be better if the PLACEHOLDER replacement happens at runtime + # instead of "unpack-time", because we can use the current depot path + # in case the user switched to another one after downloading + + @debug "Replacing PLACEHOLDER with:" pkg_src + modify_dirs(cache.val, f -> modify_dir(f, r"^PLACEHOLDER", pkg_src)) + open(file, "w") do io + CacheStore.write(io, cache) + end + + @debug "Successfully downloaded, scrubbed and saved $(name)" + return true +end + +""" + validate_disc_store(store_path, manifest) + +This returns a list of non-jll packages in the manifest that don't have caches on disc. +""" +function validate_disc_store(store_path, manifest) + filter(manifest) do pkg + uuid = packageuuid(pkg) + endswith(packagename(manifest, uuid), "_jll") && return false + + file_name = joinpath(get_cache_path(manifest, uuid)...) + yield() + return !isfile(joinpath(store_path, file_name)) + end +end + +function find_project_file(env) + isdir(env) || return false + for filename in ("Project.toml", "JuliaProject.toml") + maybe_project_file = joinpath(env, filename) + if isfile(maybe_project_file) + return maybe_project_file + end + end + return false +end + +""" + get_pkg_path(pkg::Base.PkgId, env, depot_path) + +Find out where a package is installed without having to load it. 
+""" +function get_pkg_path(pkg::Base.PkgId, env, depot_path) + project_file = find_project_file(env) + project_file isa Bool && return nothing + manifest_file = Base.project_file_manifest_path(project_file) + + d = parsed_toml(manifest_file) + if get(d, "manifest_format", "0.0") == "2.0" + entries = get(d, "deps", nothing) + entries === nothing && return nothing + entries = map(e -> e[1], values(entries)) + else + entries = get(d, pkg.name, nothing) + end + entries === nothing && return nothing # TODO: allow name to mismatch? + for entry in entries + entry = entry::Dict{String,Any} + uuid = get(entry, "uuid", nothing)::Union{Nothing,String} + uuid === nothing && continue + if UUID(uuid) === pkg.uuid + path = get(entry, "path", nothing)::Union{Nothing,String} + # this can only be true for explicitly dev'ed packages + if path !== nothing + path = normpath(abspath(dirname(manifest_file), path)) + return path + end + hash = get(entry, "git-tree-sha1", nothing)::Union{Nothing,String} + hash === nothing && return nothing + hash = Base.SHA1(hash) + # empty default path probably means that we should use the default Julia depots + if depot_path == "" + depot_paths = [] + if isdefined(Base, :append_default_depot_path!) + Base.append_default_depot_path!(depot_paths) + else + depot_paths = Pkg.depots() + end + else + depot_paths = [depot_path] + end + for depot in depot_paths + # Keep the 4 since it used to be the default + for slug in (Base.version_slug(pkg.uuid, hash, 4), Base.version_slug(pkg.uuid, hash)) + path = abspath(depot, "packages", pkg.name, slug) + ispath(path) && return path + end + end + return nothing + end + end + return nothing +end + +function load_package(c::Pkg.Types.Context, uuid, conn, loadingbay, percentage = missing) + isinmanifest(c, uuid isa String ? Base.UUID(uuid) : uuid) || return + pe_name = packagename(c, uuid) + + pid = Base.PkgId(uuid isa String ? 
Base.UUID(uuid) : uuid, pe_name) + if pid in keys(Base.loaded_modules) + conn !== nothing && println(conn, "PROCESSPKG;$pe_name;$uuid;noversion;$percentage") + loadingbay.eval(:($(Symbol(pe_name)) = $(Base.loaded_modules[pid]))) + m = Base.invokelatest(() -> getfield(loadingbay, Symbol(pe_name))) + else + m = try + conn !== nothing && println(conn, "STARTLOAD;$pe_name;$uuid;noversion;$percentage") + loadingbay.eval(:(import $(Symbol(pe_name)))) + conn !== nothing && println(conn, "STOPLOAD;$pe_name") + m = Base.invokelatest(() -> getfield(loadingbay, Symbol(pe_name))) + catch + return + end + end +end + +function write_cache(uuid, pkg::Package, outpath) + mkpath(dirname(outpath)) + @info "Now writing to disc $uuid" + open(outpath, "w") do io + CacheStore.write(io, pkg) + end + outpath +end + +""" + get_cache_path(manifest, uuid) + +Returns a vector containing the cache storage path for a package structured: [folder, folder, file]. +""" +function get_cache_path(manifest, uuid) + name = packagename(manifest, uuid) + pkg_info = frommanifest(manifest, uuid) + ver = version(pkg_info) + if ver === nothing + ver = "nothing" + if isdefined(Pkg.Types, :is_stdlib) && Pkg.Types.is_stdlib(uuid) + ver = VERSION + end + end + ver = replace(string(ver), '+'=>'_') + th = tree_hash(pkg_info) + th = th === nothing ? "nothing" : th + + [ + string(uppercase(string(name)[1])) + string(name, "_", uuid) + string("v", ver, "_", th, ".jstore") + ] +end + +function write_depot(server::Server, ctx, written_caches) + for (uuid, pkg) in server.depot + cache_paths = get_cache_path(manifest(ctx), uuid) + outpath = joinpath(server.storedir, cache_paths...) 
+ outpath in written_caches && continue + + written_path = write_cache(uuid, pkg, outpath) + !isempty(written_path) && push!(written_caches, written_path) + end +end From 5319c49ec7d78aeb3229184d38ed7c101ce13087 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Mon, 24 Nov 2025 19:14:11 -0800 Subject: [PATCH 03/24] First pass at StaticLint integration --- Project.toml | 12 ++ src/JuliaWorkspaces.jl | 13 +- src/StaticLint/StaticLint.jl | 247 ++++++++++++------------ src/StaticLint/bindings.jl | 182 +++++++++--------- src/StaticLint/imports.jl | 84 ++++----- src/StaticLint/interface.jl | 57 ------ src/StaticLint/linting/checks.jl | 310 +++++++++++++++---------------- src/StaticLint/macros.jl | 104 +++++------ src/StaticLint/methodmatching.jl | 40 ++-- src/StaticLint/references.jl | 134 +++++++------ src/StaticLint/scope.jl | 40 ++-- src/StaticLint/server.jl | 101 ---------- src/StaticLint/subtypes.jl | 4 +- src/StaticLint/type_inf.jl | 92 ++++----- src/StaticLint/utils.jl | 138 +++++++------- src/layer_diagnostics.jl | 5 + src/layer_semantics.jl | 84 +++++++++ src/layer_syntax_trees.jl | 10 + 18 files changed, 805 insertions(+), 852 deletions(-) delete mode 100644 src/StaticLint/interface.jl delete mode 100644 src/StaticLint/server.jl create mode 100644 src/layer_semantics.jl diff --git a/Project.toml b/Project.toml index e0fb651..de44da3 100644 --- a/Project.toml +++ b/Project.toml @@ -4,6 +4,7 @@ authors = ["David Anthoff "] version = "7.0.1-DEV" [deps] +CSTParser = "00ebfdb7-1f24-5e51-bd34-a7502290713f" JuliaSyntax = "70703baa-626e-46a2-a12c-08ffd08c73b4" Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" AutoHashEquals = "15f4f7f2-30c1-5605-9d31-71845cf9641f" @@ -11,18 +12,29 @@ CancellationTokens = "2e8d271d-f2e2-407b-a864-17eb2156783e" Salsa = "1fbf2c77-44e2-4d5d-8131-0fa618a5c278" UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" TestItemDetection = "76b0de8b-5c4b-48ef-a724-914b33ca988d" +SHA = "ea8e919c-243c-51af-8825-aaa63cd721ce" +Sockets = 
"6462fe0b-24de-5631-8697-dd941f90decc" +REPL = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" +LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433" +InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [extras] TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [compat] +CSTParser = "3.3" JuliaSyntax = "0.4, 1" julia = "1.10" AutoHashEquals = "2" CancellationTokens = "1" Salsa = "2.2.0" TestItemDetection = "1.1" +SHA = "<0.0.1, 0.7, 1" +Sockets = "<0.0.1, 1" +REPL = "<0.0.1, 1" +LibGit2 = "<0.0.1, 1" +InteractiveUtils = "<0.0.1, 1" [targets] test = ["Test", "TestItemRunner"] diff --git a/src/JuliaWorkspaces.jl b/src/JuliaWorkspaces.jl index b3fce9e..a792ad6 100644 --- a/src/JuliaWorkspaces.jl +++ b/src/JuliaWorkspaces.jl @@ -1,17 +1,25 @@ module JuliaWorkspaces -import UUIDs, JuliaSyntax, TestItemDetection +import UUIDs, JuliaSyntax, TestItemDetection, CSTParser using UUIDs: UUID, uuid4 using JuliaSyntax: SyntaxNode using Salsa using AutoHashEquals +include("URIs2/URIs2.jl") + +include("SymbolServer/SymbolServer.jl") +import .SymbolServer + +include("StaticLint/StaticLint.jl") +import .StaticLint + include("compat.jl") import Pkg -include("URIs2/URIs2.jl") + import .URIs2 using .URIs2: filepath2uri, uri2filepath @@ -23,6 +31,7 @@ include("sourcetext.jl") include("inputs.jl") include("layer_files.jl") include("layer_syntax_trees.jl") +include("layer_semantics.jl") include("layer_projects.jl") include("layer_testitems.jl") include("layer_diagnostics.jl") diff --git a/src/StaticLint/StaticLint.jl b/src/StaticLint/StaticLint.jl index 0d6a5e6..499e63e 100644 --- a/src/StaticLint/StaticLint.jl +++ b/src/StaticLint/StaticLint.jl @@ -1,13 +1,16 @@ module StaticLint +function hasfile end + include("exception_types.jl") -using SymbolServer, CSTParser +using ..SymbolServer, CSTParser, ..URIs2 +using ..URIs2: URI using CSTParser: EXPR, isidentifier, setparent!, valof, headof, hastrivia, parentof, isoperator, ispunctuation, 
to_codeobject # CST utils using CSTParser: is_getfield, isassignment, isdeclaration, isbracketed, iskwarg, iscall, iscurly, isunarycall, isunarysyntax, isbinarycall, isbinarysyntax, issplat, defines_function, is_getfield_w_quotenode, iswhere, iskeyword, isstringliteral, isparameters, isnonstdid, istuple -using SymbolServer: VarRef +using ..SymbolServer: VarRef const noname = EXPR(:noname, nothing, nothing, 0, 0, nothing, nothing, nothing) @@ -33,7 +36,9 @@ function Base.show(io::IO, m::Meta) m.scope !== nothing && printstyled(io, " new scope", color = :green) m.error !== nothing && printstyled(io, " lint ", color = :red) end -hasmeta(x::EXPR) = x.meta isa Meta +hasmeta(x::EXPR, meta_dict::Dict{UInt64,StaticLint.Meta}) = haskey(meta_dict, objectid(x)) +getmeta(x::EXPR, meta_dict) = meta_dict[objectid(x)] +ensuremeta(x::EXPR, meta_dict) = hasmeta(x, meta_dict) || (meta_dict[objectid(x)] = Meta()) hasbinding(m::Meta) = m.binding isa Binding hasref(m::Meta) = m.ref !== nothing hasscope(m::Meta) = m.scope isa Scope @@ -52,32 +57,41 @@ mutable struct ExternalEnv project_deps::Vector{Symbol} end +getsymbols(env::ExternalEnv) = env.symbols +getsymbolextendeds(env::ExternalEnv) = env.extended_methods + + abstract type State end -mutable struct Toplevel{T} <: State - file::T - included_files::Vector{String} + +getsymbols(state::State) = getsymbols(state.env) +getsymbolextendeds(state::State) = getsymbolextendeds(state.env) + +mutable struct Toplevel <: State + uri::URI + included_files::Vector{URI} scope::Scope in_modified_expr::Bool modified_exprs::Union{Nothing,Vector{EXPR}} delayed::Vector{EXPR} resolveonly::Vector{EXPR} env::ExternalEnv - server flags::Int end -Toplevel(file, included_files, scope, in_modified_expr, modified_exprs, delayed, resolveonly, env, server) = - Toplevel(file, included_files, scope, in_modified_expr, modified_exprs, delayed, resolveonly, env, server, 0) +getpath(state::Toplevel) = URIs2.uri2filepath(state.uri) -function (state::Toplevel)(x::EXPR) 
- resolve_import(x, state) - mark_bindings!(x, state) - add_binding(x, state) +Toplevel(uri, included_files, scope, in_modified_expr, modified_exprs, delayed, resolveonly, env) = + Toplevel(uri, included_files, scope, in_modified_expr, modified_exprs, delayed, resolveonly, env, 0) + +function (state::Toplevel)(x::EXPR, meta_dict, root_dict, rt) + resolve_import(x, state, meta_dict) + mark_bindings!(x, state, meta_dict) + add_binding(x, state, meta_dict) mark_globals(x, state) - handle_macro(x, state) - s0 = scopes(x, state) - resolve_ref(x, state) - followinclude(x, state) + handle_macro(x, state, meta_dict, root_dict, rt) + s0 = scopes(x, state, meta_dict) + resolve_ref(x, state, meta_dict) + followinclude(x, state, meta_dict, root_dict, rt) old_in_modified_expr = state.in_modified_expr if state.modified_exprs !== nothing && x in state.modified_exprs @@ -91,7 +105,7 @@ function (state::Toplevel)(x::EXPR) end else old = flag!(state, x) - traverse(x, state) + traverse(x, state, meta_dict, root_dict, rt) state.flags = old end @@ -103,28 +117,27 @@ end mutable struct Delayed <: State scope::Scope env::ExternalEnv - server flags::Int end -Delayed(scope, env, server) = Delayed(scope, env, server, 0) +Delayed(scope, env) = Delayed(scope, env, 0) -function (state::Delayed)(x::EXPR) - mark_bindings!(x, state) - add_binding(x, state) +function (state::Delayed)(x::EXPR, meta_dict, root_dict, rt) + mark_bindings!(x, state, meta_dict) + add_binding(x, state, meta_dict) mark_globals(x, state) handle_macro(x, state) - s0 = scopes(x, state) + s0 = scopes(x, state, meta_dict) - resolve_ref(x, state) + resolve_ref(x, state, meta_dict) old = flag!(state, x) - traverse(x, state) + traverse(x, state, meta_dict, root_dict, rt) state.flags = old if state.scope != s0 for b in values(state.scope.names) - infer_type_by_use(b, state.env) - check_unused_binding(b, state.scope) + infer_type_by_use(b, state.env, meta_dict) + check_unused_binding(b, state.scope, meta_dict, root_dict, rt) end 
state.scope = s0 end @@ -134,23 +147,22 @@ end mutable struct ResolveOnly <: State scope::Scope env::ExternalEnv - server end -function (state::ResolveOnly)(x::EXPR) - if hasscope(x) +function (state::ResolveOnly)(x::EXPR, meta_dict, root_dict, rt) + if hasscope(x, meta_dict) s0 = state.scope - state.scope = scopeof(x) + state.scope = scopeof(x, meta_dict) else s0 = state.scope end # NEW: late import resolution (idempotent for already-resolved imports) - resolve_import(x, state) + resolve_import(x, state, meta_dict) - resolve_ref(x, state) + resolve_ref(x, state, meta_dict) - traverse(x, state) + traverse(x, state, meta_dict, root_dict, rt) if state.scope != s0 state.scope = s0 end @@ -173,29 +185,27 @@ end Performs a semantic pass across a project from the entry point `file`. A first pass traverses the top-level scope after which secondary passes handle delayed scopes (e.g. functions). These secondary passes can be, optionally, very light and only seek to resovle references (e.g. link symbols to bindings). This can be done by supplying a list of expressions on which the full secondary pass should be made (`modified_expr`), all others will receive the light-touch version. 
""" -function semantic_pass(file, modified_expr = nothing) - server = file.server - env = getenv(file, server) - setscope!(getcst(file), Scope(nothing, getcst(file), Dict(), Dict{Symbol,Any}(:Base => env.symbols[:Base], :Core => env.symbols[:Core]), nothing)) - state = Toplevel(file, [getpath(file)], scopeof(getcst(file)), modified_expr === nothing, modified_expr, EXPR[], EXPR[], env, server) - state(getcst(file)) +function semantic_pass(uri, cst, env, meta_dict, root_dict, rt, modified_expr = nothing) + setscope!(cst, Scope(nothing, cst, Dict(), Dict{Symbol,Any}(:Base => env.symbols[:Base], :Core => env.symbols[:Core]), nothing), meta_dict) + state = Toplevel(uri, [uri], scopeof(cst, meta_dict), modified_expr === nothing, modified_expr, EXPR[], EXPR[], env) + state(cst, meta_dict, root_dict, rt) for x in state.delayed - if hasscope(x) - traverse(x, Delayed(scopeof(x), env, server)) - for (k, b) in scopeof(x).names - infer_type_by_use(b, env) - check_unused_binding(b, scopeof(x)) + if hasscope(x, meta_dict) + traverse(x, Delayed(scopeof(x, meta_dict), env), meta_dict, root_dict, rt) + for (k, b) in scopeof(x, meta_dict).names + infer_type_by_use(b, env, meta_dict) + check_unused_binding(b, scopeof(x, meta_dict), meta_dict, root_dict, rt) end else - traverse(x, Delayed(retrieve_delayed_scope(x), env, server)) + traverse(x, Delayed(retrieve_delayed_scope(x, meta_dict), env), meta_dict, root_dict, rt) end end if state.resolveonly !== nothing for x in state.resolveonly - if hasscope(x) - traverse(x, ResolveOnly(scopeof(x), env, server)) + if hasscope(x, meta_dict) + traverse(x, ResolveOnly(scopeof(x, meta_dict), env), meta_dict, root_dict, rt) else - traverse(x, ResolveOnly(retrieve_delayed_scope(x), env, server)) + traverse(x, ResolveOnly(retrieve_delayed_scope(x, meta_dict), env), meta_dict, root_dict, rt) end end end @@ -207,44 +217,44 @@ end Iterates across the child nodes of an EXPR in execution order (rather than storage order) calling `state` on each node. 
""" -function traverse(x::EXPR, state) +function traverse(x::EXPR, state, meta_dict, root_dict, rt) if (isassignment(x) && !(CSTParser.is_func_call(x.args[1]) || CSTParser.iscurly(x.args[1]))) || CSTParser.isdeclaration(x) - state(x.args[2]) - state(x.args[1]) + state(x.args[2], meta_dict, root_dict, rt) + state(x.args[1], meta_dict, root_dict, rt) elseif CSTParser.iswhere(x) for i = 2:length(x.args) - state(x.args[i]) + state(x.args[i], meta_dict, root_dict, rt) end - state(x.args[1]) + state(x.args[1], meta_dict, root_dict, rt) elseif headof(x) === :generator || headof(x) === :filter @inbounds for i = 2:length(x.args) - state(x.args[i]) + state(x.args[i], meta_dict, root_dict, rt) end - state(x.args[1]) + state(x.args[1], meta_dict, root_dict, rt) elseif headof(x) === :call && length(x.args) > 1 && headof(x.args[2]) === :parameters - state(x.args[1]) + state(x.args[1], meta_dict, root_dict, rt) @inbounds for i = 3:length(x.args) - state(x.args[i]) + state(x.args[i], meta_dict, root_dict, rt) end - state(x.args[2]) + state(x.args[2], meta_dict, root_dict, rt) elseif x.args !== nothing && length(x.args) > 0 @inbounds for i = 1:length(x.args) - state(x.args[i]) + state(x.args[i], meta_dict, root_dict, rt) end end end -function check_filesize(x, path) +function check_filesize(x, path, meta_dict) nb = try filesize(path) catch - seterror!(x, FileNotAvailable) + seterror!(x, FileNotAvailable, meta_dict) return false end toobig = nb > LARGE_FILE_LIMIT if toobig - seterror!(x, FileTooBig) + seterror!(x, FileTooBig, meta_dict) end return !toobig end @@ -257,17 +267,17 @@ If successful it checks whether a file with that path is loaded on the server or a file exists on the disc that can be loaded. If this is successful it traverses the code associated with the loaded file. 
""" -function followinclude(x, state::State) +function followinclude(x, state::State, meta_dict, root_dict, rt) # this runs on the `include` symbol instead of a function call so that we # can be sure the ref has already been resolved isinclude = isincludet = false p = x - if isidentifier(x) && hasref(x) - r = x.meta.ref + if isidentifier(x) && hasref(x, meta_dict) + r = getmeta(x, meta_dict).ref if is_in_fexpr(x, iscall) p = get_parent_fexpr(x, iscall) - if r == refof_call_func(p) + if r == refof_call_func(p, meta_dict) isinclude = r.name == SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Base), :include) isincludet = r.name == SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Revise), :includet) end @@ -280,68 +290,69 @@ function followinclude(x, state::State) x = p - init_path = path = get_path(x, state) + init_path = path = get_path(x, state, meta_dict) if isempty(path) elseif isabspath(path) - if hasfile(state.server, path) - elseif canloadfile(state.server, path) - if check_filesize(x, path) - loadfile(state.server, path) - else - return - end + if hasfile(rt, path) + # elseif canloadfile(state.server, path) + # if check_filesize(x, path) + # loadfile(state.server, path) + # else + # return + # end else path = "" end - elseif !isempty(getpath(state.file)) && isabspath(joinpath(dirname(getpath(state.file)), path)) + elseif !isempty(getpath(state)) && isabspath(joinpath(dirname(getpath(state)), path)) # Relative path from current - if hasfile(state.server, joinpath(dirname(getpath(state.file)), path)) - path = joinpath(dirname(getpath(state.file)), path) - elseif canloadfile(state.server, joinpath(dirname(getpath(state.file)), path)) - path = joinpath(dirname(getpath(state.file)), path) - if check_filesize(x, path) - loadfile(state.server, path) - else - return - end + if hasfile(rt, joinpath(dirname(getpath(state)), path)) + path = joinpath(dirname(getpath(state)), path) + # elseif canloadfile(state.server, joinpath(dirname(getpath(state.file)), path)) + # 
path = joinpath(dirname(getpath(state.file)), path) + # if check_filesize(x, path) + # loadfile(state.server, path) + # else + # return + # end else path = "" end - elseif !isempty((basepath = _is_in_basedir(getpath(state.file)); basepath)) + elseif !isempty((basepath = _is_in_basedir(getpath(state)); basepath)) # Special handling for include method used within Base path = joinpath(basepath, path) - if hasfile(state.server, path) + if hasfile(rt, path) # skip - elseif canloadfile(state.server, path) - loadfile(state.server, path) + # elseif canloadfile(state.server, path) + # loadfile(state.server, path) else path = "" end else path = "" end - if hasfile(state.server, path) - if path in state.included_files - seterror!(x, IncludeLoop) - return - end - f = getfile(state.server, path) - - if f.cst.fullspan > LARGE_FILE_LIMIT - seterror!(x, FileTooBig) - return - end - oldfile = state.file - state.file = f - push!(state.included_files, getpath(state.file)) - setroot(state.file, getroot(oldfile)) - setscope!(getcst(state.file), nothing) - state(getcst(state.file)) - state.file = oldfile - pop!(state.included_files) - elseif !is_in_fexpr(x, CSTParser.defines_function) && !isempty(init_path) - seterror!(x, MissingFile) - end + # TODO DA FIX + # if hasfile(rt, path) + # if path in state.included_files + # seterror!(x, IncludeLoop) + # return + # end + # f = getfile(state.server, path) + + # if f.cst.fullspan > LARGE_FILE_LIMIT + # seterror!(x, FileTooBig) + # return + # end + # oldfile = state.file + # state.file = f + # push!(state.included_files, getpath(state)) + # root_dict[state.file] = root_dict[oldfile] + # setscope!(getcst(state.file), nothing) + # state(getcst(state.file)) + # state.file = oldfile + # pop!(state.included_files) + # elseif !is_in_fexpr(x, CSTParser.defines_function) && !isempty(init_path) + # seterror!(x, MissingFile) + # end end """ @@ -350,13 +361,13 @@ end Usually called on the argument to `include` calls, and attempts to determine the path of 
the file to be included. Has limited support for `joinpath` calls. """ -function get_path(x::EXPR, state) +function get_path(x::EXPR, state, meta_dict) if CSTParser.iscall(x) && length(x.args) == 2 parg = x.args[2] if CSTParser.isstringliteral(parg) if occursin("\0", valof(parg)) - seterror!(parg, IncludePathContainsNULL) + seterror!(parg, IncludePathContainsNULL, meta_dict) return "" end path = CSTParser.str_value(parg) @@ -365,7 +376,7 @@ function get_path(x::EXPR, state) return path elseif CSTParser.ismacrocall(parg) && valof(parg.args[1]) == "@raw_str" && CSTParser.isstringliteral(parg.args[3]) if occursin("\0", valof(parg.args[3])) - seterror!(parg.args[3], IncludePathContainsNULL) + seterror!(parg.args[3], IncludePathContainsNULL, meta_dict) return "" end path = normpath(CSTParser.str_value(parg.args[3])) @@ -377,10 +388,10 @@ function get_path(x::EXPR, state) for i = 2:length(parg.args) arg = parg[i] if _is_macrocall_to_BaseDIR(arg) # Assumes @__DIR__ points to Base macro. - push!(path_elements, dirname(getpath(state.file))) + push!(path_elements, dirname(getpath(state))) elseif CSTParser.isstringliteral(arg) if occursin("\0", valof(arg)) - seterror!(arg, IncludePathContainsNULL) + seterror!(arg, IncludePathContainsNULL, meta_dict) return "" end push!(path_elements, string(valof(arg))) @@ -398,12 +409,10 @@ function get_path(x::EXPR, state) return "" end -include("server.jl") include("imports.jl") include("references.jl") include("macros.jl") include("linting/checks.jl") include("type_inf.jl") include("utils.jl") -include("interface.jl") end diff --git a/src/StaticLint/bindings.jl b/src/StaticLint/bindings.jl index d32a896..eb1b449 100644 --- a/src/StaticLint/bindings.jl +++ b/src/StaticLint/bindings.jl @@ -23,17 +23,17 @@ function Base.show(io::IO, b::Binding) b.refs isa Vector ? 
" ($(length(b.refs)) refs))" : ")", color=:blue) end -hasbinding(x::EXPR) = hasmeta(x) && hasbinding(x.meta) -bindingof(x) = nothing -bindingof(x::EXPR) = bindingof(x.meta) +hasbinding(x::EXPR, meta_dict) = hasmeta(x, meta_dict) && hasbinding(getmeta(x, meta_dict)) +bindingof(x, meta_dict) = nothing +bindingof(x::EXPR, meta_dict) = hasbinding(x, meta_dict) ? bindingof(getmeta(x, meta_dict)) : nothing -hasref(x::EXPR) = hasmeta(x) && hasref(x.meta) -refof(x::EXPR) = hasmeta(x) ? x.meta.ref : nothing +hasref(x::EXPR, meta_dict) = hasmeta(x, meta_dict) && hasref(getmeta(x, meta_dict)) +refof(x::EXPR, meta_dict) = hasmeta(x, meta_dict) ? getmeta(x, meta_dict).ref : nothing -function gotoobjectofref(x::EXPR) - r = refof(x) +function gotoobjectofref(x::EXPR, meta_dict) + r = refof(x, meta_dict) if r isa SymbolServer.SymStore return r elseif r isa Binding @@ -47,58 +47,56 @@ end Checks whether the expression `x` should introduce new names and marks them as needed. Generally this marks expressions that would introdce names to the current scope (i.e. that x sits in) but in cases marks expressions that will add names to lower scopes. This is done when it is not knowable that a child node of `x` will introduce a new name without the context of where it sits in `x` -for example the arguments of the signature of a function definition. 
""" -function mark_bindings!(x::EXPR, state) - if hasbinding(x) +function mark_bindings!(x::EXPR, state, meta_dict) + if hasbinding(x, meta_dict) return end - if !hasmeta(x) - x.meta = Meta() - end + ensuremeta(x, meta_dict) if isassignment(x) if CSTParser.is_func_call(x.args[1]) name = CSTParser.get_name(x) - mark_binding!(x) - mark_sig_args!(x.args[1]) + mark_binding!(x, meta_dict) + mark_sig_args!(x.args[1], meta_dict) elseif CSTParser.iscurly(x.args[1]) - mark_typealias_bindings!(x) + mark_typealias_bindings!(x, meta_dict) elseif !is_getfield(x.args[1]) && state.flags & NO_NEW_BINDINGS == 0 - mark_binding!(x.args[1], x) + mark_binding!(x.args[1], meta_dict, x) end elseif CSTParser.defines_anon_function(x) - mark_binding!(x.args[1], x) + mark_binding!(x.args[1], meta_dict, x) elseif CSTParser.iswhere(x) for i = 2:length(x.args) - mark_binding!(x.args[i]) + mark_binding!(x.args[i], meta_dict) end elseif headof(x) === :for - markiterbinding!(x.args[2]) + markiterbinding!(x.args[2], meta_dict) elseif headof(x) === :generator || headof(x) === :filter for i = 2:length(x.args) - markiterbinding!(x.args[i]) + markiterbinding!(x.args[i], meta_dict) end elseif headof(x) === :do for i in 1:length(x.args[2].args) - mark_binding!(x.args[2].args[i]) + mark_binding!(x.args[2].args[i], meta_dict) end elseif headof(x) === :function || headof(x) === :macro name = CSTParser.get_name(x) - x.meta.binding = Binding(name, x, CoreTypes.Function, []) + getmeta(x, meta_dict).binding = Binding(name, x, CoreTypes.Function, []) if isidentifier(name) && headof(x) === :macro - setref!(name, bindingof(x)) + setref!(name, bindingof(x, meta_dict), meta_dict) end - mark_sig_args!(CSTParser.get_sig(x)) + mark_sig_args!(CSTParser.get_sig(x), meta_dict) elseif CSTParser.defines_module(x) - x.meta.binding = Binding(x.args[2], x, CoreTypes.Module, []) - setref!(x.args[2], bindingof(x)) + getmeta(x, meta_dict).binding = Binding(x.args[2], x, CoreTypes.Module, []) + setref!(x.args[2], bindingof(x, 
meta_dict), meta_dict) elseif headof(x) === :try && isidentifier(x.args[2]) - mark_binding!(x.args[2]) - setref!(x.args[2], bindingof(x.args[2])) + mark_binding!(x.args[2], meta_dict) + setref!(x.args[2], bindingof(x.args[2], meta_dict), meta_dict) elseif CSTParser.defines_datatype(x) name = CSTParser.get_name(x) - x.meta.binding = Binding(name, x, CoreTypes.DataType, []) - kwdef = parentof(x) isa EXPR && _points_to_Base_macro(parentof(x).args[1], Symbol("@kwdef"), state) + getmeta(x, meta_dict).binding = Binding(name, x, CoreTypes.DataType, []) + kwdef = parentof(x) isa EXPR && _points_to_Base_macro(parentof(x).args[1], Symbol("@kwdef"), state, meta_dict) if isidentifier(name) - setref!(name, bindingof(x)) + setref!(name, bindingof(x, meta_dict), meta_dict) end mark_parameters(CSTParser.get_sig(x)) if CSTParser.defines_struct(x) # mark field block @@ -110,36 +108,34 @@ function mark_bindings!(x::EXPR, state) if kwdef && CSTParser.isassignment(arg) arg = arg.args[1] end - mark_binding!(arg) + mark_binding!(arg, meta_dict) end end elseif headof(x) === :local for i = 1:length(x.args) if isidentifier(x.args[i]) - mark_binding!(x.args[i]) - setref!(x.args[i], bindingof(x.args[i])) + mark_binding!(x.args[i], meta_dict) + setref!(x.args[i], bindingof(x.args[i], meta_dict), meta_dict) end end end end -function mark_binding!(x::EXPR, val=x) +function mark_binding!(x::EXPR, meta_dict, val=x) if CSTParser.iskwarg(x) || (CSTParser.isdeclaration(x) && CSTParser.istuple(x.args[1])) - mark_binding!(x.args[1], x) + mark_binding!(x.args[1], meta_dict, x) elseif CSTParser.istuple(x) || CSTParser.isparameters(x) for arg in x.args - mark_binding!(arg, val) + mark_binding!(arg, meta_dict, val) end elseif CSTParser.isbracketed(x) - mark_binding!(CSTParser.rem_invis(x), val) + mark_binding!(CSTParser.rem_invis(x), meta_dict, val) elseif CSTParser.issplat(x) - mark_binding!(x.args[1], x) + mark_binding!(x.args[1], meta_dict, x) elseif !(isunarysyntax(x) && valof(headof(x)) == "::") - if 
!hasmeta(x) - x.meta = Meta() - end - x.meta.binding = Binding(CSTParser.get_name(x), val, nothing, []) + ensuremeta(x, meta_dict) + getmeta(x, meta_dict).binding = Binding(CSTParser.get_name(x), val, nothing, []) end return x end @@ -149,8 +145,8 @@ function mark_parameters(sig::EXPR, params = String[]) mark_parameters(sig.args[1], params) elseif iswhere(sig) for i = 2:length(sig.args) - x = mark_binding!(sig.args[i]) - val = valof(bindingof(x).name) + x = mark_binding!(sig.args[i], meta_dict) + val = valof(bindingof(x, meta_dict).name) if val isa String push!(params, val) end @@ -158,11 +154,11 @@ function mark_parameters(sig::EXPR, params = String[]) mark_parameters(sig.args[1], params) elseif CSTParser.iscurly(sig) for i = 2:length(sig.args) - x = mark_binding!(sig.args[i]) - if bindingof(x) isa Binding && valof(bindingof(x).name) in params + x = mark_binding!(sig.args[i], meta_dict) + if bindingof(x, meta_dict) isa Binding && valof(bindingof(x, meta_dict).name) in params # Don't mark a new binding if a parameter has already been # introduced from a :where - x.meta.binding = nothing + getmeta(x, meta_dict).binding = nothing end end end @@ -170,68 +166,66 @@ function mark_parameters(sig::EXPR, params = String[]) end -function markiterbinding!(iter::EXPR) +function markiterbinding!(iter::EXPR, meta_dict) if CSTParser.isassignment(iter) - mark_binding!(iter.args[1], iter) + mark_binding!(iter.args[1], meta_dict, iter) elseif CSTParser.iscall(iter) && CSTParser.isoperator(iter.args[1]) && (valof(iter.args[1]) == "in" || valof(iter.args[1]) == "โˆˆ") - mark_binding!(iter.args[2], iter) + mark_binding!(iter.args[2], meta_dict, iter) elseif headof(iter) === :block for i = 1:length(iter.args) - markiterbinding!(iter.args[i]) + markiterbinding!(iter.args[i], meta_dict) end end return iter end -function mark_sig_args!(x::EXPR) +function mark_sig_args!(x::EXPR, meta_dict) if CSTParser.iscall(x) || CSTParser.istuple(x) if x.args !== nothing && length(x.args) > 0 if 
CSTParser.isbracketed(x.args[1]) && length(x.args[1].args) > 0 && CSTParser.isdeclaration(x.args[1].args[1]) - mark_binding!(x.args[1].args[1]) + mark_binding!(x.args[1].args[1], meta_dict) end for i = (CSTParser.iscall(x) ? 2 : 1):length(x.args) a = x.args[i] if CSTParser.isparameters(a) for j = 1:length(a.args) aa = a.args[j] - mark_binding!(aa) + mark_binding!(aa, meta_dict) end elseif CSTParser.ismacrocall(a) && CSTParser.isidentifier(a.args[1]) && valofid(a.args[1]) == "@nospecialize" && length(a.args) == 3 - mark_binding!(a.args[3]) + mark_binding!(a.args[3], meta_dict) else - mark_binding!(a) + mark_binding!(a, meta_dict) end end end elseif CSTParser.iswhere(x) for i in 2:length(x.args) - mark_binding!(x.args[i]) + mark_binding!(x.args[i], meta_dict) end - mark_sig_args!(x.args[1]) + mark_sig_args!(x.args[1], meta_dict) elseif CSTParser.isbracketed(x) - mark_sig_args!(x.args[1]) + mark_sig_args!(x.args[1], meta_dict) elseif CSTParser.isdeclaration(x) - mark_sig_args!(x.args[1]) + mark_sig_args!(x.args[1], meta_dict) elseif CSTParser.isbinarycall(x) - mark_binding!(x.args[1]) - mark_binding!(x.args[2]) + mark_binding!(x.args[1], meta_dict) + mark_binding!(x.args[2], meta_dict) elseif CSTParser.isunarycall(x) && length(x.args) == 2 && (CSTParser.isbracketed(x.args[2]) || CSTParser.isdeclaration(x.args[2])) - mark_binding!(x.args[2]) + mark_binding!(x.args[2], meta_dict) end end -function mark_typealias_bindings!(x::EXPR) - if !hasmeta(x) - x.meta = Meta() - end - x.meta.binding = Binding(CSTParser.get_name(x.args[1]), x, CoreTypes.DataType, []) - setscope!(x, Scope(x)) +function mark_typealias_bindings!(x::EXPR, meta_dict) + ensuremeta(x, meta_dict) + getmeta(x, meta_dict).binding = Binding(CSTParser.get_name(x.args[1]), x, CoreTypes.DataType, []) + setscope!(x, Scope(x), meta_dict) for i = 2:length(x.args[1].args) arg = x.args[1].args[i] if isidentifier(arg) - mark_binding!(arg) + mark_binding!(arg, meta_dict) elseif CSTParser.issubtypedecl(arg) && 
isidentifier(arg.args[1]) - mark_binding!(arg.args[1]) + mark_binding!(arg.args[1], meta_dict) end end return x @@ -272,9 +266,9 @@ Add the binding of `x` to the current scope. Special handling is required for: Some simple type inference is run. """ -function add_binding(x, state, scope=state.scope) - if bindingof(x) isa Binding - b = bindingof(x) +function add_binding(x, state, meta_dict, scope=state.scope) + if bindingof(x, meta_dict) isa Binding + b = bindingof(x, meta_dict) if isidentifier(b.name) name = valofid(b.name) elseif CSTParser.ismacroname(b.name) # must be getfield @@ -293,37 +287,37 @@ function add_binding(x, state, scope=state.scope) scope.names[string("@", name)] = b mn = CSTParser.get_name(x) if isidentifier(mn) - setref!(mn, b) + setref!(mn, b, meta_dict) end elseif defines_function(x) # TODO: Need to do check that we're not in a closure. tls = retrieve_toplevel_or_func_scope(scope) tls === nothing && return @warn "top-level scope not retrieved" if name_is_getfield(b.name) - resolve_ref(parentof(parentof(b.name)).args[1], scope, state) - lhs_ref = refof_maybe_getfield(parentof(parentof(b.name)).args[1]) + resolve_ref(parentof(parentof(b.name)).args[1], scope, state, meta_dict) + lhs_ref = refof_maybe_getfield(parentof(parentof(b.name)).args[1], meta_dict) if lhs_ref isa SymbolServer.ModuleStore && haskey(lhs_ref.vals, Symbol(name)) # Overloading if haskey(tls.names, name) && eventually_overloads(tls.names[name], lhs_ref.vals[Symbol(name)], state) # Though we're explicitly naming a function for overloading, it has already been imported to the toplevel scope. 
- if !hasref(b.name) - setref!(b.name, tls.names[name]) # Add ref to previous overload + if !hasref(b.name, meta_dict) + setref!(b.name, tls.names[name], meta_dict) # Add ref to previous overload overload_method(tls, b, VarRef(lhs_ref.name, Symbol(name))) end # Do nothing, get_name(x) will resolve to the root method elseif isexportedby(name, lhs_ref) # Name is already available tls.names[name] = b - if !hasref(b.name) # Is this an appropriate indicator that we've not marked the overload? + if !hasref(b.name, meta_dict) # Is this an appropriate indicator that we've not marked the overload? push!(b.refs, maybe_lookup(lhs_ref[Symbol(name)], state)) - setref!(b.name, b) # we actually set the rhs of the qualified name to point to this binding + setref!(b.name, b, meta_dict) # we actually set the rhs of the qualified name to point to this binding end else # Mark as overloaded so that calls to `M.f()` resolve properly. overload_method(tls, b, VarRef(lhs_ref.name, Symbol(name))) # Add to overloaded list but not scope. 
end elseif lhs_ref isa Binding && CoreTypes.ismodule(lhs_ref.type) - if hasscope(lhs_ref.val) && haskey(scopeof(lhs_ref.val).names, name) + if hasscope(lhs_ref.val, meta_dict) && haskey(scopeof(lhs_ref.val, meta_dict).names, name) # Don't need to do anything, name will resolve end end @@ -340,32 +334,32 @@ function add_binding(x, state, scope=state.scope) if (existing_binding isa Binding && ((CoreTypes.isfunction(existing_binding.type) || CoreTypes.isdatatype(existing_binding.type))) || existing_binding isa SymbolServer.FunctionStore || existing_binding isa SymbolServer.DataTypeStore) # do nothing name of `x` will resolve to the root method else - seterror!(x, CannotDefineFuncAlreadyHasValue) + seterror!(x, CannotDefineFuncAlreadyHasValue, meta_dict) end else scope.names[name] = b - if !hasref(b.name) - setref!(b.name, b) + if !hasref(b.name, meta_dict) + setref!(b.name, b, meta_dict) end end if CSTParser.defines_struct(scope.expr) && parentof(scope) isa Scope # hoist binding for inner constructor to parent scope - return add_binding(x, state, parentof(scope)) + return add_binding(x, state, meta_dict, parentof(scope)) end end elseif scopehasbinding(scope, name) # TODO: some checks about rebinding of consts - check_const_decl(name, b, scope) + check_const_decl(name, b, scope, meta_dict) scope.names[name] = b elseif is_soft_scope(scope) && parentof(scope) isa Scope && isidentifier(b.name) && scopehasbinding(parentof(scope), valofid(b.name)) && !enforce_hard_scope(x, scope) - add_binding(x, state, scope.parent) + add_binding(x, state, meta_dict, scope.parent) else scope.names[name] = b end - infer_type(b, scope, state) - elseif bindingof(x) isa SymbolServer.SymStore - scope.names[valofid(x)] = bindingof(x) + infer_type(b, scope, state, meta_dict) + elseif bindingof(x, meta_dict) isa SymbolServer.SymStore + scope.names[valofid(x)] = bindingof(x, meta_dict) end end @@ -401,9 +395,9 @@ function mark_globals(x::EXPR, state) end end -function 
name_extends_imported_method(b::Binding) +function name_extends_imported_method(b::Binding, meta_dict) if CoreTypes.isfunction(b.type) && CSTParser.hasparent(b.name) && CSTParser.is_getfield(parentof(b.name)) - if refof_maybe_getfield(parentof(b.name)[1]) !== nothing + if refof_maybe_getfield(parentof(b.name)[1], meta_dict) !== nothing end end diff --git a/src/StaticLint/imports.jl b/src/StaticLint/imports.jl index f665a85..54ca6df 100644 --- a/src/StaticLint/imports.jl +++ b/src/StaticLint/imports.jl @@ -1,14 +1,12 @@ -function resolve_import_block(x::EXPR, state::State, root, usinged, markfinal=true) +function resolve_import_block(x::EXPR, state::State, root, usinged, meta_dict, markfinal=true) if x.head == :as - resolve_import_block(x.args[1], state, root, usinged, markfinal) - if x.args[2].meta === nothing - x.args[2].meta = Meta() - end - if hasbinding(last(x.args[1].args)) && CSTParser.isidentifier(x.args[2]) - lhsbinding = bindingof(last(x.args[1].args)) - x.args[2].meta.binding = Binding(x.args[2], lhsbinding.val, lhsbinding.type, lhsbinding.refs) - setref!(x.args[2], bindingof(x.args[2])) - last(x.args[1].args).meta.binding = nothing + resolve_import_block(x.args[1], state, root, usinged, meta_dict, markfinal) + ensuremeta(x.args[2], meta_dict) + if hasbinding(last(x.args[1].args), meta_dict) && CSTParser.isidentifier(x.args[2]) + lhsbinding = bindingof(last(x.args[1].args), meta_dict) + getmeta(x.args[2], meta_dict).binding = Binding(x.args[2], lhsbinding.val, lhsbinding.type, lhsbinding.refs) + setref!(x.args[2], bindingof(x.args[2], meta_dict), meta_dict) + getmeta(last(x.args[1].args), meta_dict).binding = nothing end return end @@ -23,11 +21,11 @@ function resolve_import_block(x::EXPR, state::State, root, usinged, markfinal=tr root = parentof(root) else # Too many dots - seterror!(arg, RelativeImportTooManyDots) + seterror!(arg, RelativeImportTooManyDots, meta_dict) return end elseif isidentifier(arg) || (i == n && (CSTParser.ismacroname(arg) ||
isoperator(arg))) - cand = hasref(arg) ? refof(arg) : _get_field(root, arg, state) + cand = hasref(arg, meta_dict) ? refof(arg, meta_dict) : _get_field(root, arg, state, meta_dict) if cand === nothing # Cannot resolve now (e.g. sibling not yet defined). Schedule a retry. if state isa Toplevel @@ -43,10 +41,10 @@ function resolve_import_block(x::EXPR, state::State, root, usinged, markfinal=tr return end root = maybe_lookup(cand, state) - setref!(arg, root) + setref!(arg, root, meta_dict) if i == n - markfinal && _mark_import_arg(arg, root, state, usinged) - return refof(arg) + markfinal && _mark_import_arg(arg, root, state, usinged, meta_dict) + return refof(arg, meta_dict) end else return @@ -54,11 +52,11 @@ function resolve_import_block(x::EXPR, state::State, root, usinged, markfinal=tr end end -function resolve_import(x::EXPR, state::State, root=getsymbols(state)) +function resolve_import(x::EXPR, state::State, meta_dict, root=getsymbols(state)) if (headof(x) === :using || headof(x) === :import) usinged = (headof(x) === :using) if length(x.args) > 0 && isoperator(headof(x.args[1])) && valof(headof(x.args[1])) == ":" - root2 = resolve_import_block(x.args[1].args[1], state, root, false, false) + root2 = resolve_import_block(x.args[1].args[1], state, root, false, meta_dict, false) if root2 === nothing # schedule a retry like above if state isa Toplevel @@ -69,17 +67,17 @@ function resolve_import(x::EXPR, state::State, root=getsymbols(state)) return end for i = 2:length(x.args[1].args) - resolve_import_block(x.args[1].args[i], state, root2, usinged) + resolve_import_block(x.args[1].args[i], state, root2, usinged, meta_dict) end else for i = 1:length(x.args) - resolve_import_block(x.args[i], state, root, usinged) + resolve_import_block(x.args[i], state, root, usinged, meta_dict) end end end end -function _mark_import_arg(arg, par, state, usinged) +function _mark_import_arg(arg, par, state, usinged, meta_dict) if par !== nothing && CSTParser.is_id_or_macroname(arg) if 
par isa Binding # mark reference to binding push!(par.refs, arg) @@ -88,12 +86,10 @@ function _mark_import_arg(arg, par, state, usinged) par = SymbolServer._lookup(par, getsymbols(state), true) !(par isa SymbolServer.SymStore) && return end - if bindingof(arg) === nothing - if !hasmeta(arg) - arg.meta = Meta() - end - arg.meta.binding = Binding(arg, par, _typeof(par, state), []) - setref!(arg, bindingof(arg)) + if bindingof(arg, meta_dict) === nothing + ensuremeta(arg, meta_dict) + getmeta(arg, meta_dict).binding = Binding(arg, par, _typeof(par, state), []) + setref!(arg, bindingof(arg, meta_dict), meta_dict) end if usinged @@ -102,24 +98,24 @@ function _mark_import_arg(arg, par, state, usinged) elseif par isa Binding && par.val isa SymbolServer.ModuleStore add_to_imported_modules(state.scope, Symbol(valofid(arg)), par.val) elseif par isa Binding && par.val isa EXPR && CSTParser.defines_module(par.val) - add_to_imported_modules(state.scope, Symbol(valofid(arg)), scopeof(par.val)) + add_to_imported_modules(state.scope, Symbol(valofid(arg)), scopeof(par.val, meta_dict)) elseif par isa Binding && par.val isa Binding && par.val.val isa EXPR && CSTParser.defines_module(par.val.val) - add_to_imported_modules(state.scope, Symbol(valofid(arg)), scopeof(par.val.val)) + add_to_imported_modules(state.scope, Symbol(valofid(arg)), scopeof(par.val.val, meta_dict)) end else # import binds the name in the current scope - state.scope.names[valofid(arg)] = bindingof(arg) + state.scope.names[valofid(arg)] = bindingof(arg, meta_dict) end end end function has_workspace_package(server, name) haskey(server.workspacepackages, name) && - hasscope(getcst(server.workspacepackages[name])) && - haskey(scopeof(getcst(server.workspacepackages[name])).names, name) && - scopeof(getcst(server.workspacepackages[name])).names[name] isa Binding && - scopeof(getcst(server.workspacepackages[name])).names[name].val isa EXPR && - 
CSTParser.defines_module(scopeof(getcst(server.workspacepackages[name])).names[name].val) + hasscope(getcst(server.workspacepackages[name]), meta_dict) && + haskey(scopeof(getcst(server.workspacepackages[name]), meta_dict).names, name) && + scopeof(getcst(server.workspacepackages[name]), meta_dict).names[name] isa Binding && + scopeof(getcst(server.workspacepackages[name]), meta_dict).names[name].val isa EXPR && + CSTParser.defines_module(scopeof(getcst(server.workspacepackages[name]), meta_dict).names[name].val) end function add_to_imported_modules(scope::Scope, name::Symbol, val) @@ -145,11 +141,11 @@ function get_named_toplevel_module(s::Scope, name::String) end return nothing end -function _get_field(par, arg, state) +function _get_field(par, arg, state, meta_dict) arg_str_rep = CSTParser.str_value(arg) if par isa SymbolServer.EnvStore - if (arg_scope = retrieve_scope(arg)) !== nothing && (tlm = get_named_toplevel_module(arg_scope, arg_str_rep)) !== nothing && hasbinding(tlm) - return bindingof(tlm) + if (arg_scope = retrieve_scope(arg, meta_dict)) !== nothing && (tlm = get_named_toplevel_module(arg_scope, arg_str_rep)) !== nothing && hasbinding(tlm, meta_dict) + return bindingof(tlm, meta_dict) # elseif has_workspace_package(state.server, arg_str_rep) # return scopeof(getcst(state.server.workspacepackages[arg_str_rep])).names[arg_str_rep] elseif haskey(par, Symbol(arg_str_rep)) @@ -187,17 +183,17 @@ function _get_field(par, arg, state) end elseif par isa Binding if par.val isa Binding - return _get_field(par.val, arg, state) - elseif par.val isa EXPR && CSTParser.defines_module(par.val) && scopeof(par.val) isa Scope - return _get_field(scopeof(par.val), arg, state) + return _get_field(par.val, arg, state, meta_dict) + elseif par.val isa EXPR && CSTParser.defines_module(par.val) && scopeof(par.val, meta_dict) isa Scope + return _get_field(scopeof(par.val, meta_dict), arg, state, meta_dict) elseif par.val isa EXPR && isassignment(par.val) - if 
hasref(par.val.args[2]) - return _get_field(refof(par.val.args[2]), arg, state) + if hasref(par.val.args[2], meta_dict) + return _get_field(refof(par.val.args[2], meta_dict), arg, state, meta_dict) elseif is_getfield_w_quotenode(par.val.args[2]) - return _get_field(refof_maybe_getfield(par.val.args[2]), arg, state) + return _get_field(refof_maybe_getfield(par.val.args[2], meta_dict), arg, state, meta_dict) end elseif par.val isa SymbolServer.ModuleStore - return _get_field(par.val, arg, state) + return _get_field(par.val, arg, state, meta_dict) end end return diff --git a/src/StaticLint/interface.jl b/src/StaticLint/interface.jl deleted file mode 100644 index 94ff4ee..0000000 --- a/src/StaticLint/interface.jl +++ /dev/null @@ -1,57 +0,0 @@ -function setup_server(env = dirname(SymbolServer.Pkg.Types.Context().env.project_file), depot = first(SymbolServer.Pkg.depots()), cache = joinpath(dirname(pathof(SymbolServer)), "..", "store")) - server = StaticLint.FileServer() - ssi = SymbolServerInstance(depot, cache) - _, symbols = SymbolServer.getstore(ssi, env) - extended_methods = SymbolServer.collect_extended_methods(symbols) - server.external_env = ExternalEnv(symbols, extended_methods, Symbol[]) - server -end - -""" - lint_string(s, server; gethints = false) - -Parse a string and run a semantic pass over it. This will mark scopes, bindings, -references, and lint hints. An annotated `EXPR` is returned or, if `gethints = true`, -it is paired with a collected list of errors/hints. -""" -function lint_string(s::String, server = setup_server(); gethints = false) - empty!(server.files) - f = File("", s, CSTParser.parse(s, true), nothing, server) - env = getenv(f, server) - setroot(f, f) - setfile(server, "", f) - semantic_pass(f) - check_all(f.cst, LintOptions(), env) - if gethints - return f.cst, [(x, string(haserror(x) ? 
LintCodeDescriptions[x.meta.error] : "Missing reference", " at offset ", offset)) for (offset, x) in collect_hints(f.cst, env)] - else - return f.cst - end -end - -""" - lint_file(rootpath, server) - -Read a file from disc, parse and run a semantic pass over it. The file should be the -root of a project, e.g. for this package that file is `src/StaticLint.jl`. Other files -in the project will be loaded automatically (calls to `include` with complicated arguments -are not handled, see `followinclude` for details). A `FileServer` will be returned -containing the `File`s of the package. -""" -function lint_file(rootpath, server = setup_server(); gethints = false) - empty!(server.files) - root = loadfile(server, rootpath) - semantic_pass(root) - for f in values(server.files) - check_all(f.cst, LintOptions(), getenv(f, server)) - end - if gethints - hints = [] - for (p,f) in server.files - append!(hints, [(x, string(haserror(x) ? LintCodeDescriptions[x.meta.error] : "Missing reference", " at offset ", offset, " of ", p)) for (offset, x) in collect_hints(f.cst, getenv(f, server))]) - end - return root, hints - else - return root - end -end diff --git a/src/StaticLint/linting/checks.jl b/src/StaticLint/linting/checks.jl index 9d89981..c6fcaad 100644 --- a/src/StaticLint/linting/checks.jl +++ b/src/StaticLint/linting/checks.jl @@ -72,13 +72,11 @@ const LintCodeDescriptions = Dict{LintCodes,String}( ) haserror(m::Meta) = m.error !== nothing -haserror(x::EXPR) = hasmeta(x) && haserror(x.meta) -errorof(x::EXPR) = hasmeta(x) ? x.meta.error : nothing -function seterror!(x::EXPR, e) - if !hasmeta(x) - x.meta = Meta() - end - x.meta.error = e +haserror(x::EXPR, meta_dict) = hasmeta(x, meta_dict) && haserror(getmeta(x, meta_dict)) +errorof(x::EXPR, meta_dict) = hasmeta(x, meta_dict) ? 
getmeta(x, meta_dict).error : nothing +function seterror!(x::EXPR, e, meta_dict) + ensuremeta(x, meta_dict) + getmeta(x, meta_dict).error = e end const default_options = (true, true, true, true, true, true, true, true, true, true) @@ -101,26 +99,26 @@ LintOptions(::Colon) = LintOptions(fill(true, length(default_options))...) LintOptions(options::Vararg{Union{Bool,Nothing},length(default_options)}) = LintOptions(something.(options, default_options)...) -function check_all(x::EXPR, opts::LintOptions, env::ExternalEnv) +function check_all(x::EXPR, opts::LintOptions, env::ExternalEnv, meta_dict) # Do checks - opts.call && check_call(x, env) - opts.iter && check_loop_iter(x, env) - opts.nothingcomp && check_nothing_equality(x, env) - opts.constif && check_if_conds(x) - opts.lazy && check_lazy(x) - opts.datadecl && check_datatype_decl(x, env) - opts.typeparam && check_typeparams(x) - opts.modname && check_modulename(x) - opts.pirates && check_for_pirates(x) - opts.useoffuncargs && check_farg_unused(x) - check_kw_default(x, env) - check_use_of_literal(x) - check_break_continue(x) - check_const(x) + opts.call && check_call(x, env, meta_dict) + opts.iter && check_loop_iter(x, env, meta_dict) + opts.nothingcomp && check_nothing_equality(x, env, meta_dict) + opts.constif && check_if_conds(x, meta_dict) + opts.lazy && check_lazy(x, meta_dict) + opts.datadecl && check_datatype_decl(x, env, meta_dict) + opts.typeparam && check_typeparams(x, meta_dict) + opts.modname && check_modulename(x, meta_dict) + opts.pirates && check_for_pirates(x, meta_dict) + opts.useoffuncargs && check_farg_unused(x, meta_dict) + check_kw_default(x, env, meta_dict) + check_use_of_literal(x, meta_dict) + check_break_continue(x, meta_dict) + check_const(x, meta_dict) if x.args !== nothing for i in 1:length(x.args) - check_all(x.args[i], opts, env) + check_all(x.args[i], opts, env, meta_dict) end end end @@ -277,28 +275,28 @@ end is_something_with_methods(x::T) where T <: 
Union{SymbolServer.FunctionStore,SymbolServer.DataTypeStore} = true is_something_with_methods(x) = false -function check_call(x, env::ExternalEnv) +function check_call(x, env::ExternalEnv, meta_dict) if iscall(x) parentof(x) isa EXPR && headof(parentof(x)) === :do && return # TODO: add number of args specified in do block. length(x.args) == 0 && return # find the function we're dealing with - func_ref = refof_call_func(x) + func_ref = refof_call_func(x, meta_dict) func_ref === nothing && return if is_something_with_methods(func_ref) && !(func_ref isa Binding && func_ref.val isa EXPR && func_ref.val.head === :macro) # intentionally empty if func_ref isa Binding && func_ref.val isa EXPR && isassignment(func_ref.val) && isidentifier(func_ref.val.args[1]) && isidentifier(func_ref.val.args[2]) # if func_ref is a shadow binding (for these purposes, an assignment that just changes the name of a mehtod), redirect to the rhs of the assignment. - func_ref = refof(func_ref.val.args[2]) + func_ref = refof(func_ref.val.args[2], meta_dict) end else return end call_counts = call_nargs(x) - tls = retrieve_toplevel_scope(x) + tls = retrieve_toplevel_scope(x, meta_dict) tls === nothing && return @warn "Couldn't get top-level scope." # General check, this means something has gone wrong. 
func_ref === nothing && return - !sig_match_any(func_ref, x, call_counts, tls, env) && seterror!(x, IncorrectCallArgs) + !sig_match_any(func_ref, x, call_counts, tls, env) && seterror!(x, IncorrectCallArgs, meta_dict) end end @@ -356,52 +354,52 @@ get_method(x) = nothing isdocumented(x::EXPR) = parentof(x) isa EXPR && CSTParser.ismacrocall(parentof(x)) && headof(parentof(x).args[1]) === :globalrefdoc -function check_loop_iter(x::EXPR, env::ExternalEnv) +function check_loop_iter(x::EXPR, env::ExternalEnv, meta_dict) if headof(x) === :for if length(x.args) > 1 body = x.args[2] if headof(x.args[1]) === :block && x.args[1].args !== nothing for arg in x.args[1].args - check_incorrect_iter_spec(arg, body, env) + check_incorrect_iter_spec(arg, body, env, meta_dict) end else - check_incorrect_iter_spec(x.args[1], body, env) + check_incorrect_iter_spec(x.args[1], body, env, meta_dict) end end elseif headof(x) === :generator body = x.args[1] for i = 2:length(x.args) - check_incorrect_iter_spec(x.args[i], body, env) + check_incorrect_iter_spec(x.args[i], body, env, meta_dict) end end end -function check_incorrect_iter_spec(x, body, env) +function check_incorrect_iter_spec(x, body, env, meta_dict) if x.args !== nothing && CSTParser.is_range(x) rng = rhs_of_iterator(x) - if headof(rng) === :FLOAT || headof(rng) === :INTEGER || (iscall(rng) && refof(rng.args[1]) === getsymbols(env)[:Base][:length]) - seterror!(x, IncorrectIterSpec) + if headof(rng) === :FLOAT || headof(rng) === :INTEGER || (iscall(rng) && refof(rng.args[1], meta_dict) === getsymbols(env)[:Base][:length]) + seterror!(x, IncorrectIterSpec, meta_dict) elseif iscall(rng) && valof(rng.args[1]) == ":" && length(rng.args) === 3 && headof(rng.args[2]) === :INTEGER && iscall(rng.args[3]) && length(rng.args[3].args) > 1 && ( - refof(rng.args[3].args[1]) === getsymbols(env)[:Base][:length] || - refof(rng.args[3].args[1]) === getsymbols(env)[:Base][:size] + refof(rng.args[3].args[1], meta_dict) === 
getsymbols(env)[:Base][:length] || + refof(rng.args[3].args[1], meta_dict) === getsymbols(env)[:Base][:size] ) if length(x.args) >= 1 lhs = x.args[1] arr = rng.args[3].args[2] - b = refof(arr) + b = refof(arr, meta_dict) # 1:length(arr) indexing is ok for Vector and Array specifically if b isa Binding && (CoreTypes.isarray(b.type) || CoreTypes.isvector(b.type)) return end if !all_underscore(valof(lhs)) - if check_is_used_in_getindex(body, lhs, arr) - seterror!(x, IndexFromLength) + if check_is_used_in_getindex(body, lhs, arr, meta_dict) + seterror!(x, IndexFromLength, meta_dict) end end end @@ -409,13 +407,13 @@ function check_incorrect_iter_spec(x, body, env) end end -function check_is_used_in_getindex(expr, lhs, arr) +function check_is_used_in_getindex(expr, lhs, arr, meta_dict) if headof(expr) === :ref && expr.args !== nothing && length(expr.args) > 1 this_arr = expr.args[1] - if hasref(this_arr) && hasref(arr) && refof(this_arr) == refof(arr) + if hasref(this_arr, meta_dict) && hasref(arr, meta_dict) && refof(this_arr, meta_dict) == refof(arr, meta_dict) for index_arg in expr.args[2:end] - if hasref(index_arg) && hasref(lhs) && refof(index_arg) == refof(lhs) - seterror!(expr, IndexFromLength) + if hasref(index_arg, meta_dict) && hasref(lhs, meta_dict) && refof(index_arg, meta_dict) == refof(lhs, meta_dict) + seterror!(expr, IndexFromLength, meta_dict) return true end end @@ -423,32 +421,32 @@ function check_is_used_in_getindex(expr, lhs, arr) end if expr.args !== nothing for arg in expr.args - check_is_used_in_getindex(arg, lhs, arr) && return true + check_is_used_in_getindex(arg, lhs, arr, meta_dict) && return true end end return false end -function check_nothing_equality(x::EXPR, env::ExternalEnv) +function check_nothing_equality(x::EXPR, env::ExternalEnv, meta_dict) if isbinarycall(x) && length(x.args) == 3 _nothing = getsymbols(env)[:Core][:nothing] if valof(x.args[1]) == "==" && ( - (valof(x.args[2]) == "nothing" && refof(x.args[2]) == _nothing) || - 
(valof(x.args[3]) == "nothing" && refof(x.args[3]) == _nothing) + (valof(x.args[2]) == "nothing" && refof(x.args[2], meta_dict) == _nothing) || + (valof(x.args[3]) == "nothing" && refof(x.args[3], meta_dict) == _nothing) ) - seterror!(x.args[1], NothingEquality) + seterror!(x.args[1], NothingEquality, meta_dict) elseif valof(x.args[1]) == "!=" && ( - (valof(x.args[2]) == "nothing" && refof(x.args[2]) == _nothing) || - (valof(x.args[3]) == "nothing" && refof(x.args[3]) == _nothing) + (valof(x.args[2]) == "nothing" && refof(x.args[2], meta_dict) == _nothing) || + (valof(x.args[3]) == "nothing" && refof(x.args[3], meta_dict) == _nothing) ) - seterror!(x.args[1], NothingNotEq) + seterror!(x.args[1], NothingNotEq, meta_dict) end end end function _get_top_binding(x::EXPR, name::String) - if scopeof(x) isa Scope - return _get_top_binding(scopeof(x), name) + if scopeof(x, meta_dict) isa Scope + return _get_top_binding(scopeof(x, meta_dict), name) elseif parentof(x) isa EXPR return _get_top_binding(parentof(x), name) else @@ -474,26 +472,26 @@ function _get_global_scope(s::Scope) end end -function check_if_conds(x::EXPR) +function check_if_conds(x::EXPR, meta_dict) if headof(x) === :if cond = x.args[1] if headof(cond) === :TRUE || headof(cond) === :FALSE - seterror!(cond, ConstIfCondition) + seterror!(cond, ConstIfCondition, meta_dict) elseif isassignment(cond) - seterror!(cond, EqInIfConditional) + seterror!(cond, EqInIfConditional, meta_dict) end end end -function check_lazy(x::EXPR) +function check_lazy(x::EXPR, meta_dict) if isbinarysyntax(x) if valof(headof(x)) == "||" if headof(x.args[1]) === :TRUE || headof(x.args[1]) === :FALSE - seterror!(x, PointlessOR) + seterror!(x, PointlessOR, meta_dict) end elseif valof(headof(x)) == "&&" if headof(x.args[1]) === :TRUE || headof(x.args[1]) === :FALSE || headof(x.args[2]) === :TRUE || headof(x.args[2]) === :FALSE - seterror!(x, PointlessAND) + seterror!(x, PointlessAND, meta_dict) end end end @@ -519,30 +517,30 @@ function 
is_never_datatype(b::Binding, env::ExternalEnv) return false end -function check_datatype_decl(x::EXPR, env::ExternalEnv) +function check_datatype_decl(x::EXPR, env::ExternalEnv, meta_dict) # Only call in function signatures? if isdeclaration(x) && parentof(x) isa EXPR && iscall(parentof(x)) - if (dt = refof_maybe_getfield(last(x.args))) !== nothing + if (dt = refof_maybe_getfield(last(x.args), meta_dict)) !== nothing if is_never_datatype(dt, env) - seterror!(x, InvalidTypeDeclaration) + seterror!(x, InvalidTypeDeclaration, meta_dict) end elseif CSTParser.isliteral(last(x.args)) - seterror!(x, InvalidTypeDeclaration) + seterror!(x, InvalidTypeDeclaration, meta_dict) end end end -function check_modulename(x::EXPR) +function check_modulename(x::EXPR, meta_dict) if CSTParser.defines_module(x) && # x is a module - scopeof(x) isa Scope && parentof(scopeof(x)) isa Scope && # it has a scope and a parent scope - CSTParser.defines_module(parentof(scopeof(x)).expr) && # the parent scope is a module - valof(CSTParser.get_name(x)) == valof(CSTParser.get_name(parentof(scopeof(x)).expr)) # their names match - seterror!(CSTParser.get_name(x), InvalidModuleName) + scopeof(x, meta_dict) isa Scope && parentof(scopeof(x, meta_dict)) isa Scope && # it has a scope and a parent scope + CSTParser.defines_module(parentof(scopeof(x, meta_dict)).expr) && # the parent scope is a module + valof(CSTParser.get_name(x)) == valof(CSTParser.get_name(parentof(scopeof(x, meta_dict)).expr)) # their names match + seterror!(CSTParser.get_name(x), InvalidModuleName, meta_dict) end end # Check whether function arguments are unused -function check_farg_unused(x::EXPR) +function check_farg_unused(x::EXPR, meta_dict) if CSTParser.defines_function(x) sig = CSTParser.rem_wheres_decls(CSTParser.get_sig(x)) if (headof(x) === :function && length(x.args) == 2 && x.args[2] isa EXPR && length(x.args[2].args) == 1 && CSTParser.isliteral(x.args[2].args[1])) || @@ -555,18 +553,18 @@ function check_farg_unused(x::EXPR) 
arg = sig.args[i] if arg.head === :parameters for arg2 in arg.args - !check_farg_unused_(arg2, arg_names) && return + !check_farg_unused_(arg2, arg_names, meta_dict) && return end else - !check_farg_unused_(arg, arg_names) && return + !check_farg_unused_(arg, arg_names, meta_dict) && return end end end end end -function check_farg_unused_(arg, arg_names) - if !hasbinding(arg) +function check_farg_unused_(arg, arg_names, meta_dict) + if !hasbinding(arg, meta_dict) if iskwarg(arg) arg = arg.args[1] end @@ -574,10 +572,10 @@ function check_farg_unused_(arg, arg_names) arg = unwrap_nospecialize(arg) end end - if !hasbinding(arg) + if !hasbinding(arg, meta_dict) return false end - b = bindingof(arg) + b = bindingof(arg, meta_dict) # We don't care about these valof(b.name) isa String && all_underscore(valof(b.name)) && return false @@ -588,13 +586,13 @@ function check_farg_unused_(arg, arg_names) # only self ref: (length(b.refs) == 1 && first(b.refs) == b.name) || # first usage has binding: - (length(b.refs) > 1 && b.refs[2] isa EXPR && hasbinding(b.refs[2])) - seterror!(arg, UnusedFunctionArgument) + (length(b.refs) > 1 && b.refs[2] isa EXPR && hasbinding(b.refs[2], meta_dict)) + seterror!(arg, UnusedFunctionArgument, meta_dict) end if valof(b.name) === nothing elseif valof(b.name) in arg_names - seterror!(arg, DuplicateFuncArgName) + seterror!(arg, DuplicateFuncArgName, meta_dict) else push!(arg_names, valof(b.name)) end @@ -618,7 +616,7 @@ collect_hints(x::EXPR, env, missingrefs = :all, isquoted = false, errs = Tuple{I Collect hints and errors from an expression. `missingrefs` = (:none, :id, :all) determines whether unresolved identifiers are marked, the :all option will mark identifiers used in getfield calls." 
""" -function collect_hints(x::EXPR, env, missingrefs=:all, isquoted=false, errs=Tuple{Int,EXPR}[], pos=0) +function collect_hints(x::EXPR, env, meta_dict, missingrefs=:all, isquoted=false, errs=Tuple{Int,EXPR}[], pos=0) if quoted(x) isquoted = true elseif isquoted && unquoted(x) @@ -628,51 +626,51 @@ function collect_hints(x::EXPR, env, missingrefs=:all, isquoted=false, errs=Tupl # collect parse errors push!(errs, (pos, x)) elseif !isquoted - if missingrefs != :none && isidentifier(x) && !hasref(x) && + if missingrefs != :none && isidentifier(x) && !hasref(x, meta_dict) && !(valof(x) == "var" && parentof(x) isa EXPR && isnonstdid(parentof(x))) && !((valof(x) == "stdcall" || valof(x) == "cdecl" || valof(x) == "fastcall" || valof(x) == "thiscall" || valof(x) == "llvmcall") && is_in_fexpr(x, x -> iscall(x) && isidentifier(x.args[1]) && valof(x.args[1]) == "ccall")) push!(errs, (pos, x)) - elseif haserror(x) && errorof(x) isa StaticLint.LintCodes + elseif haserror(x, meta_dict) && errorof(x, meta_dict) isa StaticLint.LintCodes # collect lint hints push!(errs, (pos, x)) end - elseif isquoted && missingrefs == :all && should_mark_missing_getfield_ref(x, env) + elseif isquoted && missingrefs == :all && should_mark_missing_getfield_ref(x, env, meta_dict) push!(errs, (pos, x)) end for i in 1:length(x) - collect_hints(x[i], env, missingrefs, isquoted, errs, pos) + collect_hints(x[i], env, meta_dict, missingrefs, isquoted, errs, pos) pos += x[i].fullspan end errs end -function refof_maybe_getfield(x::EXPR) +function refof_maybe_getfield(x::EXPR, meta_dict) if isidentifier(x) - return refof(x) + return refof(x, meta_dict) elseif is_getfield_w_quotenode(x) - return refof(x.args[2].args[1]) + return refof(x.args[2].args[1], meta_dict) end end -function should_mark_missing_getfield_ref(x, env) - if isidentifier(x) && !hasref(x) && # x has no ref +function should_mark_missing_getfield_ref(x, env, meta_dict) + if isidentifier(x) && !hasref(x, meta_dict) && # x has no ref 
parentof(x) isa EXPR && headof(parentof(x)) === :quotenode && parentof(parentof(x)) isa EXPR && is_getfield(parentof(parentof(x))) # x is the rhs of a getproperty - lhsref = refof_maybe_getfield(parentof(parentof(x)).args[1]) - hasref(x) && return false # We've resolved + lhsref = refof_maybe_getfield(parentof(parentof(x)).args[1], meta_dict) + hasref(x, meta_dict) && return false # We've resolved if lhsref isa SymbolServer.ModuleStore || (lhsref isa Binding && lhsref.val isa SymbolServer.ModuleStore) # a module, we should know this. return true elseif lhsref isa Binding # by-use type inference runs after we've resolved references so we may not have known lhsref's type first time round, lets try and find `x` again - resolve_getfield(x, lhsref, ResolveOnly(retrieve_scope(x), env, nothing)) # FIXME: Setting `server` to nothing might be sketchy? - hasref(x) && return false # We've resolved + resolve_getfield(x, lhsref, ResolveOnly(retrieve_scope(x, meta_dict), env), meta_dict) # FIXME: Setting `server` to nothing might be sketchy? + hasref(x, meta_dict) && return false # We've resolved if lhsref.val isa Binding lhsref = lhsref.val end - lhsref = get_root_method(lhsref, nothing) + lhsref = get_root_method(lhsref) if lhsref isa EXPR # Not clear what is happening here. return false @@ -680,8 +678,8 @@ function should_mark_missing_getfield_ref(x, env) return true elseif lhsref.type isa Binding && lhsref.type.val isa EXPR && CSTParser.defines_struct(lhsref.type.val) && !has_getproperty_method(lhsref.type) # We may have infered the lhs type after the semantic pass that was resolving references. Copied from `resolve_getfield(x::EXPR, parent_type::EXPR, state::State)::Bool`. 
- if scopehasbinding(scopeof(lhsref.type.val), valof(x)) - setref!(x, scopeof(lhsref.type.val).names[valof(x)]) + if scopehasbinding(scopeof(lhsref.type.val, meta_dict), valof(x)) + setref!(x, scopeof(lhsref.type.val, meta_dict).names[valof(x)], meta_dict) return false end return true @@ -740,32 +738,32 @@ end isunionfaketype(t::SymbolServer.FakeTypeName) = t.name.name === :Union && t.name.parent isa SymbolServer.VarRef && t.name.parent.name === :Core -function check_typeparams(x::EXPR) +function check_typeparams(x::EXPR, meta_dict) if iswhere(x) for i in 2:length(x.args) a = x.args[i] - if hasbinding(a) && (bindingof(a).refs === nothing || length(bindingof(a).refs) < 2) - seterror!(a, UnusedTypeParameter) + if hasbinding(a, meta_dict) && (bindingof(a, meta_dict).refs === nothing || length(bindingof(a, meta_dict).refs) < 2) + seterror!(a, UnusedTypeParameter, meta_dict) end end end end -function check_for_pirates(x::EXPR) +function check_for_pirates(x::EXPR, meta_dict) if CSTParser.defines_function(x) sig = CSTParser.rem_where_decl(CSTParser.get_sig(x)) fname = CSTParser.get_name(sig) if fname_is_noteq(fname) - seterror!(x, NotEqDef) - elseif iscall(sig) && hasbinding(x) && overwrites_imported_function(refof(fname)) + seterror!(x, NotEqDef, meta_dict) + elseif iscall(sig) && hasbinding(x, meta_dict) && overwrites_imported_function(refof(fname, meta_dict)) for i = 2:length(sig.args) - if hasbinding(sig.args[i]) && bindingof(sig.args[i]).type isa Binding + if hasbinding(sig.args[i], meta_dict) && bindingof(sig.args[i], meta_dict).type isa Binding return elseif refers_to_nonimported_type(sig.args[i]) return end end - seterror!(x, TypePiracy) + seterror!(x, TypePiracy, meta_dict) end end end @@ -783,7 +781,7 @@ end function refers_to_nonimported_type(arg::EXPR) arg = CSTParser.rem_wheres(arg) - if hasref(arg) && refof(arg) isa Binding + if hasref(arg, meta_dict) && refof(arg, meta_dict) isa Binding return true elseif isunarysyntax(arg) && (valof(headof(arg)) == "::" || 
valof(headof(arg)) == "<:") return refers_to_nonimported_type(arg.args[1]) @@ -811,29 +809,29 @@ end # Now called from add_binding # Should return true/false indicating whether the binding should actually be added? -function check_const_decl(name::String, b::Binding, scope) +function check_const_decl(name::String, b::Binding, scope, meta_dict) # assumes `scopehasbinding(scope, name)` - b.val isa Binding && return check_const_decl(name, b.val, scope) + b.val isa Binding && return check_const_decl(name, b.val, scope, meta_dict) if b.val isa EXPR && (CSTParser.defines_datatype(b.val) || is_const(bind)) - seterror!(b.val, CannotDeclareConst) + seterror!(b.val, CannotDeclareConst, meta_dict) else prev = scope.names[name] - if (CoreTypes.isdatatype(prev.type) && !is_mask_binding_of_datatype(prev)) || is_const(prev) + if (CoreTypes.isdatatype(prev.type) && !is_mask_binding_of_datatype(prev, meta_dict)) || is_const(prev) if b.val isa EXPR && prev.val isa EXPR && !in_same_if_branch(b.val, prev.val) return end if b.val isa EXPR - seterror!(b.val, InvalidRedefofConst) + seterror!(b.val, InvalidRedefofConst, meta_dict) else # TODO check what's going on here - seterror!(b.name, InvalidRedefofConst) + seterror!(b.name, InvalidRedefofConst, meta_dict) end end end end -function is_mask_binding_of_datatype(b::Binding) - b.val isa EXPR && CSTParser.isassignment(b.val) && (rhsref = refof(b.val.args[2])) !== nothing && (rhsref isa SymbolServer.DataTypeStore || (rhsref.val isa EXPR && rhsref.val isa SymbolServer.DataTypeStore) || (rhsref.val isa EXPR && CSTParser.defines_datatype(rhsref.val))) +function is_mask_binding_of_datatype(b::Binding, meta_dict) + b.val isa EXPR && CSTParser.isassignment(b.val) && (rhsref = refof(b.val.args[2], meta_dict)) !== nothing && (rhsref isa SymbolServer.DataTypeStore || (rhsref.val isa EXPR && rhsref.val isa SymbolServer.DataTypeStore) || (rhsref.val isa EXPR && CSTParser.defines_datatype(rhsref.val))) end # check whether a and b are in all the same 
:if blocks and in the same branches @@ -878,27 +876,27 @@ Check that the default value matches the type for keyword arguments. Following t checked: `String, Symbol, Int, Char, Bool, Float32, Float64, UInt8, UInt16, UInt32, UInt64, UInt128`. """ -function check_kw_default(x::EXPR, env::ExternalEnv) - if headof(x) == :kw && isdeclaration(x.args[1]) && CSTParser.isliteral(x.args[2]) && hasref(x.args[1].args[2]) - decl_T = get_eventual_datatype(refof(x.args[1].args[2]), env) +function check_kw_default(x::EXPR, env::ExternalEnv, meta_dict) + if headof(x) == :kw && isdeclaration(x.args[1]) && CSTParser.isliteral(x.args[2]) && hasref(x.args[1].args[2], meta_dict) + decl_T = get_eventual_datatype(refof(x.args[1].args[2], meta_dict), env) rhs = x.args[2] rhsval = valof(rhs) if decl_T == getsymbols(env)[:Core][:String] && !CSTParser.isstringliteral(rhs) - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) elseif decl_T == getsymbols(env)[:Core][:Symbol] && headof(rhs) !== :IDENTIFIER - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) elseif decl_T == getsymbols(env)[:Core][:Int] && headof(rhs) !== :INTEGER - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) elseif decl_T == getsymbols(env)[:Core][Sys.WORD_SIZE == 64 ? 
:Int64 : :Int32] && headof(rhs) !== :INTEGER - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) elseif decl_T == getsymbols(env)[:Core][:Bool] && !(headof(rhs) === :TRUE || headof(rhs) === :FALSE) - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) elseif decl_T == getsymbols(env)[:Core][:Char] && headof(rhs) !== :CHAR - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) elseif decl_T == getsymbols(env)[:Core][:Float64] && headof(rhs) !== :FLOAT - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) elseif decl_T == getsymbols(env)[:Core][:Float32] && !(headof(rhs) === :FLOAT && occursin("f", rhsval)) - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) else for T in (UInt8, UInt16, UInt32, UInt64, UInt128) if decl_T == getsymbols(env)[:Core][Symbol(T)] @@ -909,20 +907,20 @@ function check_kw_default(x::EXPR, env::ExternalEnv) ub = sizeof(T) lb = ub รท 2 if headof(rhs) == :BININT - 8lb < n <= 8ub || seterror!(rhs, KwDefaultMismatch) + 8lb < n <= 8ub || seterror!(rhs, KwDefaultMismatch, meta_dict) elseif headof(rhs) == :OCTINT - 3lb < n <= 3ub || seterror!(rhs, KwDefaultMismatch) + 3lb < n <= 3ub || seterror!(rhs, KwDefaultMismatch, meta_dict) elseif headof(rhs) == :HEXINT - 2lb < n <= 2ub || seterror!(rhs, KwDefaultMismatch) + 2lb < n <= 2ub || seterror!(rhs, KwDefaultMismatch, meta_dict) else - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) end end end # signed integers of non native size can't be declared as literal for T in (Int8, Int16, Sys.WORD_SIZE == 64 ? 
Int32 : Int64, Int128) if decl_T == getsymbols(env)[:Core][Symbol(T)] - seterror!(rhs, KwDefaultMismatch) + seterror!(rhs, KwDefaultMismatch, meta_dict) end end @@ -930,47 +928,47 @@ function check_kw_default(x::EXPR, env::ExternalEnv) end end -function check_use_of_literal(x::EXPR) +function check_use_of_literal(x::EXPR, meta_dict) if CSTParser.defines_module(x) && length(x.args) > 1 && isbadliteral(x.args[2]) - seterror!(x.args[2], InappropriateUseOfLiteral) + seterror!(x.args[2], InappropriateUseOfLiteral, meta_dict) elseif (CSTParser.defines_abstract(x) || CSTParser.defines_primitive(x)) && isbadliteral(x.args[1]) - seterror!(x.args[1], InappropriateUseOfLiteral) + seterror!(x.args[1], InappropriateUseOfLiteral, meta_dict) elseif CSTParser.defines_struct(x) && isbadliteral(x.args[2]) - seterror!(x.args[2], InappropriateUseOfLiteral) + seterror!(x.args[2], InappropriateUseOfLiteral, meta_dict) elseif (isassignment(x) || iskwarg(x)) && isbadliteral(x.args[1]) - seterror!(x.args[1], InappropriateUseOfLiteral) + seterror!(x.args[1], InappropriateUseOfLiteral, meta_dict) elseif isdeclaration(x) && isbadliteral(x.args[2]) - seterror!(x.args[2], InappropriateUseOfLiteral) + seterror!(x.args[2], InappropriateUseOfLiteral, meta_dict) elseif isbinarycall(x, "isa") && isbadliteral(x.args[3]) - seterror!(x.args[3], InappropriateUseOfLiteral) + seterror!(x.args[3], InappropriateUseOfLiteral, meta_dict) end end isbadliteral(x::EXPR) = CSTParser.isliteral(x) && (CSTParser.isstringliteral(x) || headof(x) === :INTEGER || headof(x) === :FLOAT || headof(x) === :CHAR || headof(x) === :TRUE || headof(x) === :FALSE) -function check_break_continue(x::EXPR) +function check_break_continue(x::EXPR, meta_dict) if iskeyword(x) && (headof(x) === :CONTINUE || headof(x) === :BREAK) && !is_in_fexpr(x, x -> headof(x) in (:for, :while)) - seterror!(x, ShouldBeInALoop) + seterror!(x, ShouldBeInALoop, meta_dict) end end -function check_const(x::EXPR) +function check_const(x::EXPR, meta_dict) if 
headof(x) === :const if VERSION < v"1.8.0-DEV.1500" && CSTParser.isassignment(x.args[1]) && CSTParser.isdeclaration(x.args[1].args[1]) - seterror!(x, TypeDeclOnGlobalVariable) + seterror!(x, TypeDeclOnGlobalVariable, meta_dict) elseif headof(x.args[1]) === :local - seterror!(x, UnsupportedConstLocalVariable) + seterror!(x, UnsupportedConstLocalVariable, meta_dict) end end end -function check_unused_binding(b::Binding, scope::Scope) +function check_unused_binding(b::Binding, scope::Scope, meta_dict, root_dict, rt) if headof(scope.expr) !== :struct && headof(scope.expr) !== :tuple && !all_underscore(valof(b.name)) - refs = loose_refs(b) + refs = loose_refs(b, meta_dict, root_dict, rt) if (isempty(refs) || length(refs) == 1 && refs[1] == b.name) && - !is_sig_arg(b.name) && !is_overwritten_in_loop(b.name) && - !is_overwritten_subsequently(b, scope) && !is_kw_of_macrocall(b) - seterror!(b.name, UnusedBinding) + !is_sig_arg(b.name) && !is_overwritten_in_loop(b.name, meta_dict) && + !is_overwritten_subsequently(b, scope, meta_dict, root_dict, rt) && !is_kw_of_macrocall(b) + seterror!(b.name, UnusedBinding, meta_dict) end end end @@ -986,7 +984,7 @@ function is_kw_of_macrocall(b::Binding) b.val isa EXPR && isassignment(b.val) && parentof(b.val) isa EXPR && CSTParser.ismacrocall(parentof(b.val)) end -function is_overwritten_in_loop(x) +function is_overwritten_in_loop(x, meta_dict) # Cuts out false positives for check_unused_binding - the linear nature of our # semantic passes mean a variable declared at the end of a loop's block but used at # the start won't appear to be referenced. @@ -999,7 +997,7 @@ function is_overwritten_in_loop(x) # Is this too expensive? 
loop = maybe_get_parent_fexpr(x, x -> x.head === :while || x.head === :for) if loop !== nothing - s = scopeof(loop) + s = scopeof(loop, meta_dict) if s isa Scope && parentof(s) isa Scope s2 = check_parent_scopes_for(s, valof(x)) if s2 isa Scope @@ -1038,7 +1036,7 @@ mutable struct ComesBefore result::Int end -function (state::ComesBefore)(x::EXPR) +function (state::ComesBefore)(x::EXPR, meta_dict, root_dict, rt) state.result > 0 && return if x == state.x1 state.result = 1 @@ -1047,8 +1045,8 @@ function (state::ComesBefore)(x::EXPR) state.result = 2 return end - if !hasscope(x) - traverse(x, state) + if !hasscope(x, meta_dict) + traverse(x, state, meta_dict, root_dict, rt) state.result > 0 && return end end @@ -1070,10 +1068,10 @@ end -function is_overwritten_subsequently(b::Binding, scope::Scope) +function is_overwritten_subsequently(b::Binding, scope::Scope, meta_dict, root_dict, rt) valof(b.name) === nothing && return false s = BoundAfter(b.name, valof(b.name), 0) - traverse(scope.expr, s) + traverse(scope.expr, s, meta_dict, root_dict, rt) return s.result == 2 end @@ -1088,15 +1086,15 @@ mutable struct BoundAfter result::Int end -function (state::BoundAfter)(x::EXPR) +function (state::BoundAfter)(x::EXPR, meta_dict, root_dict, rt) state.result > 1 && return if x == state.x1 state.result = 1 return end - if scopeof(x) isa Scope && haskey(scopeof(x).names, state.name) + if scopeof(x, meta_dict) isa Scope && haskey(scopeof(x, meta_dict).names, state.name) state.result = 2 return end - traverse(x, state) + traverse(x, state, meta_dict, root_dict, rt) end diff --git a/src/StaticLint/macros.jl b/src/StaticLint/macros.jl index b340e5f..0cbbf81 100644 --- a/src/StaticLint/macros.jl +++ b/src/StaticLint/macros.jl @@ -1,18 +1,18 @@ function handle_macro(@nospecialize(x), state) end -function handle_macro(x::EXPR, state) +function handle_macro(x::EXPR, state, meta_dict, root_dict, rt) !CSTParser.ismacrocall(x) && return if headof(x.args[1]) === :globalrefdoc if 
length(x.args) == 4 - if isidentifier(x.args[4]) && !resolve_ref(x.args[4], state) + if isidentifier(x.args[4]) && !resolve_ref(x.args[4], state, meta_dict) if state isa Toplevel push!(state.resolveonly, x) end elseif CSTParser.is_func_call(x.args[4]) sig = (x.args[4]) if sig isa EXPR - hasscope(sig) && return # We've already done this, don't repeat - setscope!(sig, Scope(sig)) - mark_sig_args!(sig) + hasscope(sig, meta_dict) && return # We've already done this, don't repeat + setscope!(sig, Scope(sig), meta_dict) + mark_sig_args!(sig, meta_dict) end if state isa Toplevel push!(state.resolveonly, x) @@ -20,70 +20,70 @@ function handle_macro(x::EXPR, state) end end elseif CSTParser.ismacroname(x.args[1]) - state(x.args[1]) - if _points_to_Base_macro(x.args[1], Symbol("@deprecate"), state) && length(x.args) == 4 - if bindingof(x.args[3]) !== nothing + state(x.args[1], meta_dict, root_dict, rt) + if _points_to_Base_macro(x.args[1], Symbol("@deprecate"), state, meta_dict) && length(x.args) == 4 + if bindingof(x.args[3], meta_dict) !== nothing return elseif CSTParser.is_func_call(x.args[3]) # add deprecated method # add deprecated function binding and args in new scope - mark_binding!(x.args[3], x) - mark_sig_args!(x.args[3]) + mark_binding!(x.args[3], meta_dict, x) + mark_sig_args!(x.args[3], meta_dict) s0 = state.scope # store previous scope state.scope = Scope(s0, x, Dict(), nothing, nothing) - setscope!(x, state.scope) # tag new scope to generating expression + setscope!(x, state.scope, meta_dict) # tag new scope to generating expression state(x.args[3]) state(x.args[4]) state.scope = s0 elseif isidentifier(x.args[3]) - mark_binding!(x.args[3], x) + mark_binding!(x.args[3], meta_dict, x) end - elseif _points_to_Base_macro(x.args[1], Symbol("@deprecate_binding"), state) && length(x.args) == 4 && isidentifier(x.args[3]) && isidentifier(x.args[4]) - setref!(x.args[3], refof(x.args[4])) - elseif _points_to_Base_macro(x.args[1], Symbol("@eval"), state) && length(x.args) 
== 3 && state isa Toplevel + elseif _points_to_Base_macro(x.args[1], Symbol("@deprecate_binding"), state, meta_dict) && length(x.args) == 4 && isidentifier(x.args[3]) && isidentifier(x.args[4]) + setref!(x.args[3], refof(x.args[4], meta_dict), meta_dict) + elseif _points_to_Base_macro(x.args[1], Symbol("@eval"), state, meta_dict) && length(x.args) == 3 && state isa Toplevel # Create scope around eval'ed expression. This ensures anybindings are # correctly hoisted to the top-level scope. - setscope!(x, Scope(x)) - setparent!(scopeof(x), state.scope) + setscope!(x, Scope(x), meta_dict) + setparent!(scopeof(x, meta_dict), state.scope) s0 = state.scope - state.scope = scopeof(x) + state.scope = scopeof(x, meta_dict) interpret_eval(x.args[3], state) state.scope = s0 - elseif _points_to_Base_macro(x.args[1], Symbol("@irrational"), state) && length(x.args) == 5 - mark_binding!(x.args[3], x) - elseif _points_to_Base_macro(x.args[1], Symbol("@enum"), state) + elseif _points_to_Base_macro(x.args[1], Symbol("@irrational"), state, meta_dict) && length(x.args) == 5 + mark_binding!(x.args[3], meta_dict, x) + elseif _points_to_Base_macro(x.args[1], Symbol("@enum"), state, meta_dict) for i = 3:length(x.args) - if bindingof(x.args[i]) !== nothing + if bindingof(x.args[i], meta_dict) !== nothing break end if i == 4 && headof(x.args[4]) === :block for j in 1:length(x.args[4].args) - mark_binding!(x.args[4].args[j], x) + mark_binding!(x.args[4].args[j], meta_dict, x) end break end - mark_binding!(x.args[i], x) + mark_binding!(x.args[i], meta_dict, x) end - elseif _points_to_Base_macro(x.args[1], Symbol("@goto"), state) + elseif _points_to_Base_macro(x.args[1], Symbol("@goto"), state, meta_dict) if length(x.args) == 3 && isidentifier(x.args[3]) - setref!(x.args[3], Binding(noname, nothing, nothing, EXPR[])) + setref!(x.args[3], Binding(noname, nothing, nothing, EXPR[]), meta_dict) end - elseif _points_to_Base_macro(x.args[1], Symbol("@label"), state) + elseif 
_points_to_Base_macro(x.args[1], Symbol("@label"), state, meta_dict) if length(x.args) == 3 && isidentifier(x.args[3]) - mark_binding!(x.args[3]) + mark_binding!(x.args[3], meta_dict) end - elseif _points_to_Base_macro(x.args[1], Symbol("@NamedTuple"), state) && length(x.args) > 2 && headof(x.args[3]) == :braces + elseif _points_to_Base_macro(x.args[1], Symbol("@NamedTuple"), state, meta_dict) && length(x.args) > 2 && headof(x.args[3]) == :braces for a in x.args[3].args - if CSTParser.isdeclaration(a) && isidentifier(a.args[1]) && !hasref(a.args[1]) - setref!(a.args[1], Binding(noname, nothing, nothing, EXPR[])) + if CSTParser.isdeclaration(a) && isidentifier(a.args[1]) && !hasref(a.args[1], meta_dict) + setref!(a.args[1], Binding(noname, nothing, nothing, EXPR[]), meta_dict) end end elseif is_nospecialize(x.args[1]) for i = 2:length(x.args) - if bindingof(x.args[i]) !== nothing + if bindingof(x.args[i], meta_dict) !== nothing break end - mark_binding!(x.args[i], x) + mark_binding!(x.args[i], meta_dict, x) end # elseif _points_to_arbitrary_macro(x.args[1], :Turing, :model, state) && length(x) == 3 && # isassignment(x.args[3]) && @@ -129,29 +129,29 @@ is_nospecialize(x) = isidentifier(x) && valofid(x) == "@nospecialize" function _mark_JuMP_binding(arg) if isidentifier(arg) || headof(arg) === :ref - mark_binding!(_rem_ref(arg)) + mark_binding!(_rem_ref(arg), meta_dict) elseif isbinarycall(arg, "==") || isbinarycall(arg, "<=") || isbinarycall(arg, ">=") if isidentifier(arg.args[1]) || headof(arg.args[1]) === :ref - mark_binding!(_rem_ref(arg.args[1])) + mark_binding!(_rem_ref(arg.args[1]), meta_dict) else - mark_binding!(_rem_ref(arg.args[3])) + mark_binding!(_rem_ref(arg.args[3]), meta_dict) end elseif headof(arg) === :comparision && length(arg.args) == 5 - mark_binding!(_rem_ref(arg.args[3])) + mark_binding!(_rem_ref(arg.args[3]), meta_dict) end end -function _points_to_Base_macro(x::EXPR, name, state) - CSTParser.is_getfield_w_quotenode(x) && return 
_points_to_Base_macro(x.args[2].args[1], name, state) +function _points_to_Base_macro(x::EXPR, name, state, meta_dict) + CSTParser.is_getfield_w_quotenode(x) && return _points_to_Base_macro(x.args[2].args[1], name, state, meta_dict) haskey(getsymbols(state)[:Base], name) || return false targetmacro = maybe_lookup(getsymbols(state)[:Base][name], state) - isidentifier(x) && Symbol(valofid(x)) == name && (ref = refof(x)) !== nothing && + isidentifier(x) && Symbol(valofid(x)) == name && (ref = refof(x, meta_dict)) !== nothing && (ref == targetmacro || (ref isa Binding && ref.val == targetmacro)) end function _points_to_arbitrary_macro(x::EXPR, module_name, name, state) - length(x.args) == 2 && isidentifier(x.args[2]) && valof(x.args[2]) == name && haskey(getsymbols(state), Symbol(module_name)) && haskey(getsymbols(state)[Symbol(module_name)], Symbol("@", name)) && (refof(x.args[2]) == maybe_lookup(getsymbols(state)[Symbol(module_name)][Symbol("@", name)], state) || - (refof(x.args[2]) isa Binding && refof(x.args[2]).val == maybe_lookup(getsymbols(state)[Symbol(module_name)][Symbol("@", name)], state))) + length(x.args) == 2 && isidentifier(x.args[2]) && valof(x.args[2]) == name && haskey(getsymbols(state), Symbol(module_name)) && haskey(getsymbols(state)[Symbol(module_name)], Symbol("@", name)) && (refof(x.args[2], meta_dict) == maybe_lookup(getsymbols(state)[Symbol(module_name)][Symbol("@", name)], state) || + (refof(x.args[2], meta_dict) isa Binding && refof(x.args[2], meta_dict).val == maybe_lookup(getsymbols(state)[Symbol(module_name)][Symbol("@", name)], state))) end maybe_lookup(x, env::ExternalEnv) = x isa SymbolServer.VarRef ? 
SymbolServer._lookup(x, getsymbols(env), true) : x @@ -206,23 +206,23 @@ any bindings made within the scope of `x` to the toplevel and replaces function interpret_eval(x::EXPR, state) # make sure we have bindings etc state(x) - tls = retrieve_toplevel_scope(x) - for ex in collect_expr_with_bindings(x) - b = bindingof(ex) + tls = retrieve_toplevel_scope(x, meta_dict) + for ex in collect_expr_with_bindings(x, meta_dict) + b = bindingof(ex, meta_dict) if isidentifier(b.name) # The name of the binding is fixed - add_binding(ex, state, tls) + add_binding(ex, state, meta_dict, tls) elseif isunarysyntax(b.name) && valof(headof(b.name)) == "\$" # The name of the binding is variable, we need to work out what the # interpolated symbol points to. variable_name = b.name.args[1] - resolve_ref(variable_name, state.scope, state) - if (ref = refof(variable_name)) isa Binding + resolve_ref(variable_name, state.scope, state, meta_dict) + if (ref = refof(variable_name, meta_dict)) isa Binding if isassignment(ref.val) && (rhs = maybeget_quotedsymbol(ref.val.args[2])) !== nothing # `name = :something` toplevel_binding = Binding(rhs, b.val, nothing, []) settype!(toplevel_binding, b.type) - infer_type(toplevel_binding, tls, state) + infer_type(toplevel_binding, tls, state, meta_dict) if scopehasbinding(tls, valofid(toplevel_binding.name)) tls.names[valofid(toplevel_binding.name)] = toplevel_binding # TODO: do we need to check whether this adds a method? else @@ -233,7 +233,7 @@ function interpret_eval(x::EXPR, state) for name in names toplevel_binding = Binding(name, b.val, nothing, []) settype!(toplevel_binding, b.type) - infer_type(toplevel_binding, tls, state) + infer_type(toplevel_binding, tls, state, meta_dict) if scopehasbinding(tls, valofid(toplevel_binding.name)) tls.names[valofid(toplevel_binding.name)] = toplevel_binding # TODO: do we need to check whether this adds a method? 
else @@ -255,8 +255,8 @@ function rhs_of_iterator(x::EXPR) end end -function collect_expr_with_bindings(x, bound_exprs=EXPR[]) - if hasbinding(x) +function collect_expr_with_bindings(x, meta_dict, bound_exprs=EXPR[]) + if hasbinding(x, meta_dict) push!(bound_exprs, x) # Assuming here that if an expression has a binding we don't want anything bound to chlid nodes. elseif x.args !== nothing && !((CSTParser.defines_function(x) && !is_eventually_interpolated(x.args[1])) || CSTParser.defines_macro(x) || headof(x) === :export) diff --git a/src/StaticLint/methodmatching.jl b/src/StaticLint/methodmatching.jl index 341c73b..5511577 100644 --- a/src/StaticLint/methodmatching.jl +++ b/src/StaticLint/methodmatching.jl @@ -1,8 +1,8 @@ -function arg_type(arg, ismethod) +function arg_type(arg, ismethod, meta_dict) if ismethod - if hasbinding(arg) - if bindingof(arg) isa Binding && bindingof(arg).type !== nothing - type = bindingof(arg).type + if hasbinding(arg, meta_dict) + if bindingof(arg, meta_dict) isa Binding && bindingof(arg, meta_dict).type !== nothing + type = bindingof(arg, meta_dict).type if type isa Binding && type.val isa SymbolServer.DataTypeStore type = type.val end @@ -10,9 +10,9 @@ function arg_type(arg, ismethod) end end else - if hasref(arg) - if refof(arg) isa Binding && refof(arg).type !== nothing - type = refof(arg).type + if hasref(arg, meta_dict) + if refof(arg, meta_dict) isa Binding && refof(arg, meta_dict).type !== nothing + type = refof(arg, meta_dict).type if type isa Binding && type.val isa SymbolServer.DataTypeStore type = type.val end @@ -56,11 +56,11 @@ function call_arg_types(call::EXPR, ismethod) push!(kws, call.args[2].args[i].args[1]) end for i = 3:length(call.args) - push!(types, arg_type(call.args[i], ismethod)) + push!(types, arg_type(call.args[i], ismethod, meta_dict)) end else for i = 2:length(call.args) - push!(types, arg_type(call.args[i], ismethod)) + push!(types, arg_type(call.args[i], ismethod, meta_dict)) end end types, kws @@ -75,17 
+75,17 @@ function method_arg_types(call::EXPR) end for i = 3:length(call.args) if CSTParser.iskwarg(call.args[i]) - push!(opts, arg_type(call.args[i].args[1], true)) + push!(opts, arg_type(call.args[i].args[1], true, meta_dict)) else - push!(types, arg_type(call.args[i], true)) + push!(types, arg_type(call.args[i], true, meta_dict)) end end else for i = 2:length(call.args) if CSTParser.iskwarg(call.args[i]) - push!(opts, arg_type(call.args[i].args[1], true)) + push!(opts, arg_type(call.args[i].args[1], true, meta_dict)) else - push!(types, arg_type(call.args[i], true)) + push!(types, arg_type(call.args[i], true, meta_dict)) end end end @@ -96,7 +96,7 @@ function find_methods(x::EXPR, store) possibles = [] if iscall(x) length(x.args) === 0 && return possibles - func_ref = refof_call_func(x) + func_ref = refof_call_func(x, meta_dict) func_ref === nothing && return possibles args, kws = call_arg_types(x, false) if func_ref isa Binding && func_ref.val isa SymbolServer.FunctionStore || @@ -177,7 +177,7 @@ function match_method(args::Vector{Any}, kws::Vector{Any}, method::EXPR, store) end return true end - push!(margs, arg_type(arg, true)) + push!(margs, arg_type(arg, true, meta_dict)) end else sig = CSTParser.rem_decl(CSTParser.get_sig(method)) @@ -211,11 +211,11 @@ function match_method(args::Vector{Any}, kws::Vector{Any}, method::EXPR, store) return false end -function refof_call_func(x) - if isidentifier(first(x.args)) && hasref(first(x.args)) - return refof(first(x.args)) - elseif is_getfield_w_quotenode(x.args[1]) && (rhs = rhs_of_getfield(x.args[1])) !== nothing && hasref(rhs) - return refof(rhs) +function refof_call_func(x, meta_dict) + if isidentifier(first(x.args)) && hasref(first(x.args), meta_dict) + return refof(first(x.args), meta_dict) + elseif is_getfield_w_quotenode(x.args[1]) && (rhs = rhs_of_getfield(x.args[1])) !== nothing && hasref(rhs, meta_dict) + return refof(rhs, meta_dict) else return end diff --git a/src/StaticLint/references.jl 
b/src/StaticLint/references.jl index 390303a..7ee79f5 100644 --- a/src/StaticLint/references.jl +++ b/src/StaticLint/references.jl @@ -1,16 +1,12 @@ -function setref!(x::EXPR, binding::Binding) - if !hasmeta(x) - x.meta = Meta() - end - x.meta.ref = binding +function setref!(x::EXPR, binding::Binding, meta_dict) + ensuremeta(x, meta_dict) + getmeta(x, meta_dict).ref = binding push!(binding.refs, x) end -function setref!(x::EXPR, binding) - if !hasmeta(x) - x.meta = Meta() - end - x.meta.ref = binding +function setref!(x::EXPR, binding, meta_dict) + ensuremeta(x, meta_dict) + getmeta(x, meta_dict).ref = binding end @@ -18,9 +14,9 @@ end # refers to. If it remains unresolved and is in a delayed evaluation scope # (i.e. a function) it gets pushed to list (.urefs) to be resolved after we've # run over the entire top-level scope. -function resolve_ref(x, state) +function resolve_ref(x, state, meta_dict) if !(parentof(x) isa EXPR && headof(parentof(x)) === :quotenode) - resolve_ref(x, state.scope, state) + resolve_ref(x, state.scope, state, meta_dict) end end @@ -37,32 +33,32 @@ end # The return value is a boolean that is false if x should point to something but # can't be resolved. -function resolve_ref(x::EXPR, scope::Scope, state::State)::Bool +function resolve_ref(x::EXPR, scope::Scope, state::State, meta_dict)::Bool # if the current scope is a soft scope we should check the parent scope first # before trying to resolve the ref locally # if is_soft_scope(scope) && parentof(scope) isa Scope # resolve_ref(x, parentof(scope), state) && return true # end - hasref(x) && return true + hasref(x, meta_dict) && return true resolved = false if is_getfield(x) - return resolve_getfield(x, scope, state) + return resolve_getfield(x, scope, state, meta_dict) elseif iskwarg(x) # Note to self: this seems wronge - Binding should be attached to entire Kw EXPR. 
- if isidentifier(x.args[1]) && !hasbinding(x.args[1]) - setref!(x.args[1], Binding(x.args[1], nothing, nothing, [])) - elseif isdeclaration(x.args[1]) && isidentifier(x.args[1].args[1]) && !hasbinding(x.args[1].args[1]) - if hasbinding(x.args[1]) - setref!(x.args[1].args[1], bindingof(x.args[1])) + if isidentifier(x.args[1]) && !hasbinding(x.args[1], meta_dict) + setref!(x.args[1], Binding(x.args[1], nothing, nothing, []), meta_dict) + elseif isdeclaration(x.args[1]) && isidentifier(x.args[1].args[1]) && !hasbinding(x.args[1].args[1], meta_dict) + if hasbinding(x.args[1], meta_dict) + setref!(x.args[1].args[1], bindingof(x.args[1], meta_dict), meta_dict) else - setref!(x.args[1].args[1], Binding(x.args[1], nothing, nothing, [])) + setref!(x.args[1].args[1], Binding(x.args[1], nothing, nothing, []), meta_dict) end end return true elseif is_special_macro_term(x) || new_within_struct(x) - setref!(x, Binding(noname, nothing, nothing, [])) + setref!(x, Binding(noname, nothing, nothing, []), meta_dict) return true end mn = nameof_expr_to_resolve(x) @@ -72,52 +68,52 @@ function resolve_ref(x::EXPR, scope::Scope, state::State)::Bool if x.parent.head === :public scope.names[mn].is_public = true end - setref!(x, scope.names[mn]) + setref!(x, scope.names[mn], meta_dict) resolved = true elseif scope.modules isa Dict && length(scope.modules) > 0 for m in values(scope.modules) - resolved = resolve_ref_from_module(x, m, state) + resolved = resolve_ref_from_module(x, m, state, meta_dict) resolved && return true end end if !resolved && !CSTParser.defines_module(scope.expr) && parentof(scope) isa Scope - return resolve_ref(x, parentof(scope), state) + return resolve_ref(x, parentof(scope), state, meta_dict) end return resolved end # Searches a module store for a binding/variable that matches the reference `x1`. 
-function resolve_ref_from_module(x1::EXPR, m::SymbolServer.ModuleStore, state::State)::Bool - hasref(x1) && return true +function resolve_ref_from_module(x1::EXPR, m::SymbolServer.ModuleStore, state::State, meta_dict)::Bool + hasref(x1, meta_dict) && return true if CSTParser.ismacroname(x1) x = x1 if valof(x) == "@." && m.name == VarRef(nothing, :Base) # @. gets converted to @__dot__, probably during lowering. - setref!(x, m[:Broadcast][Symbol("@__dot__")]) + setref!(x, m[:Broadcast][Symbol("@__dot__")], meta_dict) return true end mn = Symbol(valof(x)) if isexportedby(mn, m) - setref!(x, maybe_lookup(m[mn], state)) + setref!(x, maybe_lookup(m[mn], state), meta_dict) return true end elseif isidentifier(x1) x = x1 if Symbol(valof(x)) == m.name.name - setref!(x, m) + setref!(x, m, meta_dict) return true elseif isexportedby(x, m) - setref!(x, maybe_lookup(m[Symbol(valof(x))], state)) + setref!(x, maybe_lookup(m[Symbol(valof(x))], state), meta_dict) return true end end return false end -function resolve_ref_from_module(x::EXPR, scope::Scope, state::State)::Bool - hasref(x) && return true +function resolve_ref_from_module(x::EXPR, scope::Scope, state::State, meta_dict)::Bool + hasref(x, meta_dict) && return true mn = nameof_expr_to_resolve(x) mn === nothing && return true @@ -126,9 +122,9 @@ function resolve_ref_from_module(x::EXPR, scope::Scope, state::State)::Bool if CSTParser.defines_module(scope.expr) n = CSTParser.get_name(scope.expr) if CSTParser.isidentifier(n) && mn == CSTParser.valof(n) - b = bindingof(scope.expr) # moduleโ€™s binding + b = bindingof(scope.expr, meta_dict) # moduleโ€™s binding if b isa Binding - setref!(x, b) + setref!(x, b, meta_dict) return true end end @@ -136,7 +132,7 @@ function resolve_ref_from_module(x::EXPR, scope::Scope, state::State)::Bool # 2) Resolve exported names from this module scope if scope_exports(scope, mn, state) - setref!(x, scope.names[mn]) + setref!(x, scope.names[mn], meta_dict) return true end @@ -171,8 +167,8 @@ 
function initial_pass_on_exports(x::EXPR, name, state) for a in x.args[3] # module block expressions if headof(a) === :export for i = 1:length(a.args) - if isidentifier(a.args[i]) && valof(a.args[i]) == name && !hasref(a.args[i]) - Delayed(scopeof(x), state.env, state.server)(a.args[i]) + if isidentifier(a.args[i]) && valof(a.args[i]) == name && !hasref(a.args[i], meta_dict) + Delayed(scopeof(x, meta_dict), state.env)(a.args[i]) end end end @@ -180,8 +176,8 @@ function initial_pass_on_exports(x::EXPR, name, state) end # Fallback method -function resolve_ref(x::EXPR, m, state::State)::Bool - return hasref(x)::Bool +function resolve_ref(x::EXPR, m, state::State, meta_dict)::Bool + return hasref(x, meta_dict)::Bool end rhs_of_getfield(x::EXPR) = CSTParser.is_getfield_w_quotenode(x) ? x.args[2].args[1] : x @@ -195,9 +191,9 @@ called with `parent::EXPR` resolves the reference for `parent`, other methods then check whether the Binding/Scope/ModuleStore to which `parent` points has a field matching `x`. 
""" -function resolve_getfield(x::EXPR, scope::Scope, state::State)::Bool - hasref(x) && return true - resolved = resolve_ref(x.args[1], scope, state) +function resolve_getfield(x::EXPR, scope::Scope, state::State, meta_dict)::Bool + hasref(x, meta_dict) && return true + resolved = resolve_ref(x.args[1], scope, state, meta_dict) if isidentifier(x.args[1]) lhs = x.args[1] elseif CSTParser.is_getfield_w_quotenode(x.args[1]) @@ -206,21 +202,21 @@ function resolve_getfield(x::EXPR, scope::Scope, state::State)::Bool return resolved end if resolved && (rhs = rhs_of_getfield(x)) !== nothing - resolved = resolve_getfield(rhs, refof(lhs), state) + resolved = resolve_getfield(rhs, refof(lhs, meta_dict), state, meta_dict) end return resolved end -function resolve_getfield(x::EXPR, parent_type::EXPR, state::State)::Bool - hasref(x) && return true +function resolve_getfield(x::EXPR, parent_type::EXPR, state::State, meta_dict)::Bool + hasref(x, meta_dict) && return true resolved = false if isidentifier(x) - if CSTParser.defines_module(parent_type) && scopeof(parent_type) isa Scope - resolved = resolve_ref(x, scopeof(parent_type), state) + if CSTParser.defines_module(parent_type) && scopeof(parent_type, meta_dict) isa Scope + resolved = resolve_ref(x, scopeof(parent_type), state, meta_dict) elseif CSTParser.defines_struct(parent_type) - if scopehasbinding(scopeof(parent_type), valofid(x)) - setref!(x, scopeof(parent_type).names[valofid(x)]) + if scopehasbinding(scopeof(parent_type, meta_dict), valofid(x)) + setref!(x, scopeof(parent_type, meta_dict).names[valofid(x)], meta_dict) resolved = true end end @@ -229,23 +225,23 @@ function resolve_getfield(x::EXPR, parent_type::EXPR, state::State)::Bool end -function resolve_getfield(x::EXPR, b::Binding, state::State)::Bool - hasref(x) && return true +function resolve_getfield(x::EXPR, b::Binding, state::State, meta_dict)::Bool + hasref(x, meta_dict) && return true resolved = false if b.val isa Binding - resolved = resolve_getfield(x, 
b.val, state) + resolved = resolve_getfield(x, b.val, state, meta_dict) elseif b.val isa SymbolServer.ModuleStore || (b.val isa EXPR && CSTParser.defines_module(b.val)) - resolved = resolve_getfield(x, b.val, state) + resolved = resolve_getfield(x, b.val, state, meta_dict) elseif b.type isa Binding - resolved = resolve_getfield(x, b.type.val, state) + resolved = resolve_getfield(x, b.type.val, state, meta_dict) elseif b.type isa SymbolServer.DataTypeStore - resolved = resolve_getfield(x, b.type, state) + resolved = resolve_getfield(x, b.type, state, meta_dict) end return resolved end -function resolve_getfield(x::EXPR, parent_type, state::State)::Bool - hasref(x) +function resolve_getfield(x::EXPR, parent_type, state::State, meta_dict)::Bool + hasref(x, meta_dict) end function is_overloaded(val::SymbolServer.SymStore, scope::Scope) @@ -253,15 +249,15 @@ function is_overloaded(val::SymbolServer.SymStore, scope::Scope) haskey(scope.overloaded, vr) end -function resolve_getfield(x::EXPR, m::SymbolServer.ModuleStore, state::State)::Bool - hasref(x) && return true +function resolve_getfield(x::EXPR, m::SymbolServer.ModuleStore, state::State, meta_dict)::Bool + hasref(x, meta_dict) && return true resolved = false if CSTParser.ismacroname(x) && (val = maybe_lookup(SymbolServer.maybe_getfield(Symbol(valofid(x)), m, getsymbols(state)), state)) !== nothing - setref!(x, val) + setref!(x, val, meta_dict) resolved = true elseif isidentifier(x) && (val = maybe_lookup(SymbolServer.maybe_getfield(Symbol(valofid(x)), m, getsymbols(state)), state)) !== nothing # Check whether variable is overloaded in top-level scope - tls = retrieve_toplevel_scope(state.scope) + tls = retrieve_toplevel_scope(state.scope, meta_dict) # if tls.overloaded !== nothing && (vr = val.name isa SymbolServer.FakeTypeName ? 
val.name.name : val.name; haskey(tls.overloaded, vr)) # @info 1 # setref!(x, tls.overloaded[vr]) @@ -269,33 +265,33 @@ function resolve_getfield(x::EXPR, m::SymbolServer.ModuleStore, state::State)::B # end vr = val.name isa SymbolServer.FakeTypeName ? val.name.name : val.name if haskey(tls.names, valof(x)) && tls.names[valof(x)] isa Binding && tls.names[valof(x)].val isa SymbolServer.FunctionStore - setref!(x, tls.names[valof(x)]) + setref!(x, tls.names[valof(x)], meta_dict) return true elseif tls.overloaded !== nothing && haskey(tls.overloaded, vr) - setref!(x, tls.overloaded[vr]) + setref!(x, tls.overloaded[vr], meta_dict) return true end - setref!(x, val) + setref!(x, val, meta_dict) resolved = true end return resolved end -function resolve_getfield(x::EXPR, parent::SymbolServer.DataTypeStore, state::State)::Bool - hasref(x) && return true +function resolve_getfield(x::EXPR, parent::SymbolServer.DataTypeStore, state::State, meta_dict)::Bool + hasref(x, meta_dict) && return true resolved = false if isidentifier(x) && Symbol(valof(x)) in parent.fieldnames fi = findfirst(f -> Symbol(valof(x)) == f, parent.fieldnames) ft = parent.types[fi] val = SymbolServer._lookup(ft, getsymbols(state), true) # TODO: Need to handle the case where we get back a FakeUnion, etc. - setref!(x, Binding(noname, nothing, val, [])) + setref!(x, Binding(noname, nothing, val, []), meta_dict) resolved = true end return resolved end -resolvable_macroname(x::EXPR) = isidentifier(x) && CSTParser.ismacroname(x) && refof(x) === nothing +resolvable_macroname(x::EXPR) = isidentifier(x) && CSTParser.ismacroname(x) && refof(x, meta_dict) === nothing nameof_expr_to_resolve(x) = isidentifier(x) ? 
valofid(x) : nothing diff --git a/src/StaticLint/scope.jl b/src/StaticLint/scope.jl index b096b30..47585e6 100644 --- a/src/StaticLint/scope.jl +++ b/src/StaticLint/scope.jl @@ -43,7 +43,7 @@ function addmoduletoscope!(s::Scope, m, mname::Symbol) s.modules[mname] = m end addmoduletoscope!(s::Scope, m::SymbolServer.ModuleStore) = addmoduletoscope!(s, m, m.name.name) -addmoduletoscope!(s::Scope, m::EXPR) = CSTParser.defines_module(m) && addmoduletoscope!(s, scopeof(m), Symbol(valof(CSTParser.get_name(m)))) +addmoduletoscope!(s::Scope, m::EXPR) = CSTParser.defines_module(m) && addmoduletoscope!(s, scopeof(m, meta_dict), Symbol(valof(CSTParser.get_name(m)))) addmoduletoscope!(s::Scope, s1::Scope) = CSTParser.defines_module(s1.expr) && addmoduletoscope!(s, s1, Symbol(valof(CSTParser.get_name(s1.expr)))) @@ -69,7 +69,7 @@ function introduces_scope(x::EXPR, state) return true elseif CSTParser.defines_anon_function(x) return true - elseif CSTParser.iswhere(x) + elseif CSTParser.iswhere(x) # unless in func def signature return !_in_func_or_struct_def(x) elseif CSTParser.istuple(x) && CSTParser.hastrivia(x) && ispunctuation(x.trivia[1]) && length(x.args) > 0 && isassignment(x.args[1]) @@ -93,16 +93,14 @@ function introduces_scope(x::EXPR, state) end -hasscope(x::EXPR) = hasmeta(x) && hasscope(x.meta) -scopeof(x) = nothing -scopeof(x::EXPR) = scopeof(x.meta) +hasscope(x::EXPR, meta_dict) = hasmeta(x, meta_dict) && hasscope(getmeta(x, meta_dict)) +scopeof(x, meta_dict) = nothing +scopeof(x::EXPR, meta_dict) = hasmeta(x, meta_dict) ? scopeof(getmeta(x, meta_dict)) : nothing CSTParser.parentof(s::Scope) = s.parent -function setscope!(x::EXPR, s) - if !hasmeta(x) - x.meta = Meta() - end - x.meta.scope = s +function setscope!(x::EXPR, s, meta_dict) + ensuremeta(x, meta_dict) + getmeta(x, meta_dict).scope = s end """ @@ -110,20 +108,20 @@ end Called when traversing the syntax tree and handles the association of scopes with expressions. 
On the first pass this will add scopes as -necessary, on following passes it empties it. +necessary, on following passes it empties it. """ -function scopes(x::EXPR, state) - clear_scope(x) - if scopeof(x) === nothing && introduces_scope(x, state) - setscope!(x, Scope(x)) +function scopes(x::EXPR, state, meta_dict) + clear_scope(x, meta_dict) + if scopeof(x, meta_dict) === nothing && introduces_scope(x, state) + setscope!(x, Scope(x), meta_dict) end s0 = state.scope if headof(x) === :file - setscope!(x, state.scope) + setscope!(x, state.scope, meta_dict) add_eval_method(x, state) - elseif scopeof(x) isa Scope - scopeof(x) != s0 && setparent!(scopeof(x), s0) - state.scope = scopeof(x) + elseif scopeof(x, meta_dict) isa Scope + scopeof(x, meta_dict) != s0 && setparent!(scopeof(x, meta_dict), s0) + state.scope = scopeof(x, meta_dict) if headof(x) === :module && headof(x.args[1]) === :TRUE # Add default modules to a new module state.scope.modules = Dict{Symbol,Any}() # TODO: only create new Dict if not assigned? state.scope.modules[:Base] = getsymbols(state)[:Base] @@ -134,10 +132,10 @@ function scopes(x::EXPR, state) state.scope.modules[:Core] = getsymbols(state)[:Core] add_eval_method(x, state) end - if headof(x) === :module && bindingof(x) !== nothing # Add reference to out of scope binding (i.e. itself) + if headof(x) === :module && bindingof(x, meta_dict) !== nothing # Add reference to out of scope binding (i.e. itself) # state.scope.names[bindingof(x).name] = bindingof(x) # TODO: move this to the binding stage - add_binding(x, state) + add_binding(x, state, meta_dict) # elseif headof(x) === :flatten && headof(x[1]) === CSTParser.Generator && length(x[1]) > 0 && headof(x[1][1]) === CSTParser.Generator # setscope!(x[1][1], nothing) end diff --git a/src/StaticLint/server.jl b/src/StaticLint/server.jl deleted file mode 100644 index 2293aa8..0000000 --- a/src/StaticLint/server.jl +++ /dev/null @@ -1,101 +0,0 @@ -#= -Project trees are usually made up of multiple files. 
An AbstractServer holds the AbstractFiles that represent this tree. FileServer is the basic implementation and assumes files are available and readable from disc. (LanguageServer illustrates another implementaiton). The accompanying functions summarised below are required for making an alternative implementation. - -Interface spec. -AbstractServer :-> (has/canload/load/set/get)file, getsymbols, getsymbolextends -AbstractFile :-> (get/set)path, (get/set)root, (get/set)cst, semantic_pass, (get/set)server -=# -abstract type AbstractServer end -abstract type AbstractFile end - -mutable struct File - path::String - source::String - cst::EXPR - root::Union{Nothing,File} - server -end - -mutable struct FileServer <: AbstractServer - files::Dict{String,File} - roots::Set{File} - workspacepackages::Dict{String,File} # list of files that may represent within-workspace packages - external_env::ExternalEnv -end -FileServer() = FileServer(Dict{String,File}(), Set{File}(), Dict{String,File}(), ExternalEnv(Dict{Symbol,SymbolServer.ModuleStore}(:Base => SymbolServer.stdlibs[:Base], :Core => SymbolServer.stdlibs[:Core]), SymbolServer.collect_extended_methods(SymbolServer.stdlibs), Symbol[])) - - -hasfile(server::FileServer, path::String) = haskey(server.files, path) -canloadfile(server, path) = isfile(path) -function setfile(server::FileServer, path::String, file::File) - server.files[path] = file -end -getfile(server::FileServer, path::String) = server.files[path] -function loadfile(server::FileServer, path::String) - try - source = read(path, String) - cst = CSTParser.parse(source, true) - f = File(path, source, cst, nothing, server) - setroot(f, f) - setfile(server, path, f) - return getfile(server, path) - catch - @info "Could not load $(path) from disk." 
- rethrow() - end -end - -getsymbols(env::ExternalEnv) = env.symbols -getsymbols(state::State) = getsymbols(state.env) - -getsymbolextendeds(env::ExternalEnv) = env.extended_methods -getsymbolextendeds(state::State) = getsymbolextendeds(state.env) - - -""" - getenv(file::File, server::FileServer) - -Get the relevant `ExternalEnv` for a given file. -""" -function getenv(file::File, server::FileServer) - # For FileServer this approach is equivalent to the previous behaviour. Other AbstractServers - # (e.g. LanguageServerInstance) can use this function to associate different files (or trees of - # files) with different environments. - server.external_env -end - - -getpath(file::File) = file.path - -getroot(file::File) = file.root -function setroot(file::File, root::File) - file.root = root - return file -end - -getcst(file::File) = file.cst -function setcst(file::File, cst::EXPR) - file.cst = cst - return file -end - -getserver(file::File) = file.server -function setserver(file::File, server::FileServer) - file.server = server - return file -end - -function Base.display(f::File) - println(f.path) -end - -function Base.display(s::FileServer) - n = length(s.files) - println(n, "-file Server") - cnt = 0 - for p in keys(s.files) - cnt += 1 - println(" ", p) - cnt > 10 && break - end -end diff --git a/src/StaticLint/subtypes.jl b/src/StaticLint/subtypes.jl index 1c2a478..e0282a0 100644 --- a/src/StaticLint/subtypes.jl +++ b/src/StaticLint/subtypes.jl @@ -39,8 +39,8 @@ function _super(b::Binding, store) StaticLint.CoreTypes.isdatatype(b.type) || error() b.val isa Binding && return _super(b.val, store) sup = _super(b.val, store) - if sup isa EXPR && StaticLint.hasref(sup) - StaticLint.refof(sup) + if sup isa EXPR && StaticLint.hasref(sup, meta_dict) + StaticLint.refof(sup, meta_dict) else store[:Core][:Any] end diff --git a/src/StaticLint/type_inf.jl b/src/StaticLint/type_inf.jl index fb2e345..ce1f761 100644 --- a/src/StaticLint/type_inf.jl +++ b/src/StaticLint/type_inf.jl 
@@ -7,7 +7,7 @@ function settype!(b::Binding, type) b.type = type end -function infer_type(binding::Binding, scope, state) +function infer_type(binding::Binding, scope, state, meta_dict) if binding isa Binding binding.type !== nothing && return if binding.val isa EXPR && CSTParser.defines_module(binding.val) @@ -21,10 +21,10 @@ function infer_type(binding::Binding, scope, state) if CSTParser.is_func_call(binding.val.args[1]) settype!(binding, CoreTypes.Function) else - infer_type_assignment_rhs(binding, state, scope) + infer_type_assignment_rhs(binding, state, scope, meta_dict) end elseif binding.val.head isa EXPR && valof(binding.val.head) == "::" - infer_type_decl(binding, state, scope) + infer_type_decl(binding, state, scope, meta_dict) elseif iswhere(parentof(binding.val)) settype!(binding, CoreTypes.DataType) end @@ -32,16 +32,16 @@ function infer_type(binding::Binding, scope, state) end end -function infer_type_assignment_rhs(binding, state, scope) +function infer_type_assignment_rhs(binding, state, scope, meta_dict) is_destructuring = false lhs = binding.val.args[1] rhs = binding.val.args[2] if is_loop_iter_assignment(binding.val) - settype!(binding, infer_eltype(rhs, state)) + settype!(binding, infer_eltype(rhs, state, meta_dict)) elseif headof(rhs) === :ref && length(rhs.args) > 1 - ref = refof_maybe_getfield(rhs.args[1]) + ref = refof_maybe_getfield(rhs.args[1], meta_dict) if ref isa Binding && ref.val isa EXPR - settype!(binding, infer_eltype(ref.val, state)) + settype!(binding, infer_eltype(ref.val, state, meta_dict)) end else if CSTParser.is_func_call(rhs) @@ -54,9 +54,9 @@ function infer_type_assignment_rhs(binding, state, scope) end callname = CSTParser.get_name(rhs) if isidentifier(callname) - resolve_ref(callname, scope, state) - if hasref(callname) - rb = get_root_method(refof(callname), state.server) + resolve_ref(callname, scope, state, meta_dict) + if hasref(callname, meta_dict) + rb = get_root_method(refof(callname, meta_dict)) if (rb isa 
Binding && (CoreTypes.isdatatype(rb.type) || rb.val isa SymbolServer.DataTypeStore)) || rb isa SymbolServer.DataTypeStore if is_destructuring infer_destructuring_type(binding, rb) @@ -85,7 +85,7 @@ function infer_type_assignment_rhs(binding, state, scope) elseif headof(rhs) === :TRUE || headof(rhs) === :FALSE settype!(binding, CoreTypes.Bool) elseif isidentifier(rhs) || is_getfield_w_quotenode(rhs) - refof_rhs = isidentifier(rhs) ? refof(rhs) : refof_maybe_getfield(rhs) + refof_rhs = isidentifier(rhs) ? refof(rhs, meta_dict) : refof_maybe_getfield(rhs, meta_dict) if refof_rhs isa Binding if refof_rhs.val isa SymbolServer.GenericStore && refof_rhs.val.typ isa SymbolServer.FakeTypeName settype!(binding, maybe_lookup(refof_rhs.val.typ.name, state)) @@ -118,7 +118,7 @@ function infer_destructuring_type(binding, rb::SymbolServer.DataTypeStore) end function infer_destructuring_type(binding::Binding, rb::EXPR) assigned_name = string(to_codeobject(binding.name)) - scope = scopeof(rb) + scope = scopeof(rb, meta_dict) names = scope.names if haskey(names, assigned_name) b = names[assigned_name] @@ -127,28 +127,28 @@ function infer_destructuring_type(binding::Binding, rb::EXPR) end infer_destructuring_type(binding, rb::Binding) = infer_destructuring_type(binding, rb.val) -function infer_type_decl(binding, state, scope) +function infer_type_decl(binding, state, scope, meta_dict) t = binding.val.args[2] if isidentifier(t) - resolve_ref(t, scope, state) + resolve_ref(t, scope, state, meta_dict) end if iscurly(t) t = t.args[1] - resolve_ref(t, scope, state) + resolve_ref(t, scope, state, meta_dict) end if CSTParser.is_getfield_w_quotenode(t) - resolve_getfield(t, scope, state) + resolve_getfield(t, scope, state, meta_dict) t = t.args[2].args[1] end - if refof(t) isa Binding - rb = get_root_method(refof(t), state.server) + if refof(t, meta_dict) isa Binding + rb = get_root_method(refof(t, meta_dict)) if rb isa Binding && CoreTypes.isdatatype(rb.type) settype!(binding, rb) else - 
settype!(binding, refof(t)) + settype!(binding, refof(t, meta_dict)) end else - edt = get_eventual_datatype(refof(t), state.env) + edt = get_eventual_datatype(refof(t, meta_dict), state.env) if edt !== nothing settype!(binding, edt) end @@ -162,7 +162,7 @@ function get_eventual_datatype(b::SymbolServer.FunctionStore, env::ExternalEnv) end # Work out what type a bound variable has by functions that are called on it. -function infer_type_by_use(b::Binding, env::ExternalEnv) +function infer_type_by_use(b::Binding, env::ExternalEnv, meta_dict) b.type !== nothing && return # b already has a type possibletypes = [] visitedmethods = [] @@ -180,7 +180,7 @@ function infer_type_by_use(b::Binding, env::ExternalEnv) end ifbranch = newbranch end - check_ref_against_calls(ref, visitedmethods, new_possibles, env) + check_ref_against_calls(ref, visitedmethods, new_possibles, env, meta_dict) if !isempty(new_possibles) if isempty(possibletypes) possibletypes = new_possibles @@ -207,33 +207,33 @@ function infer_type_by_use(b::Binding, env::ExternalEnv) end end -function check_ref_against_calls(x, visitedmethods, new_possibles, env::ExternalEnv) - if is_arg_of_resolved_call(x) && !call_is_func_sig(x.parent) +function check_ref_against_calls(x, visitedmethods, new_possibles, env::ExternalEnv, meta_dict) + if is_arg_of_resolved_call(x, meta_dict) && !call_is_func_sig(x.parent) sig = parentof(x) # x is argument of function call (func) and we know what that function is if CSTParser.isidentifier(sig.args[1]) - func = refof(sig.args[1]) + func = refof(sig.args[1], meta_dict) else - func = refof(sig.args[1].args[2].args[1]) + func = refof(sig.args[1].args[2].args[1], meta_dict) end argi = get_arg_position_in_call(sig, x) # what slot does ref sit in? 
- tls = retrieve_toplevel_scope(x) + tls = retrieve_toplevel_scope(x, meta_dict) if func isa Binding for method in func.refs method = get_method(method) method === nothing && continue if method isa EXPR if defines_function(method) - get_arg_type_at_position(method, argi, new_possibles) + get_arg_type_at_position(method, argi, new_possibles, meta_dict) # elseif CSTParser.defines_struct(method) # Can we ignore this? Default constructor gives us no type info? end else # elseif what? - iterate_over_ss_methods(method, tls, env, m -> (get_arg_type_at_position(m, argi, new_possibles);false)) + iterate_over_ss_methods(method, tls, env, m -> (get_arg_type_at_position(m, argi, new_possibles, meta_dict);false)) end end else - iterate_over_ss_methods(func, tls, env, m -> (get_arg_type_at_position(m, argi, new_possibles);false)) + iterate_over_ss_methods(func, tls, env, m -> (get_arg_type_at_position(m, argi, new_possibles, meta_dict);false)) end end end @@ -253,10 +253,10 @@ function call_is_func_sig(call::EXPR) end end -function is_arg_of_resolved_call(x::EXPR) +function is_arg_of_resolved_call(x::EXPR, meta_dict) parentof(x) isa EXPR && headof(parentof(x)) === :call && # check we're in a call signature (caller = parentof(x).args[1]) !== x && # and that x is not the caller - ((CSTParser.isidentifier(caller) && hasref(caller)) || (is_getfield(caller) && headof(caller.args[2]) === :quotenode && hasref(caller.args[2].args[1]))) + ((CSTParser.isidentifier(caller) && hasref(caller, meta_dict)) || (is_getfield(caller) && headof(caller.args[2]) === :quotenode && hasref(caller.args[2].args[1], meta_dict))) end function get_arg_position_in_call(sig::EXPR, arg) @@ -265,26 +265,26 @@ function get_arg_position_in_call(sig::EXPR, arg) end end -function get_arg_type_at_position(method, argi, types) +function get_arg_type_at_position(method, argi, types, meta_dict) if method isa EXPR sig = CSTParser.get_sig(method) if sig !== nothing && sig.args !== nothing && argi <= length(sig.args) && - 
hasbinding(sig.args[argi]) && - (argb = bindingof(sig.args[argi]); argb isa Binding && argb.type !== nothing) && + hasbinding(sig.args[argi], meta_dict) && + (argb = bindingof(sig.args[argi], meta_dict); argb isa Binding && argb.type !== nothing) && !(argb.type in types) push!(types, argb.type) return end elseif method isa SymbolServer.DataTypeStore || method isa SymbolServer.FunctionStore for m in method.methods - get_arg_type_at_position(m, argi, types) + get_arg_type_at_position(m, argi, types, meta_dict) end end return end -function get_arg_type_at_position(m::SymbolServer.MethodStore, argi, types) +function get_arg_type_at_position(m::SymbolServer.MethodStore, argi, types, meta_dict) if length(m.sig) >= argi && m.sig[argi][2] != SymbolServer.VarRef(SymbolServer.VarRef(nothing, :Core), :Any) && !(m.sig[argi][2] in types) push!(types, m.sig[argi][2]) end @@ -293,16 +293,16 @@ end # Assumes x.head.val == "=" is_loop_iter_assignment(x::EXPR) = x.parent isa EXPR && ((x.parent.head == :for || x.parent.head == :generator) || (x.parent.head == :block && x.parent.parent isa EXPR && (x.parent.parent.head == :for || x.parent.parent.head == :generator))) -function infer_eltype(x::EXPR, state) - if isidentifier(x) && hasref(x) # assume is IDENT - r = refof(x) +function infer_eltype(x::EXPR, state, meta_dict) + if isidentifier(x) && hasref(x, meta_dict) # assume is IDENT + r = refof(x, meta_dict) if r isa Binding && r.val isa EXPR if isassignment(r.val) && r.val.args[2] != x - return infer_eltype(r.val.args[2], state) + return infer_eltype(r.val.args[2], state, meta_dict) end end - elseif headof(x) === :ref && hasref(x.args[1]) - r = refof(x.args[1]) + elseif headof(x) === :ref && hasref(x.args[1], meta_dict) + r = refof(x.args[1], meta_dict) if r isa Binding && CoreTypes.isdatatype(r.type) return r end @@ -320,16 +320,16 @@ function infer_eltype(x::EXPR, state) elseif headof(x.args[2]) === :CHAR && headof(x.args[3]) === :CHAR return CoreTypes.Char end - elseif 
hasbinding(x) && isdeclaration(x) && length(x.args) == 2 + elseif hasbinding(x, meta_dict) && isdeclaration(x) && length(x.args) == 2 return maybe_get_vec_eltype(x.args[2]) end end function maybe_get_vec_eltype(t) if iscurly(t) - lhs_ref = refof_maybe_getfield(t.args[1]) + lhs_ref = refof_maybe_getfield(t.args[1], meta_dict) if lhs_ref isa SymbolServer.DataTypeStore && CoreTypes.isarray(lhs_ref) && length(t.args) > 1 - refof(t.args[2]) + refof(t.args[2], meta_dict) end end end diff --git a/src/StaticLint/utils.jl b/src/StaticLint/utils.jl index fb187cc..054718e 100644 --- a/src/StaticLint/utils.jl +++ b/src/StaticLint/utils.jl @@ -1,12 +1,12 @@ quoted(x) = headof(x) === :quote || headof(x) === :quotenode unquoted(x) = isunarycall(x) && valof(x.args[1]) == "\$" -function remove_ref(x::EXPR) - if hasref(x) && refof(x) isa Binding && refof(x).refs isa Vector - for ia in enumerate(refof(x).refs) +function remove_ref(x::EXPR, meta_dict) + if hasref(x, meta_dict) && refof(x, meta_dict) isa Binding && refof(x, meta_dict).refs isa Vector + for ia in enumerate(refof(x, meta_dict).refs) if ia[2] == x - deleteat!(refof(x).refs, ia[1]) - setref!(x, nothing) + deleteat!(refof(x, meta_dict).refs, ia[1]) + setref!(x, nothing, meta_dict) return end end @@ -14,68 +14,68 @@ function remove_ref(x::EXPR) end end -function clear_binding(x::EXPR) - if bindingof(x) isa Binding - for r in bindingof(x).refs +function clear_binding(x::EXPR, meta_dict) + if bindingof(x, meta_dict) isa Binding + for r in bindingof(x, meta_dict).refs if r isa EXPR - setref!(r, nothing) + setref!(r, nothing, meta_dict) elseif r isa Binding - if r.type == bindingof(x) + if r.type == bindingof(x, meta_dict) r.type = nothing else - clear_binding(r) + clear_binding(r, meta_dict) end end end - x.meta.binding = nothing + getmeta(x, meta_dict).binding = nothing end end -function clear_scope(x::EXPR) - if hasmeta(x) && scopeof(x) isa Scope - setparent!(scopeof(x), nothing) - empty!(scopeof(x).names) - if headof(x) === 
:file && scopeof(x).modules isa Dict && scopehasmodule(scopeof(x), :Base) && scopehasmodule(scopeof(x), :Core) - m1, m2 = getscopemodule(scopeof(x), :Base), getscopemodule(scopeof(x), :Core) - empty!(scopeof(x).modules) - addmoduletoscope!(scopeof(x), m1) - addmoduletoscope!(scopeof(x), m2) +function clear_scope(x::EXPR, meta_dict) + if hasmeta(x, meta_dict) && scopeof(x, meta_dict) isa Scope + setparent!(scopeof(x, meta_dict), nothing) + empty!(scopeof(x, meta_dict).names) + if headof(x) === :file && scopeof(x, meta_dict).modules isa Dict && scopehasmodule(scopeof(x, meta_dict), :Base) && scopehasmodule(scopeof(x, meta_dict), :Core) + m1, m2 = getscopemodule(scopeof(x, meta_dict), :Base), getscopemodule(scopeof(x, meta_dict), :Core) + empty!(scopeof(x, meta_dict).modules) + addmoduletoscope!(scopeof(x, meta_dict), m1) + addmoduletoscope!(scopeof(x, meta_dict), m2) else - scopeof(x).modules = nothing + scopeof(x, meta_dict).modules = nothing end - if scopeof(x).overloaded !== nothing - empty!(scopeof(x).overloaded) + if scopeof(x, meta_dict).overloaded !== nothing + empty!(scopeof(x, meta_dict).overloaded) end end end -function clear_ref(x::EXPR) - if refof(x) isa Binding - if refof(x).refs isa Vector - for i in 1:length(refof(x).refs) - if refof(x).refs[i] == x - deleteat!(refof(x).refs, i) +function clear_ref(x::EXPR, meta_dict) + if refof(x, meta_dict) isa Binding + if refof(x, meta_dict).refs isa Vector + for i in 1:length(refof(x, meta_dict).refs) + if refof(x, meta_dict).refs[i] == x + deleteat!(refof(x, meta_dict).refs, i) break end end end - setref!(x, nothing) - elseif refof(x) !== nothing - setref!(x, nothing) + setref!(x, nothing, meta_dict) + elseif refof(x, meta_dict) !== nothing + setref!(x, nothing, meta_dict) end end -function clear_error(x::EXPR) - if hasmeta(x) && x.meta.error !== nothing - x.meta.error = nothing +function clear_error(x::EXPR, meta_dict) + if hasmeta(x, meta_dict) && getmeta(x, meta_dict).error !== nothing + getmeta(x, 
meta_dict).error = nothing end end -function clear_meta(x::EXPR) - clear_binding(x) - clear_ref(x) - clear_scope(x) - clear_error(x) +function clear_meta(x::EXPR, meta_dict) + clear_binding(x, meta_dict) + clear_ref(x, meta_dict) + clear_scope(x, meta_dict) + clear_error(x, meta_dict) if x.args !== nothing for a in x.args - clear_meta(a) + clear_meta(a, meta_dict) end end # if x.trivia !== nothing @@ -85,11 +85,11 @@ function clear_meta(x::EXPR) # end end -function get_root_method(b, server) +function get_root_method(b) return b end -function get_root_method(b::Binding, server) +function get_root_method(b::Binding) if CoreTypes.isfunction(b.type) && !isempty(b.refs) first(b.refs) else @@ -97,24 +97,24 @@ function get_root_method(b::Binding, server) end end -function retrieve_delayed_scope(x) - if (CSTParser.defines_function(x) || CSTParser.defines_macro(x)) && scopeof(x) !== nothing - if parentof(scopeof(x)) !== nothing - return parentof(scopeof(x)) +function retrieve_delayed_scope(x, meta_dict) + if (CSTParser.defines_function(x) || CSTParser.defines_macro(x)) && scopeof(x, meta_dict) !== nothing + if parentof(scopeof(x, meta_dict)) !== nothing + return parentof(scopeof(x, meta_dict)) else - return scopeof(x) + return scopeof(x, meta_dict) end else - return retrieve_scope(x) + return retrieve_scope(x, meta_dict) end return nothing end -function retrieve_scope(x) - if scopeof(x) !== nothing - return scopeof(x) +function retrieve_scope(x, meta_dict) + if scopeof(x, meta_dict) !== nothing + return scopeof(x, meta_dict) elseif parentof(x) isa EXPR - return retrieve_scope(parentof(x)) + return retrieve_scope(parentof(x), meta_dict) end return end @@ -153,7 +153,7 @@ function find_exported_names(x::EXPR) expr = x.args[3].args[i] if headof(expr) === :export for j = 2:length(expr.args) - if isidentifier(expr.args[j]) && hasref(expr.args[j]) + if isidentifier(expr.args[j]) && hasref(expr.args[j], meta_dict) push!(exported_vars, expr.args[j]) end end @@ -186,17 +186,17 @@ 
isexportedby(k::String, m::SymbolServer.ModuleStore) = isexportedby(Symbol(k), m isexportedby(x::EXPR, m::SymbolServer.ModuleStore) = isexportedby(valof(x), m) isexportedby(k, m::SymbolServer.ModuleStore) = false -function retrieve_toplevel_scope(x::EXPR) - if scopeof(x) !== nothing && is_toplevel_scope(x) - return scopeof(x) +function retrieve_toplevel_scope(x::EXPR, meta_dict) + if scopeof(x, meta_dict) !== nothing && is_toplevel_scope(x) + return scopeof(x, meta_dict) elseif parentof(x) isa EXPR - return retrieve_toplevel_scope(parentof(x)) + return retrieve_toplevel_scope(parentof(x), meta_dict) else @info "Tried to reach toplevel scope, no scope found. Final expression $(headof(x))" return nothing end end -retrieve_toplevel_scope(s::Scope) = (is_toplevel_scope(s) || !(parentof(s) isa Scope)) ? s : retrieve_toplevel_scope(parentof(s)) +retrieve_toplevel_scope(s::Scope, meta_dict) = (is_toplevel_scope(s) || !(parentof(s) isa Scope)) ? s : retrieve_toplevel_scope(parentof(s), meta_dict) retrieve_toplevel_or_func_scope(s::Scope) = (is_toplevel_scope(s) || defines_function(s.expr) || !(parentof(s) isa Scope)) ? s : retrieve_toplevel_or_func_scope(parentof(s)) is_toplevel_scope(s::Scope) = is_toplevel_scope(s.expr) @@ -302,9 +302,9 @@ function is_nameof_func(name) f !== nothing && CSTParser.get_name(f) == name end -function loose_refs(b::Binding) +function loose_refs(b::Binding, meta_dict, root_dict, rt) b.val isa EXPR || return b.refs # to account for `#global` binding which doesn't have a val - scope = retrieve_scope(b.val) + scope = retrieve_scope(b.val, meta_dict) scope isa Scope && isidentifier(b.name) || return b.refs name_str = valofid(b.name) name_str isa String || return b.refs @@ -313,7 +313,7 @@ function loose_refs(b::Binding) scope = parentof(scope) end state = LooseRefs(scope.expr, name_str, scope, []) - state(scope.expr) + state(scope.expr, meta_dict, root_dict, rt) vcat([r.refs for r in state.result]...) 
end @@ -324,14 +324,14 @@ mutable struct LooseRefs result::Vector{Binding} end -function (state::LooseRefs)(x::EXPR) - if hasbinding(x) - ex = bindingof(x).name +function (state::LooseRefs)(x::EXPR, meta_dict, root_dict, rt) + if hasbinding(x, meta_dict) + ex = bindingof(x, meta_dict).name if isidentifier(ex) && valofid(ex) == state.name - push!(state.result, bindingof(x)) + push!(state.result, bindingof(x, meta_dict)) end end - if !hasscope(x) || (hasscope(x) && ((is_soft_scope(scopeof(x)) && !scopehasbinding(scopeof(x), state.name)) || scopeof(x) == state.scope)) - traverse(x, state) + if !hasscope(x, meta_dict) || (hasscope(x, meta_dict) && ((is_soft_scope(scopeof(x, meta_dict)) && !scopehasbinding(scopeof(x, meta_dict), state.name)) || scopeof(x, meta_dict) == state.scope)) + traverse(x, state, meta_dict, root_dict, rt) end end diff --git a/src/layer_diagnostics.jl b/src/layer_diagnostics.jl index 3ae3d0f..04db65f 100644 --- a/src/layer_diagnostics.jl +++ b/src/layer_diagnostics.jl @@ -69,6 +69,11 @@ Salsa.@derived function derived_diagnostics(rt, uri) append!(results, Diagnostic(i.range, :error, i.message, "Testitem") for i in tis.testerrors) end + if is_path_julia_file(uri2filepath(uri)) + sl = derived_static_lint_diagnostics(rt, uri) + append!(results, sl) + end + if (is_path_lintconfig_file(uri2filepath(uri)) || is_path_project_file(uri2filepath(uri)) || is_path_manifest_file(uri2filepath(uri)) ) && get(lint_config, "toml-syntax-errors", true) == true toml_syntax_errors = derived_toml_syntax_diagnostics(rt, uri) append!(results, toml_syntax_errors) diff --git a/src/layer_semantics.jl b/src/layer_semantics.jl new file mode 100644 index 0000000..026dbeb --- /dev/null +++ b/src/layer_semantics.jl @@ -0,0 +1,84 @@ +function StaticLint.hasfile(rt, path) + return derived_has_file(rt, filepath2uri(path)) +end + +Salsa.@derived function derived_external_env(rt, uri) + return StaticLint.ExternalEnv(Dict{Symbol,SymbolServer.ModuleStore}(:Base => 
SymbolServer.stdlibs[:Base], :Core => SymbolServer.stdlibs[:Core]), SymbolServer.collect_extended_methods(SymbolServer.stdlibs), Symbol[]) +end + +Salsa.@derived function derived_static_lint_meta(rt) + meta_dict = Dict{UInt64,StaticLint.Meta}() + root_dict = Dict{URI,URI}() + + julia_files = derived_julia_files(rt) + + for uri in julia_files + cst = derived_julia_legacy_syntax_tree(rt, uri) + StaticLint.ensuremeta(cst, meta_dict) + + StaticLint.getmeta(cst, meta_dict).error = :doc # TODO WHAT IS OUR DOC?? + end + + for uri in julia_files + # setroot(doc, doc) + root_dict[uri] = uri + end + + for uri in julia_files + cst = derived_julia_legacy_syntax_tree(rt, uri) + env = derived_external_env(rt, uri) + + StaticLint.semantic_pass(uri, cst, env, meta_dict, root_dict, rt) + + # lint! + end + + for file in julia_files + cst = derived_julia_legacy_syntax_tree(rt, file) + env = derived_external_env(rt, file) + + StaticLint.check_all(cst, StaticLint.LintOptions(), env, meta_dict) + end + + return meta_dict +end + +Salsa.@derived function derived_static_lint_diagnostics(rt, uri) + + meta_dict = derived_static_lint_meta(rt) + + cst = derived_julia_legacy_syntax_tree(rt, uri) + env = derived_external_env(rt, uri) + + # errs = StaticLint.collect_hints(cst, getenv(doc), doc.server.lint_missingrefs) + errs = StaticLint.collect_hints(cst, env, meta_dict, false) + + res = Diagnostic[] + + for err in errs + rng = err[1]:err[1]+err[2].fullspan + if StaticLint.headof(err[2]) === :errortoken + # push!(out, Diagnostic(rng, DiagnosticSeverities.Error, missing, missing, "Julia", "Parsing error", missing, missing)) + elseif CSTParser.isidentifier(err[2]) && !StaticLint.haserror(err[2], meta_dict) + push!(res, Diagnostic(rng, :Warning, "Missing reference: $(err[2].val)", "StaticLint.jl")) + # push!(out, Diagnostic(rng, DiagnosticSeverities.Warning, missing, missing, "Julia", "Missing reference: $(err[2].val)", missing, missing)) + elseif StaticLint.haserror(err[2], meta_dict) && 
StaticLint.errorof(err[2], meta_dict) isa StaticLint.LintCodes + code = StaticLint.errorof(err[2], meta_dict) + # description = get(StaticLint.LintCodeDescriptions, code, "") + # severity, tags = if code in (StaticLint.UnusedFunctionArgument, StaticLint.UnusedBinding, StaticLint.UnusedTypeParameter) + # DiagnosticSeverities.Hint, [DiagnosticTags.Unnecessary] + # else + # DiagnosticSeverities.Information, missing + # end + # code_details = if isdefined(StaticLint, :IndexFromLength) && code === StaticLint.IndexFromLength + # CodeDescription(URI("https://docs.julialang.org/en/v1/base/arrays/#Base.eachindex")) + # else + # missing + # end + # push!(out, Diagnostic(rng, severity, string(code), code_details, "Julia", description, tags, missing)) + push!(res, Diagnostic(rng, :Warning, string(code), "StaticLint.jl")) + end + end + + return res +end \ No newline at end of file diff --git a/src/layer_syntax_trees.jl b/src/layer_syntax_trees.jl index 43f70cc..dd0469e 100644 --- a/src/layer_syntax_trees.jl +++ b/src/layer_syntax_trees.jl @@ -37,6 +37,16 @@ Salsa.@derived function derived_julia_syntax_diagnostics(rt, uri) return diag_results end +Salsa.@derived function derived_julia_legacy_syntax_tree(rt, uri) + tf = input_text_file(rt, uri) + + content = tf.content.content + + cst = CSTParser.parse(content, true) + + return cst +end + Salsa.@derived function derived_toml_parse_result(rt, uri) tf = input_text_file(rt, uri) From 9ff8415ab9b899eb6b18006763fdc36c3ff703fa Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Mon, 24 Nov 2025 20:57:49 -0800 Subject: [PATCH 04/24] Fix diagnostics --- src/layer_semantics.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/layer_semantics.jl b/src/layer_semantics.jl index 026dbeb..ae07680 100644 --- a/src/layer_semantics.jl +++ b/src/layer_semantics.jl @@ -38,7 +38,7 @@ Salsa.@derived function derived_static_lint_meta(rt) env = derived_external_env(rt, file) StaticLint.check_all(cst, 
StaticLint.LintOptions(), env, meta_dict) - end + end return meta_dict end @@ -56,11 +56,11 @@ Salsa.@derived function derived_static_lint_diagnostics(rt, uri) res = Diagnostic[] for err in errs - rng = err[1]:err[1]+err[2].fullspan + rng = err[1]+1:err[1]+err[2].fullspan+1 if StaticLint.headof(err[2]) === :errortoken # push!(out, Diagnostic(rng, DiagnosticSeverities.Error, missing, missing, "Julia", "Parsing error", missing, missing)) elseif CSTParser.isidentifier(err[2]) && !StaticLint.haserror(err[2], meta_dict) - push!(res, Diagnostic(rng, :Warning, "Missing reference: $(err[2].val)", "StaticLint.jl")) + push!(res, Diagnostic(rng, :warning, "Missing reference: $(err[2].val)", "StaticLint.jl")) # push!(out, Diagnostic(rng, DiagnosticSeverities.Warning, missing, missing, "Julia", "Missing reference: $(err[2].val)", missing, missing)) elseif StaticLint.haserror(err[2], meta_dict) && StaticLint.errorof(err[2], meta_dict) isa StaticLint.LintCodes code = StaticLint.errorof(err[2], meta_dict) @@ -76,9 +76,9 @@ Salsa.@derived function derived_static_lint_diagnostics(rt, uri) # missing # end # push!(out, Diagnostic(rng, severity, string(code), code_details, "Julia", description, tags, missing)) - push!(res, Diagnostic(rng, :Warning, string(code), "StaticLint.jl")) + push!(res, Diagnostic(rng, :warning, string(code), "StaticLint.jl")) end end return res -end \ No newline at end of file +end From 39744f28fe6c0d0280159b321178809b29af3a90 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Mon, 24 Nov 2025 21:50:28 -0800 Subject: [PATCH 05/24] Small cleanup of semantic layer --- src/layer_semantics.jl | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/layer_semantics.jl b/src/layer_semantics.jl index ae07680..541ae5c 100644 --- a/src/layer_semantics.jl +++ b/src/layer_semantics.jl @@ -20,7 +20,6 @@ Salsa.@derived function derived_static_lint_meta(rt) end for uri in julia_files - # setroot(doc, doc) root_dict[uri] = uri end @@ -29,8 +28,6 @@ Salsa.@derived function 
derived_static_lint_meta(rt) env = derived_external_env(rt, uri) StaticLint.semantic_pass(uri, cst, env, meta_dict, root_dict, rt) - - # lint! end for file in julia_files From 86e3a7acb8413ecee0f809b845f0a970095316cf Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Mon, 24 Nov 2025 22:04:28 -0800 Subject: [PATCH 06/24] Make diagnostics richer --- src/JuliaWorkspaces.jl | 2 +- src/layer_diagnostics.jl | 6 ++--- ...ayer_semantics.jl => layer_static_lint.jl} | 23 ++++++++----------- src/layer_syntax_trees.jl | 4 +++- src/types.jl | 4 ++++ 5 files changed, 20 insertions(+), 19 deletions(-) rename src/{layer_semantics.jl => layer_static_lint.jl} (72%) diff --git a/src/JuliaWorkspaces.jl b/src/JuliaWorkspaces.jl index a792ad6..f2f1f5e 100644 --- a/src/JuliaWorkspaces.jl +++ b/src/JuliaWorkspaces.jl @@ -31,7 +31,7 @@ include("sourcetext.jl") include("inputs.jl") include("layer_files.jl") include("layer_syntax_trees.jl") -include("layer_semantics.jl") +include("layer_static_lint.jl") include("layer_projects.jl") include("layer_testitems.jl") include("layer_diagnostics.jl") diff --git a/src/layer_diagnostics.jl b/src/layer_diagnostics.jl index 04db65f..4a5b25c 100644 --- a/src/layer_diagnostics.jl +++ b/src/layer_diagnostics.jl @@ -14,11 +14,11 @@ Salsa.@derived function derived_lintconfig_diagnostics(rt, uri) for (k,v) in pairs(toml_content) if !(k in valid_lint_configs) - push!(res, Diagnostic(1:1, :error, "Invalid lint configuration $k.", "JuliaWorkspaces.jl")) + push!(res, Diagnostic(1:1, :error, "Invalid lint configuration $k.", nothing, Symbol[], "JuliaWorkspaces.jl")) end if !(v isa Bool) - push!(res, Diagnostic(1:1, :error, "Invalid lint configuration value for $k, ony `true` or `false` are valid.", "JuliaWorkspaces.jl")) + push!(res, Diagnostic(1:1, :error, "Invalid lint configuration value for $k, ony `true` or `false` are valid.", nothing, Symbol[], "JuliaWorkspaces.jl")) end end @@ -66,7 +66,7 @@ Salsa.@derived function derived_diagnostics(rt, uri) if 
is_path_julia_file(uri2filepath(uri)) && get(lint_config, "testitem-errors", true) == true tis = derived_testitems(rt, uri) - append!(results, Diagnostic(i.range, :error, i.message, "Testitem") for i in tis.testerrors) + append!(results, Diagnostic(i.range, :error, i.message, nothing, Symbol[], "Testitem") for i in tis.testerrors) end if is_path_julia_file(uri2filepath(uri)) diff --git a/src/layer_semantics.jl b/src/layer_static_lint.jl similarity index 72% rename from src/layer_semantics.jl rename to src/layer_static_lint.jl index 541ae5c..15ea3b5 100644 --- a/src/layer_semantics.jl +++ b/src/layer_static_lint.jl @@ -57,23 +57,18 @@ Salsa.@derived function derived_static_lint_diagnostics(rt, uri) if StaticLint.headof(err[2]) === :errortoken # push!(out, Diagnostic(rng, DiagnosticSeverities.Error, missing, missing, "Julia", "Parsing error", missing, missing)) elseif CSTParser.isidentifier(err[2]) && !StaticLint.haserror(err[2], meta_dict) - push!(res, Diagnostic(rng, :warning, "Missing reference: $(err[2].val)", "StaticLint.jl")) - # push!(out, Diagnostic(rng, DiagnosticSeverities.Warning, missing, missing, "Julia", "Missing reference: $(err[2].val)", missing, missing)) + push!(res, Diagnostic(rng, :warning, "Missing reference: $(err[2].val)", nothing, Symbol[], "StaticLint.jl")) elseif StaticLint.haserror(err[2], meta_dict) && StaticLint.errorof(err[2], meta_dict) isa StaticLint.LintCodes code = StaticLint.errorof(err[2], meta_dict) - # description = get(StaticLint.LintCodeDescriptions, code, "") - # severity, tags = if code in (StaticLint.UnusedFunctionArgument, StaticLint.UnusedBinding, StaticLint.UnusedTypeParameter) - # DiagnosticSeverities.Hint, [DiagnosticTags.Unnecessary] - # else - # DiagnosticSeverities.Information, missing - # end - # code_details = if isdefined(StaticLint, :IndexFromLength) && code === StaticLint.IndexFromLength - # CodeDescription(URI("https://docs.julialang.org/en/v1/base/arrays/#Base.eachindex")) - # else - # missing - # end + 
description = get(StaticLint.LintCodeDescriptions, code, "") + severity, tags = if code in (StaticLint.UnusedFunctionArgument, StaticLint.UnusedBinding, StaticLint.UnusedTypeParameter) + :hint, Symbol[:Unnecessary] + else + :information, Symbol[] + end + code_details = code === StaticLint.IndexFromLength ? URI("https://docs.julialang.org/en/v1/base/arrays/#Base.eachindex") : nothing # push!(out, Diagnostic(rng, severity, string(code), code_details, "Julia", description, tags, missing)) - push!(res, Diagnostic(rng, :warning, string(code), "StaticLint.jl")) + push!(res, Diagnostic(rng, severity, description, code_details, tags, "StaticLint.jl")) end end diff --git a/src/layer_syntax_trees.jl b/src/layer_syntax_trees.jl index dd0469e..e4d363f 100644 --- a/src/layer_syntax_trees.jl +++ b/src/layer_syntax_trees.jl @@ -30,6 +30,8 @@ Salsa.@derived function derived_julia_syntax_diagnostics(rt, uri) _range(i), i.level, i.message, + nothing, + Symbol[], "JuliaSyntax.jl" ) end @@ -55,7 +57,7 @@ Salsa.@derived function derived_toml_parse_result(rt, uri) parse_result = Pkg.TOML.tryparse(content) if parse_result isa Pkg.TOML.ParserError - return parse_result.table, Diagnostic[Diagnostic(parse_result.pos:parse_result.pos, :error, Base.TOML.format_error_message_for_err_type(parse_result), "TOML.jl")] + return parse_result.table, Diagnostic[Diagnostic(parse_result.pos:parse_result.pos, :error, Base.TOML.format_error_message_for_err_type(parse_result), nothing, Symbol[], "TOML.jl")] else return parse_result, Diagnostic[] end diff --git a/src/types.jl b/src/types.jl index b6e75e0..930381a 100644 --- a/src/types.jl +++ b/src/types.jl @@ -252,12 +252,16 @@ A diagnostic struct, consisting of range, severity, message, and source. 
- range::UnitRange{Int64} - severity::Symbol - message::String +- uri::Union{Nothing,URI} +- tags::Vector{Symbol} - source::String """ @auto_hash_equals struct Diagnostic range::UnitRange{Int64} severity::Symbol message::String + uri::Union{Nothing,URI} + tags::Vector{Symbol} source::String end From de3b6a45f9ddeb71c68ab408ca5975d4c155c3b9 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Mon, 24 Nov 2025 22:23:28 -0800 Subject: [PATCH 07/24] Fix some spelling --- src/layer_static_lint.jl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/layer_static_lint.jl b/src/layer_static_lint.jl index 15ea3b5..f16b518 100644 --- a/src/layer_static_lint.jl +++ b/src/layer_static_lint.jl @@ -62,12 +62,11 @@ Salsa.@derived function derived_static_lint_diagnostics(rt, uri) code = StaticLint.errorof(err[2], meta_dict) description = get(StaticLint.LintCodeDescriptions, code, "") severity, tags = if code in (StaticLint.UnusedFunctionArgument, StaticLint.UnusedBinding, StaticLint.UnusedTypeParameter) - :hint, Symbol[:Unnecessary] + :hint, Symbol[:unnecessary] else :information, Symbol[] end code_details = code === StaticLint.IndexFromLength ? 
URI("https://docs.julialang.org/en/v1/base/arrays/#Base.eachindex") : nothing - # push!(out, Diagnostic(rng, severity, string(code), code_details, "Julia", description, tags, missing)) push!(res, Diagnostic(rng, severity, description, code_details, tags, "StaticLint.jl")) end end From eeb2f57a802c7d4d7cc2d6b8716346132c3b72ca Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:22:18 -0800 Subject: [PATCH 08/24] Start on dynamic feature --- scripts/Project.toml | 5 +++ scripts/update_app_environments.jl | 38 ++++++++++++++++++ scripts/update_vendored_packages.jl | 62 +++++++++++++++++++++++++++++ src/JuliaWorkspaces.jl | 1 + src/dynamic_feature.jl | 56 ++++++++++++++++++++++++++ src/types.jl | 8 +++- 6 files changed, 168 insertions(+), 2 deletions(-) create mode 100644 scripts/Project.toml create mode 100644 scripts/update_app_environments.jl create mode 100644 scripts/update_vendored_packages.jl create mode 100644 src/dynamic_feature.jl diff --git a/scripts/Project.toml b/scripts/Project.toml new file mode 100644 index 0000000..39abbec --- /dev/null +++ b/scripts/Project.toml @@ -0,0 +1,5 @@ +[deps] +GitHub = "bc5e4493-9b4d-5f90-b8aa-2b2bcaad7a26" +LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433" +Query = "1a8c2f83-1ff3-5112-b086-8aa67b057ba1" +TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76" diff --git a/scripts/update_app_environments.jl b/scripts/update_app_environments.jl new file mode 100644 index 0000000..3e55450 --- /dev/null +++ b/scripts/update_app_environments.jl @@ -0,0 +1,38 @@ +julia_versions = [ + "1.0", + "1.1", + "1.2", + "1.3", + "1.4", + "1.5", + "1.6", + "1.7", + "1.8", + "1.9", + "1.10", + "1.11", + "1.12" +] + +for i in julia_versions + version_path = normpath(joinpath(@__DIR__, "../testprocess/environments/v$i")) + mkpath(version_path) + + run(Cmd(`julia +$i --project=. 
-e 'using Pkg; Pkg.develop(PackageSpec(path="../../TestItemServer"))'`, dir=version_path)) +end + +version_path = normpath(joinpath(@__DIR__, "../testprocess/environments/fallback")) +mkpath(version_path) +run(Cmd(`julia +nightly --project=. -e 'using Pkg; Pkg.develop(PackageSpec(path="../../TestItemServer"))'`, dir=version_path)) + +function replace_backslash_in_manifest(version) + filename = joinpath(@__DIR__, "../testprocess/environments/v$version/Manifest.toml") + manifest_content = read(filename, String) + + new_content = replace(manifest_content, "\\\\"=>'/') + + write(filename, new_content) +end + +replace_backslash_in_manifest("1.0") +replace_backslash_in_manifest("1.1") diff --git a/scripts/update_vendored_packages.jl b/scripts/update_vendored_packages.jl new file mode 100644 index 0000000..a053dea --- /dev/null +++ b/scripts/update_vendored_packages.jl @@ -0,0 +1,62 @@ +using Pkg +Pkg.activate(@__DIR__) +Pkg.instantiate() + +using GitHub, Query, LibGit2, TOML + +creds = LibGit2.GitCredential(GitConfig(), "https://github.com") +creds.password===nothing && error("Did not find credentials for github.com in the git credential manager.") +myauth = GitHub.authenticate(read(creds.password, String)) +Base.shred!(creds.password) + +packages = Dict( + # "CodeTracking" => "timholy/CodeTracking.jl", + # "CoverageTools" => "JuliaCI/CoverageTools.jl", + # "DebugAdapter" => "julia-vscode/DebugAdapter.jl", + # "JSON" => "", We skip this as we want to stay on an old version that has one less extra dependency + "JSONRPC" => "julia-vscode/JSONRPC.jl", + # "JuliaInterpreter" => "JuliaDebug/JuliaInterpreter.jl", + # "LoweredCodeUtils" => "JuliaDebug/LoweredCodeUtils.jl", + # "OrderedCollections" => "JuliaCollections/OrderedCollections.jl", + # "Revise" => "timholy/Revise.jl", + # "TestEnv" => "JuliaTesting/TestEnv.jl", + # "URIParser" => "JuliaWeb/URIParser.jl", + # "CancellationTokens" => "davidanthoff/CancellationTokens.jl" +) + +latest_versions = 
Dict{String,VersionNumber}() +current_versions = Dict{String,VersionNumber}() + +for (pkg,github_location) in packages + max_version = GitHub.references(github_location, auth=myauth)[1] |> + @map(_.ref) |> + @filter(!isnothing(_) && startswith(_, "refs/tags/v")) |> + @map(VersionNumber(_[12:end])) |> + maximum + + latest_versions[pkg] = max_version + + project_content = TOML.parsefile(joinpath(@__DIR__, "../packages/$pkg/Project.toml")) + current_version = VersionNumber(project_content["version"]) + + current_versions[pkg] = current_version + println("Package: $pkg, latest version: $max_version, current version: $current_version") +end + +for (pkg,github_location) in packages + latest_version = latest_versions[pkg] + current_version = current_versions[pkg] + + + if latest_version != current_version + run( + addenv( + Cmd( + `git subtree pull --prefix packages/$pkg https://github.com/$github_location v$latest_version --squash`, + dir=normpath(joinpath(@__DIR__, "..")) + ), + Dict("GIT_MERGE_AUTOEDIT" => "no") + ) + ) + end +end diff --git a/src/JuliaWorkspaces.jl b/src/JuliaWorkspaces.jl index f2f1f5e..b64a24f 100644 --- a/src/JuliaWorkspaces.jl +++ b/src/JuliaWorkspaces.jl @@ -26,6 +26,7 @@ using .URIs2: filepath2uri, uri2filepath using .URIs2: URI, @uri_str include("exception_types.jl") +include("dynamic_feature.jl") include("types.jl") include("sourcetext.jl") include("inputs.jl") diff --git a/src/dynamic_feature.jl b/src/dynamic_feature.jl new file mode 100644 index 0000000..40a5b30 --- /dev/null +++ b/src/dynamic_feature.jl @@ -0,0 +1,56 @@ +struct DynamicJuliaProcess + project::String + proc::Union{Nothing, Base.Process} + + function DynamicJuliaProcess(project::String) + return new( + project, + nothing + ) + end +end + +function Base.start(djp::DynamicJuliaProcess) +end + +function Base.kill(djp::DynamicJuliaProcess) +end + +struct DynamicFeature + in_channel::Channel{Any} + out_channel::Channel{Any} + procs::Dict{String,DynamicJuliaProcess} + + function 
DynamicFeature() + return new( + Channel{Any}(Inf), + Channel{Any}(Inf), + Dict{String,DynamicJuliaProcess}() + ) + end +end + +function Base.start(df::DynamicFeature) + Threads.@async begin + while true + msg = take!(df.in_channel) + + if msg.command == :set_environments + # Delete Julia procs we no longer need + foreach(setdiff(keys(df.procs), msg.environments)) do i + kill(procs[i]) + delete!(df.procs, i) + end + + # Add new required procs + foreach(msg.environments, setdiff(keys(df.procs), )) do i + djp = DynamicJuliaProcess(i) + df.procs[i] = djp + start(djp) + end + else + error("Unknown message: $msg") + end + end + end +end diff --git a/src/types.jl b/src/types.jl index 930381a..39003ab 100644 --- a/src/types.jl +++ b/src/types.jl @@ -274,13 +274,17 @@ A Julia workspace, consisting of a [`Salsa`](https://github.com/julia-vscode/Sal """ struct JuliaWorkspace runtime::Salsa.Runtime + dynamic_feature::Union{Nothing,DynamicFeature} - function JuliaWorkspace() + function JuliaWorkspace(dynamic=false) rt = Salsa.Runtime() set_input_files!(rt, Set{URI}()) set_input_fallback_test_project!(rt, nothing) - new(rt) + dynamic_feature = dynamic ? 
DynamicFeature() : nothing + dynamic_feature === nothing || start(dynamic_feature) + + new(rt, dynamic_feature) end end From b8789e8bfebed5195fa689d3066230ca84f1d1ac Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:23:35 -0800 Subject: [PATCH 09/24] Squashed 'packages/JSONRPC/' content from commit 9e022c8 git-subtree-dir: packages/JSONRPC git-subtree-split: 9e022c861a42677062dd015872aa16e4d45526d1 --- .github/pull_request_template.md | 5 + .github/workflows/juliaci.yml | 18 ++ .gitignore | 1 + .jlpkgbutler.toml | 1 + CHANGELOG.md | 5 + LICENSE.md | 7 + Project.toml | 22 ++ README.md | 5 + src/JSONRPC.jl | 7 + src/core.jl | 405 +++++++++++++++++++++++++++++++ src/interface_def.jl | 89 +++++++ src/packagedef.jl | 6 + src/pipenames.jl | 7 + src/typed.jl | 141 +++++++++++ test/runtests.jl | 3 + test/shared_test_code.jl | 20 ++ test/test_core.jl | 10 + test/test_interface_def.jl | 34 +++ test/test_typed.jl | 217 +++++++++++++++++ 19 files changed, 1003 insertions(+) create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/juliaci.yml create mode 100644 .gitignore create mode 100644 .jlpkgbutler.toml create mode 100644 CHANGELOG.md create mode 100644 LICENSE.md create mode 100644 Project.toml create mode 100644 README.md create mode 100644 src/JSONRPC.jl create mode 100644 src/core.jl create mode 100644 src/interface_def.jl create mode 100644 src/packagedef.jl create mode 100644 src/pipenames.jl create mode 100644 src/typed.jl create mode 100644 test/runtests.jl create mode 100644 test/shared_test_code.jl create mode 100644 test/test_core.jl create mode 100644 test/test_interface_def.jl create mode 100644 test/test_typed.jl diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000..685d099 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,5 @@ +Fixes #. + +For every PR, please check the following: +- [ ] End-user documentation check. 
If this PR requires end-user documentation in the Julia VS Code extension docs, please add that at https://github.com/julia-vscode/docs. +- [ ] Changelog mention. If this PR should be mentioned in the CHANGELOG for the Julia VS Code extension, please open a PR against https://github.com/julia-vscode/julia-vscode/blob/master/CHANGELOG.md with those changes. diff --git a/.github/workflows/juliaci.yml b/.github/workflows/juliaci.yml new file mode 100644 index 0000000..fb6340c --- /dev/null +++ b/.github/workflows/juliaci.yml @@ -0,0 +1,18 @@ +name: Julia CI + +on: + push: {branches: [main,master]} + pull_request: {types: [opened,synchronize,reopened]} + issue_comment: {types: [created]} + schedule: [{cron: '0 0 * * *'}] + workflow_dispatch: {inputs: {feature: {type: choice, description: What to run, options: [CompatHelper,DocDeploy,LintAndTest,TagBot]}}} + +jobs: + julia-ci: + uses: julia-vscode/testitem-workflow/.github/workflows/juliaci.yml@v1 + with: + include-all-compatible-minor-versions: true + include-rc-versions: true + permissions: write-all + secrets: + codecov_token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..722d5e7 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +.vscode diff --git a/.jlpkgbutler.toml b/.jlpkgbutler.toml new file mode 100644 index 0000000..b72304f --- /dev/null +++ b/.jlpkgbutler.toml @@ -0,0 +1 @@ +template = "bach" diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..4f2702d --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,5 @@ +# Version v2.0.0 +## Breaking changes +- All typed request handlers must accept a final `token` argument from the CancellationTokens package +- Static dispatch handlers no longer receive the endpoint as the first argument +- `get_next_message` and iterating over and endpoint returns a new `Request` instance diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..9872e80 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,7 @@ 
+Copyright 2020-2022 David Anthoff + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Project.toml b/Project.toml new file mode 100644 index 0000000..ebe3af5 --- /dev/null +++ b/Project.toml @@ -0,0 +1,22 @@ +name = "JSONRPC" +uuid = "b9b8584e-8fd3-41f9-ad0c-7255d428e418" +authors = ["David Anthoff "] +version = "2.0.1" + +[deps] +JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +CancellationTokens = "2e8d271d-f2e2-407b-a864-17eb2156783e" +UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[extras] +TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +Sockets = "6462fe0b-24de-5631-8697-dd941f90decc" + +[compat] +JSON = "0.20, 0.21" +julia = "1" +CancellationTokens = "1" + +[targets] +test = ["Test", "TestItemRunner", "Sockets"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..b09c320 --- /dev/null +++ b/README.md @@ -0,0 +1,5 @@ +# JSONRPC + +An implementation for JSON RPC 2.0. 
See [the specification](https://www.jsonrpc.org/specification) for details. + +Currently, only JSON RPC 2.0 is supported. This package can act as both a client & a server. \ No newline at end of file diff --git a/src/JSONRPC.jl b/src/JSONRPC.jl new file mode 100644 index 0000000..fcf702e --- /dev/null +++ b/src/JSONRPC.jl @@ -0,0 +1,7 @@ +module JSONRPC + +import JSON, UUIDs, CancellationTokens + +include("packagedef.jl") + +end diff --git a/src/core.jl b/src/core.jl new file mode 100644 index 0000000..482ccea --- /dev/null +++ b/src/core.jl @@ -0,0 +1,405 @@ +""" + JSONRPCError + +An object representing a JSON-RPC Error. + +Fields: + * code::Int + * msg::AbstractString + * data::Any + +See Section 5.1 of the JSON RPC 2.0 specification for more information. +""" +struct JSONRPCError <: Exception + code::Int + msg::AbstractString + data::Any +end + +""" + SERVER_ERROR_END + +The end of the range of server-reserved errors. + +These are JSON-RPC server errors that are free for the taking +for JSON-RPC server implementations. Applications making use of +this library should NOT define new errors in this range. +""" +const SERVER_ERROR_END = -32000 + +""" + SERVER_ERROR_START + +The start of the range of server-reserved errors. + +These are JSON-RPC server errors that are free for the taking +for JSON-RPC server implementations. Applications making use of +this library should NOT define new errors in this range. +""" +const SERVER_ERROR_START = -32099 + +""" + PARSE_ERROR + +Invalid JSON was received by the server. +An error occurred on the server while parsing the JSON text. +""" +const PARSE_ERROR = -32700 + +""" + INVALID_REQUEST + +The JSON sent is not a valid Request object. +""" +const INVALID_REQUEST = -32600 + +""" + METHOD_NOT_FOUND + +The method does not exist / is not available. +""" +const METHOD_NOT_FOUND = -32601 + +""" + INVALID_PARAMS + +Invalid method parameter(s). +""" +const INVALID_PARAMS = -32602 + +""" + INTERNAL_ERROR + +Internal JSON-RPC error. 
+""" +const INTERNAL_ERROR = -32603 + +""" + RPCErrorStrings + +A `Base.IdDict` containing the mapping of JSON-RPC error codes to a short, descriptive string. + +Use this to hook into `showerror(io::IO, ::JSONRPCError)` for display purposes. A default fallback to `"Unknown"` exists. +""" +const RPCErrorStrings = Base.IdDict( + PARSE_ERROR => "ParseError", + INVALID_REQUEST => "InvalidRequest", + METHOD_NOT_FOUND => "MethodNotFound", + INVALID_PARAMS => "InvalidParams", + INTERNAL_ERROR => "InternalError", + [ i => "ServerError" for i in SERVER_ERROR_START:SERVER_ERROR_END]..., + -32002 => "ServerNotInitialized", + -32001 => "UnknownErrorCode", +) + +function Base.showerror(io::IO, ex::JSONRPCError) + error_code_as_string = get(RPCErrorStrings, ex.code, "Unknown") + + print(io, error_code_as_string) + print(io, ": ") + print(io, ex.msg) + if ex.data !== nothing + print(io, " (") + print(io, ex.data) + print(io, ")") + end +end + +struct Request + method::String + params::Union{Nothing,Dict{String,Any},Vector{Any}} + id::Union{Nothing,String,Int} + token::Union{CancellationTokens.CancellationToken,Nothing} +end + +mutable struct JSONRPCEndpoint{IOIn <: IO,IOOut <: IO} + pipe_in::IOIn + pipe_out::IOOut + + out_msg_queue::Channel{Any} + in_msg_queue::Channel{Request} + + outstanding_requests::Dict{String,Channel{Any}} # These are requests sent where we are waiting for a response + cancellation_sources::Dict{Union{String,Int},CancellationTokens.CancellationTokenSource} # These are the cancellation sources for requests that are not finished processing + no_longer_needed_cancellation_sources::Channel{Union{String,Int}} + + err_handler::Union{Nothing,Function} + + status::Symbol + + read_task::Union{Nothing,Task} + write_task::Union{Nothing,Task} +end + +JSONRPCEndpoint(pipe_in, pipe_out, err_handler = nothing) = + JSONRPCEndpoint( + pipe_in, + pipe_out, + Channel{Any}(Inf), + Channel{Request}(Inf), + Dict{String,Channel{Any}}(), + 
Dict{Union{String,Int},CancellationTokens.CancellationTokenSource}(), + Channel{Union{String,Int}}(Inf), + err_handler, + :idle, + nothing, + nothing) + +function write_transport_layer(stream, response) + response_utf8 = transcode(UInt8, response) + n = length(response_utf8) + write(stream, "Content-Length: $n\r\n\r\n") + write(stream, response_utf8) + flush(stream) +end + +function read_transport_layer(stream) + try + header_dict = Dict{String,String}() + line = chomp(readline(stream)) + # Check whether the socket was closed + if line == "" + return nothing + end + while length(line) > 0 + h_parts = split(line, ":") + header_dict[chomp(h_parts[1])] = chomp(h_parts[2]) + line = chomp(readline(stream)) + end + message_length = parse(Int, header_dict["Content-Length"]) + message_str = String(read(stream, message_length)) + return message_str + catch err + if err isa Base.IOError + return nothing + end + + rethrow(err) + end +end + +Base.isopen(x::JSONRPCEndpoint) = x.status != :closed && isopen(x.pipe_in) && isopen(x.pipe_out) + +function Base.run(x::JSONRPCEndpoint) + x.status == :idle || error("Endpoint is not idle.") + + x.write_task = @async try + try + for msg in x.out_msg_queue + if isopen(x.pipe_out) + write_transport_layer(x.pipe_out, msg) + else + # TODO Reconsider at some point whether this should be treated as an error. + break + end + end + finally + close(x.out_msg_queue) + end + catch err + bt = catch_backtrace() + if x.err_handler !== nothing + x.err_handler(err, bt) + else + Base.display_error(stderr, err, bt) + end + end + + x.read_task = @async try + while true + # First we delete any cancellation sources that are no longer needed. 
We do it this way to avoid a lock + while isready(x.no_longer_needed_cancellation_sources) + no_longer_needed_cs_id = take!(x.no_longer_needed_cancellation_sources) + delete!(x.cancellation_sources, no_longer_needed_cs_id) + end + + # Now handle new messages + message = read_transport_layer(x.pipe_in) + + if message === nothing || x.status == :closed + break + end + + message_dict = JSON.parse(message) + + if haskey(message_dict, "method") + method_name = message_dict["method"] + params = get(message_dict, "params", nothing) + id = get(message_dict, "id", nothing) + cancel_source = id === nothing ? nothing : CancellationTokens.CancellationTokenSource() + cancel_token = cancel_source === nothing ? nothing : CancellationTokens.get_token(cancel_source) + + if method_name == "\$/cancelRequest" + id_of_cancelled_request = params["id"] + cs = get(x.cancellation_sources, id_of_cancelled_request, nothing) # We might have sent the response already + if cs !== nothing + CancellationTokens.cancel(cs) + end + else + if id !== nothing + x.cancellation_sources[id] = cancel_source + end + + request = Request( + method_name, + params, + id, + cancel_token + ) + + try + put!(x.in_msg_queue, request) + catch err + if err isa InvalidStateException + break + else + rethrow(err) + end + end + end + else + # This must be a response + id_of_request = message_dict["id"] + + channel_for_response = x.outstanding_requests[id_of_request] + put!(channel_for_response, message_dict) + end + end + + close(x.in_msg_queue) + + for i in values(x.outstanding_requests) + close(i) + end + + x.status = :closed + catch err + bt = catch_backtrace() + if x.err_handler !== nothing + x.err_handler(err, bt) + else + Base.display_error(stderr, err, bt) + end + end + + x.status = :running +end + +function send_notification(x::JSONRPCEndpoint, method::AbstractString, params) + check_dead_endpoint!(x) + + message = Dict("jsonrpc" => "2.0", "method" => method, "params" => params) + + message_json = 
JSON.json(message) + + put!(x.out_msg_queue, message_json) + + return nothing +end + +function send_request(x::JSONRPCEndpoint, method::AbstractString, params) + check_dead_endpoint!(x) + + id = string(UUIDs.uuid4()) + message = Dict("jsonrpc" => "2.0", "method" => method, "params" => params, "id" => id) + + response_channel = Channel{Any}(1) + x.outstanding_requests[id] = response_channel + + message_json = JSON.json(message) + + put!(x.out_msg_queue, message_json) + + response = take!(response_channel) + + if haskey(response, "result") + return response["result"] + elseif haskey(response, "error") + error_code = response["error"]["code"] + error_msg = response["error"]["message"] + error_data = get(response["error"], "data", nothing) + throw(JSONRPCError(error_code, error_msg, error_data)) + else + throw(JSONRPCError(0, "ERROR AT THE TRANSPORT LEVEL", nothing)) + end +end + +function get_next_message(endpoint::JSONRPCEndpoint) + check_dead_endpoint!(endpoint) + + msg = take!(endpoint.in_msg_queue) + + return msg +end + +function Base.iterate(endpoint::JSONRPCEndpoint, state = nothing) + check_dead_endpoint!(endpoint) + + try + return take!(endpoint.in_msg_queue), nothing + catch err + if err isa InvalidStateException + return nothing + else + rethrow(err) + end + end +end + +function send_success_response(endpoint, original_request::Request, result) + check_dead_endpoint!(endpoint) + + original_request.id === nothing && error("Cannot send a response to a notification.") + + put!(endpoint.no_longer_needed_cancellation_sources, original_request.id) + + response = Dict("jsonrpc" => "2.0", "id" => original_request.id, "result" => result) + + response_json = JSON.json(response) + + put!(endpoint.out_msg_queue, response_json) +end + +function send_error_response(endpoint, original_request::Request, code, message, data) + check_dead_endpoint!(endpoint) + + original_request.id === nothing && error("Cannot send a response to a notification.") + + 
put!(endpoint.no_longer_needed_cancellation_sources, original_request.id) + + response = Dict("jsonrpc" => "2.0", "id" => original_request.id, "error" => Dict("code" => code, "message" => message, "data" => data)) + + response_json = JSON.json(response) + + put!(endpoint.out_msg_queue, response_json) +end + +function Base.close(endpoint::JSONRPCEndpoint) + endpoint.status == :closed && return + + flush(endpoint) + + endpoint.status = :closed + isopen(endpoint.in_msg_queue) && close(endpoint.in_msg_queue) + isopen(endpoint.out_msg_queue) && close(endpoint.out_msg_queue) + + fetch(endpoint.write_task) + # TODO we would also like to close the read Task + # But unclear how to do that without also closing + # the socket, which we don't want to do + # fetch(endpoint.read_task) +end + +function Base.flush(endpoint::JSONRPCEndpoint) + check_dead_endpoint!(endpoint) + + while isready(endpoint.out_msg_queue) + yield() + end +end + +function check_dead_endpoint!(endpoint) + status = endpoint.status + status === :running && return + error("Endpoint is not running, the current state is $(status).") +end diff --git a/src/interface_def.jl b/src/interface_def.jl new file mode 100644 index 0000000..0e047f9 --- /dev/null +++ b/src/interface_def.jl @@ -0,0 +1,89 @@ +abstract type Outbound end + +function JSON.Writer.CompositeTypeWrapper(t::Outbound) + fns = collect(fieldnames(typeof(t))) + dels = Int[] + for i = 1:length(fns) + f = fns[i] + if getfield(t, f) isa Missing + push!(dels, i) + end + end + deleteat!(fns, dels) + JSON.Writer.CompositeTypeWrapper(t, Tuple(fns)) +end + +function JSON.lower(a::Outbound) + if nfields(a) > 0 + JSON.Writer.CompositeTypeWrapper(a) + else + nothing + end +end + +function field_allows_missing(field::Expr) + field.head == :(::) && field.args[2] isa Expr && + field.args[2].head == :curly && field.args[2].args[1] == :Union && + any(i -> i == :Missing, field.args[2].args) +end + +function field_type(field::Expr, typename::String) + if field.args[2] isa 
Expr && field.args[2].head == :curly && field.args[2].args[1] == :Union + if length(field.args[2].args) == 3 && (field.args[2].args[2] == :Missing || field.args[2].args[3] == :Missing) + return field.args[2].args[2] == :Missing ? field.args[2].args[3] : field.args[2].args[2] + else + # We return Any for now, which will lead to no type conversion + return :Any + end + else + return field.args[2] + end +end + +function get_kwsignature_for_field(field::Expr) + fieldname = field.args[1] + fieldtype = field.args[2] + default_value = field_allows_missing(field) ? missing : :(error("You must provide a value for the $fieldname field.")) + + return Expr(:kw, Expr(Symbol("::"), fieldname, fieldtype), default_value) +end + +macro dict_readable(arg) + tname = arg.args[2] isa Expr ? arg.args[2].args[1] : arg.args[2] + count_real_fields = count(field -> !(field isa LineNumberNode), arg.args[3].args) + ex = quote + $((arg)) + + $(count_real_fields > 0 ? :( + function $tname(; $((get_kwsignature_for_field(field) for field in arg.args[3].args if !(field isa LineNumberNode))...)) + $tname($((field.args[1] for field in arg.args[3].args if !(field isa LineNumberNode))...)) + end + ) : nothing) + + function $tname(dict::Dict) + end + end + + fex = :($((tname))()) + for field in arg.args[3].args + if !(field isa LineNumberNode) + fieldname = string(field.args[1]) + fieldtype = field_type(field, string(tname)) + if fieldtype isa Expr && fieldtype.head == :curly && length(fieldtype.args)==3 && fieldtype.args[1]==:Dict + f = :($(fieldtype)(i for i in pairs(dict[$fieldname]))) + elseif fieldtype isa Expr && fieldtype.head == :curly && fieldtype.args[2] != :Any + f = :($(fieldtype.args[2]).(dict[$fieldname])) + elseif fieldtype != :Any + f = :($(fieldtype)(dict[$fieldname])) + else + f = :(dict[$fieldname]) + end + if field_allows_missing(field) + f = :(haskey(dict, $fieldname) ? 
$f : missing) + end + push!(fex.args, f) + end + end + push!(ex.args[end].args[2].args, fex) + return esc(ex) +end diff --git a/src/packagedef.jl b/src/packagedef.jl new file mode 100644 index 0000000..2625005 --- /dev/null +++ b/src/packagedef.jl @@ -0,0 +1,6 @@ +export JSONRPCEndpoint, send_notification, send_request, send_success_response, send_error_response + +include("pipenames.jl") +include("core.jl") +include("typed.jl") +include("interface_def.jl") diff --git a/src/pipenames.jl b/src/pipenames.jl new file mode 100644 index 0000000..1d0e4b5 --- /dev/null +++ b/src/pipenames.jl @@ -0,0 +1,7 @@ +function generate_pipe_name() + if Sys.iswindows() + return "\\\\.\\pipe\\jl-$(UUIDs.uuid4())" + else + return tempname() + end +end diff --git a/src/typed.jl b/src/typed.jl new file mode 100644 index 0000000..e080d81 --- /dev/null +++ b/src/typed.jl @@ -0,0 +1,141 @@ +abstract type AbstractMessageType end + +struct NotificationType{TPARAM} <: AbstractMessageType + method::String +end + +struct RequestType{TPARAM,TR} <: AbstractMessageType + method::String +end + +function NotificationType(method::AbstractString, ::Type{TPARAM}) where TPARAM + return NotificationType{TPARAM}(method) +end + +function RequestType(method::AbstractString, ::Type{TPARAM}, ::Type{TR}) where {TPARAM,TR} + return RequestType{TPARAM,TR}(method) +end + +get_param_type(::NotificationType{TPARAM}) where {TPARAM} = TPARAM +get_param_type(::RequestType{TPARAM,TR}) where {TPARAM,TR} = TPARAM +get_return_type(::RequestType{TPARAM,TR}) where {TPARAM,TR} = TR + +function send(x::JSONRPCEndpoint, request::RequestType{TPARAM,TR}, params::TPARAM) where {TPARAM,TR} + res = send_request(x, request.method, params) + return typed_res(res, TR)::TR +end + +# `send_request` must have returned nothing in this case, we pass this on +# so that we get an error in the typecast at the end of `send` +# if that is not the case. 
+typed_res(res, TR::Type{Nothing}) = res +typed_res(res, TR::Type{<:T}) where {T <: AbstractArray{Any}} = T(res) +typed_res(res, TR::Type{<:AbstractArray{T}}) where T = T.(res) +typed_res(res, TR::Type) = TR(res) + +function send(x::JSONRPCEndpoint, notification::NotificationType{TPARAM}, params::TPARAM) where TPARAM + send_notification(x, notification.method, params) +end + +struct Handler + message_type::AbstractMessageType + func::Function +end + +mutable struct MsgDispatcher + _handlers::Dict{String,Handler} + _currentlyHandlingMsg::Bool + + function MsgDispatcher() + new(Dict{String,Handler}(), false) + end +end + +function Base.setindex!(dispatcher::MsgDispatcher, func::Function, message_type::AbstractMessageType) + dispatcher._handlers[message_type.method] = Handler(message_type, func) +end + +function dispatch_msg(x::JSONRPCEndpoint, dispatcher::MsgDispatcher, msg::Request) + dispatcher._currentlyHandlingMsg = true + try + method_name = msg.method + handler = get(dispatcher._handlers, method_name, nothing) + if handler !== nothing + param_type = get_param_type(handler.message_type) + params = param_type === Nothing ? nothing : param_type <: NamedTuple ? convert(param_type,(;(Symbol(i[1])=>i[2] for i in msg.params)...)) : param_type(msg.params) + + if handler.message_type isa RequestType + res = handler.func(x, params, msg.token) + else + res = handler.func(x, params) + end + + if handler.message_type isa RequestType + if res isa JSONRPCError + send_error_response(x, msg, res.code, res.msg, res.data) + elseif res isa get_return_type(handler.message_type) + send_success_response(x, msg, res) + else + error_msg = "The handler for the '$method_name' request returned a value of type $(typeof(res)), which is not a valid return type according to the request definition." 
+ send_error_response(x, msg, -32603, error_msg, nothing) + error(error_msg) + end + end + else + error("Unknown method $method_name.") + end + finally + dispatcher._currentlyHandlingMsg = false + end +end + +is_currently_handling_msg(d::MsgDispatcher) = d._currentlyHandlingMsg + +macro message_dispatcher(name, body) + quote + function $(esc(name))(x, msg::Request, context=nothing) + method_name = msg.method + + $( + ( + :( + if method_name == $(esc(i.args[2])).method + param_type = get_param_type($(esc(i.args[2]))) + params = param_type === Nothing ? nothing : param_type <: NamedTuple ? convert(param_type,(;(Symbol(i[1])=>i[2] for i in msg.params)...)) : param_type(msg.params) + + if context===nothing + if $(esc(i.args[2])) isa RequestType + res = $(esc(i.args[3]))(params, msg.token) + else + res = $(esc(i.args[3]))(params) + end + else + if $(esc(i.args[2])) isa RequestType + res = $(esc(i.args[3]))(params, context, msg.token) + else + res = $(esc(i.args[3]))(params, context) + end + end + + if $(esc(i.args[2])) isa RequestType + if res isa JSONRPCError + send_error_response(x, msg, res.code, res.msg, res.data) + elseif res isa get_return_type($(esc(i.args[2]))) + send_success_response(x, msg, res) + else + error_msg = "The handler for the '$method_name' request returned a value of type $(typeof(res)), which is not a valid return type according to the request definition." + send_error_response(x, msg, -32603, error_msg, nothing) + error(error_msg) + end + end + + return + end + ) for i in filter(i->i isa Expr, body.args) + )... 
+ ) + + error("Unknown method $method_name.") + end + end +end diff --git a/test/runtests.jl b/test/runtests.jl new file mode 100644 index 0000000..b9e874d --- /dev/null +++ b/test/runtests.jl @@ -0,0 +1,3 @@ +using TestItemRunner + +@run_package_tests diff --git a/test/shared_test_code.jl b/test/shared_test_code.jl new file mode 100644 index 0000000..a40f190 --- /dev/null +++ b/test/shared_test_code.jl @@ -0,0 +1,20 @@ +@testmodule TestStructs begin + using JSONRPC: @dict_readable, Outbound + + export Foo, Foo2 + + @dict_readable struct Foo <: Outbound + fieldA::Int + fieldB::String + fieldC::Union{Missing,String} + fieldD::Union{String,Missing} + end + + @dict_readable struct Foo2 <: Outbound + fieldA::Union{Nothing,Int} + fieldB::Vector{Int} + end + + Base.:(==)(a::Foo2,b::Foo2) = a.fieldA == b.fieldA && a.fieldB == b.fieldB + +end diff --git a/test/test_core.jl b/test/test_core.jl new file mode 100644 index 0000000..9db3e0e --- /dev/null +++ b/test/test_core.jl @@ -0,0 +1,10 @@ +@testitem "Core" begin + @test sprint(showerror, JSONRPC.JSONRPCError(-32700, "FOO", "BAR")) == "ParseError: FOO (BAR)" + @test sprint(showerror, JSONRPC.JSONRPCError(-32600, "FOO", "BAR")) == "InvalidRequest: FOO (BAR)" + @test sprint(showerror, JSONRPC.JSONRPCError(-32601, "FOO", "BAR")) == "MethodNotFound: FOO (BAR)" + @test sprint(showerror, JSONRPC.JSONRPCError(-32602, "FOO", "BAR")) == "InvalidParams: FOO (BAR)" + @test sprint(showerror, JSONRPC.JSONRPCError(-32603, "FOO", "BAR")) == "InternalError: FOO (BAR)" + @test sprint(showerror, JSONRPC.JSONRPCError(-32002, "FOO", "BAR")) == "ServerNotInitialized: FOO (BAR)" + @test sprint(showerror, JSONRPC.JSONRPCError(-32001, "FOO", "BAR")) == "UnknownErrorCode: FOO (BAR)" + @test sprint(showerror, JSONRPC.JSONRPCError(1, "FOO", "BAR")) == "Unknown: FOO (BAR)" +end diff --git a/test/test_interface_def.jl b/test/test_interface_def.jl new file mode 100644 index 0000000..43ee29d --- /dev/null +++ b/test/test_interface_def.jl @@ -0,0 +1,34 
@@ +@testitem "Interface Definition" setup=[TestStructs] begin + using JSON + using .TestStructs: Foo, Foo2 + + @test_throws ErrorException Foo() + + a = Foo(fieldA=1, fieldB="A") + + @test a.fieldA == 1 + @test a.fieldB == "A" + @test a.fieldC === missing + @test a.fieldD === missing + + b = Foo(fieldA=1, fieldB="A", fieldC="B", fieldD="C") + + @test b.fieldA == 1 + @test b.fieldB == "A" + @test b.fieldC == "B" + @test b.fieldD == "C" + + @test Foo(JSON.parse(JSON.json(a))) == a + @test Foo(JSON.parse(JSON.json(b))) == b + + c = Foo2(fieldA=nothing, fieldB=[1,2]) + + @test c.fieldA === nothing + @test c.fieldB == [1,2] + @test Foo2(JSON.parse(JSON.json(c))) == c + + d = Foo2(fieldA=3, fieldB=[1,2]) + @test d.fieldA === 3 + @test d.fieldB == [1,2] + @test Foo2(JSON.parse(JSON.json(d))) == d +end diff --git a/test/test_typed.jl b/test/test_typed.jl new file mode 100644 index 0000000..a9b6098 --- /dev/null +++ b/test/test_typed.jl @@ -0,0 +1,217 @@ +@testitem "Dynamic message dispatcher" setup=[TestStructs] begin + using Sockets + using .TestStructs: Foo, Foo2 + + global_socket_name1 = JSONRPC.generate_pipe_name() + + request1_type = JSONRPC.RequestType("request1", Foo, String) + request2_type = JSONRPC.RequestType("request2", Nothing, String) + notify1_type = JSONRPC.NotificationType("notify1", Vector{String}) + + global g_var = "" + + server_is_up = Base.Condition() + + server_task = @async try + server = listen(global_socket_name1) + notify(server_is_up) + sock = accept(server) + global conn = JSONRPC.JSONRPCEndpoint(sock, sock) + global msg_dispatcher = JSONRPC.MsgDispatcher() + + msg_dispatcher[request1_type] = (conn, params, token) -> begin + @test JSONRPC.is_currently_handling_msg(msg_dispatcher) + params.fieldA == 1 ? 
"YES" : "NO" + end + msg_dispatcher[request2_type] = (conn, params, token) -> JSONRPC.JSONRPCError(-32600, "Our message", nothing) + msg_dispatcher[notify1_type] = (conn, params) -> global g_var = params[1] + + run(conn) + + for msg in conn + @info "Got a message, now dispatching" msg + JSONRPC.dispatch_msg(conn, msg_dispatcher, msg) + @info "Finished dispatching" + end + catch err + Base.display_error(stderr, err, catch_backtrace()) + Base.flush(stderr) + end + + wait(server_is_up) + + sock2 = connect(global_socket_name1) + conn2 = JSONRPCEndpoint(sock2, sock2) + + run(conn2) + + JSONRPC.send(conn2, notify1_type, ["TEST"]) + + res = JSONRPC.send(conn2, request1_type, Foo(fieldA=1, fieldB="FOO")) + + @test res == "YES" + @test g_var == "TEST" + + @test_throws JSONRPC.JSONRPCError(-32600, "Our message", nothing) JSONRPC.send(conn2, request2_type, nothing) + + close(conn2) + close(sock2) + close(conn) + + fetch(server_task) + + # Now we test a faulty server + + global_socket_name2 = JSONRPC.generate_pipe_name() + + server_is_up = Base.Condition() + + server_task2 = @async try + server = listen(global_socket_name2) + notify(server_is_up) + sock = accept(server) + global conn = JSONRPC.JSONRPCEndpoint(sock, sock) + global msg_dispatcher = JSONRPC.MsgDispatcher() + + msg_dispatcher[request2_type] = (conn, params, token)->34 # The request type requires a `String` return, so this tests whether we get an error. 
+ + run(conn) + + for msg in conn + @test_throws ErrorException("The handler for the 'request2' request returned a value of type $Int, which is not a valid return type according to the request definition.") JSONRPC.dispatch_msg(conn, msg_dispatcher, msg) + end + catch err + Base.display_error(stderr, err, catch_backtrace()) + Base.flush(stderr) + end + + wait(server_is_up) + + sock2 = connect(global_socket_name2) + conn2 = JSONRPCEndpoint(sock2, sock2) + + run(conn2) + + @test_throws JSONRPC.JSONRPCError(-32603, "The handler for the 'request2' request returned a value of type $Int, which is not a valid return type according to the request definition.", nothing) JSONRPC.send(conn2, request2_type, nothing) + + close(conn2) + close(sock2) + close(conn) + + fetch(server_task) + +end + +@testitem "check response type" begin + using JSONRPC: typed_res + + @test typed_res(nothing, Nothing) isa Nothing + @test typed_res([1,"2",3], Vector{Any}) isa Vector{Any} + @test typed_res([1,2,3], Vector{Int}) isa Vector{Int} + @test typed_res([1,2,3], Vector{Float64}) isa Vector{Float64} + @test typed_res(['f','o','o'], String) isa String + @test typed_res("foo", String) isa String +end + +@testitem "Static message dispatcher" setup=[TestStructs] begin + using Sockets + using .TestStructs: Foo, Foo2 + + global_socket_name1 = JSONRPC.generate_pipe_name() + + request1_type = JSONRPC.RequestType("request1", Foo, String) + request2_type = JSONRPC.RequestType("request2", Nothing, String) + notify1_type = JSONRPC.NotificationType("notify1", Vector{String}) + + global g_var = "" + + server_is_up = Base.Condition() + + JSONRPC.@message_dispatcher my_dispatcher begin + request1_type => (params, token) -> begin + params.fieldA == 1 ? 
"YES" : "NO" + end + request2_type => (params, token) -> JSONRPC.JSONRPCError(-32600, "Our message", nothing) + notify1_type => (params) -> global g_var = params[1] + end + + server_task = @async try + server = listen(global_socket_name1) + notify(server_is_up) + sock = accept(server) + global conn = JSONRPC.JSONRPCEndpoint(sock, sock) + global msg_dispatcher = JSONRPC.MsgDispatcher() + + run(conn) + + for msg in conn + my_dispatcher(conn, msg) + end + catch err + Base.display_error(stderr, err, catch_backtrace()) + end + + wait(server_is_up) + + sock2 = connect(global_socket_name1) + conn2 = JSONRPCEndpoint(sock2, sock2) + + run(conn2) + + JSONRPC.send(conn2, notify1_type, ["TEST"]) + + res = JSONRPC.send(conn2, request1_type, Foo(fieldA=1, fieldB="FOO")) + + @test res == "YES" + @test g_var == "TEST" + + @test_throws JSONRPC.JSONRPCError(-32600, "Our message", nothing) JSONRPC.send(conn2, request2_type, nothing) + + close(conn2) + close(sock2) + close(conn) + + fetch(server_task) + + # Now we test a faulty server + + global_socket_name2 = JSONRPC.generate_pipe_name() + + server_is_up = Base.Condition() + + JSONRPC.@message_dispatcher my_dispatcher2 begin + request2_type => (params, token) -> 34 # The request type requires a `String` return, so this tests whether we get an error. 
+ end + + server_task2 = @async try + server = listen(global_socket_name2) + notify(server_is_up) + sock = accept(server) + global conn = JSONRPC.JSONRPCEndpoint(sock, sock) + global msg_dispatcher = JSONRPC.MsgDispatcher() + + run(conn) + + for msg in conn + @test_throws ErrorException("The handler for the 'request2' request returned a value of type $Int, which is not a valid return type according to the request definition.") my_dispatcher2(conn, msg) + end + catch err + Base.display_error(stderr, err, catch_backtrace()) + end + + wait(server_is_up) + + sock2 = connect(global_socket_name2) + conn2 = JSONRPCEndpoint(sock2, sock2) + + run(conn2) + + @test_throws JSONRPC.JSONRPCError(-32603, "The handler for the 'request2' request returned a value of type $Int, which is not a valid return type according to the request definition.", nothing) JSONRPC.send(conn2, request2_type, nothing) + + close(conn2) + close(sock2) + close(conn) + + fetch(server_task) + +end From 8bd3a8e74f6d40d65a251ff90966af9a1a85810c Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:25:01 -0800 Subject: [PATCH 10/24] Squashed 'packages/JSON/' content from commit 4b3913d git-subtree-dir: packages/JSON git-subtree-split: 4b3913d58f04cc5bb2f8d23c6ef82e0fbed20525 --- .gitignore | 3 + .travis.yml | 12 + LICENSE.md | 25 ++ Project.toml | 22 ++ README.md | 108 ++++++ appveyor.yml | 43 +++ bench/bench.jl | 92 +++++ bench/micro.jl | 56 +++ data/jsonchecker/fail01.json | 1 + data/jsonchecker/fail02.json | 1 + data/jsonchecker/fail03.json | 1 + data/jsonchecker/fail04.json | 1 + data/jsonchecker/fail05.json | 1 + data/jsonchecker/fail06.json | 1 + data/jsonchecker/fail07.json | 1 + data/jsonchecker/fail08.json | 1 + data/jsonchecker/fail09.json | 1 + data/jsonchecker/fail10.json | 1 + data/jsonchecker/fail11.json | 1 + data/jsonchecker/fail12.json | 1 + data/jsonchecker/fail13.json | 1 + data/jsonchecker/fail14.json | 1 + data/jsonchecker/fail15.json | 1 + data/jsonchecker/fail16.json | 1 
+ data/jsonchecker/fail17.json | 1 + data/jsonchecker/fail18.json | 2 + data/jsonchecker/fail19.json | 1 + data/jsonchecker/fail20.json | 1 + data/jsonchecker/fail21.json | 1 + data/jsonchecker/fail22.json | 1 + data/jsonchecker/fail23.json | 1 + data/jsonchecker/fail24.json | 1 + data/jsonchecker/fail25.json | 1 + data/jsonchecker/fail26.json | 1 + data/jsonchecker/fail27.json | 2 + data/jsonchecker/fail28.json | 2 + data/jsonchecker/fail29.json | 1 + data/jsonchecker/fail30.json | 1 + data/jsonchecker/fail31.json | 1 + data/jsonchecker/fail32.json | 1 + data/jsonchecker/fail33.json | 1 + data/jsonchecker/fail34.json | 1 + data/jsonchecker/fail35.json | 1 + data/jsonchecker/fail36.json | 1 + data/jsonchecker/fail37.json | 1 + data/jsonchecker/fail38.json | 1 + data/jsonchecker/pass01.json | 58 +++ data/jsonchecker/pass02.json | 1 + data/jsonchecker/pass03.json | 6 + data/jsonchecker/readme.txt | 3 + data/roundtrip/roundtrip01.json | 1 + data/roundtrip/roundtrip02.json | 1 + data/roundtrip/roundtrip03.json | 1 + data/roundtrip/roundtrip04.json | 1 + data/roundtrip/roundtrip05.json | 1 + data/roundtrip/roundtrip06.json | 1 + data/roundtrip/roundtrip07.json | 1 + data/roundtrip/roundtrip08.json | 1 + data/roundtrip/roundtrip09.json | 1 + data/roundtrip/roundtrip10.json | 1 + data/roundtrip/roundtrip11.json | 1 + data/roundtrip/roundtrip12.json | 1 + data/roundtrip/roundtrip13.json | 1 + data/roundtrip/roundtrip14.json | 1 + data/roundtrip/roundtrip15.json | 1 + data/roundtrip/roundtrip16.json | 1 + data/roundtrip/roundtrip17.json | 1 + data/roundtrip/roundtrip18.json | 1 + data/roundtrip/roundtrip19.json | 1 + data/roundtrip/roundtrip20.json | 1 + data/roundtrip/roundtrip21.json | 1 + data/roundtrip/roundtrip22.json | 1 + data/roundtrip/roundtrip23.json | 1 + data/roundtrip/roundtrip24.json | 1 + data/roundtrip/roundtrip25.json | 1 + data/roundtrip/roundtrip26.json | 1 + data/roundtrip/roundtrip27.json | 1 + src/Common.jl | 11 + src/JSON.jl | 31 ++ src/Parser.jl | 
444 ++++++++++++++++++++++ src/Serializations.jl | 39 ++ src/Writer.jl | 357 ++++++++++++++++++ src/bytes.jl | 67 ++++ src/errors.jl | 12 + src/pushvector.jl | 33 ++ src/specialized.jl | 144 +++++++ test/async.jl | 109 ++++++ test/enum.jl | 4 + test/indentation.jl | 10 + test/json-checker.jl | 28 ++ test/json-samples.jl | 644 ++++++++++++++++++++++++++++++++ test/lowering.jl | 37 ++ test/parser/dicttype.jl | 22 ++ test/parser/inttype.jl | 16 + test/parser/invalid-input.jl | 33 ++ test/parser/parsefile.jl | 10 + test/regression/issue021.jl | 4 + test/regression/issue026.jl | 2 + test/regression/issue057.jl | 2 + test/regression/issue109.jl | 8 + test/regression/issue152.jl | 2 + test/regression/issue163.jl | 1 + test/runtests.jl | 80 ++++ test/serializer.jl | 95 +++++ test/standard-serializer.jl | 72 ++++ 105 files changed, 2814 insertions(+) create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 LICENSE.md create mode 100644 Project.toml create mode 100644 README.md create mode 100644 appveyor.yml create mode 100644 bench/bench.jl create mode 100644 bench/micro.jl create mode 100644 data/jsonchecker/fail01.json create mode 100644 data/jsonchecker/fail02.json create mode 100644 data/jsonchecker/fail03.json create mode 100644 data/jsonchecker/fail04.json create mode 100644 data/jsonchecker/fail05.json create mode 100644 data/jsonchecker/fail06.json create mode 100644 data/jsonchecker/fail07.json create mode 100644 data/jsonchecker/fail08.json create mode 100644 data/jsonchecker/fail09.json create mode 100644 data/jsonchecker/fail10.json create mode 100644 data/jsonchecker/fail11.json create mode 100644 data/jsonchecker/fail12.json create mode 100644 data/jsonchecker/fail13.json create mode 100644 data/jsonchecker/fail14.json create mode 100644 data/jsonchecker/fail15.json create mode 100644 data/jsonchecker/fail16.json create mode 100644 data/jsonchecker/fail17.json create mode 100644 data/jsonchecker/fail18.json create mode 100644 
data/jsonchecker/fail19.json create mode 100644 data/jsonchecker/fail20.json create mode 100644 data/jsonchecker/fail21.json create mode 100644 data/jsonchecker/fail22.json create mode 100644 data/jsonchecker/fail23.json create mode 100644 data/jsonchecker/fail24.json create mode 100644 data/jsonchecker/fail25.json create mode 100644 data/jsonchecker/fail26.json create mode 100644 data/jsonchecker/fail27.json create mode 100644 data/jsonchecker/fail28.json create mode 100644 data/jsonchecker/fail29.json create mode 100644 data/jsonchecker/fail30.json create mode 100644 data/jsonchecker/fail31.json create mode 100644 data/jsonchecker/fail32.json create mode 100644 data/jsonchecker/fail33.json create mode 100644 data/jsonchecker/fail34.json create mode 100644 data/jsonchecker/fail35.json create mode 100644 data/jsonchecker/fail36.json create mode 100644 data/jsonchecker/fail37.json create mode 100644 data/jsonchecker/fail38.json create mode 100644 data/jsonchecker/pass01.json create mode 100644 data/jsonchecker/pass02.json create mode 100644 data/jsonchecker/pass03.json create mode 100644 data/jsonchecker/readme.txt create mode 100644 data/roundtrip/roundtrip01.json create mode 100644 data/roundtrip/roundtrip02.json create mode 100644 data/roundtrip/roundtrip03.json create mode 100644 data/roundtrip/roundtrip04.json create mode 100644 data/roundtrip/roundtrip05.json create mode 100644 data/roundtrip/roundtrip06.json create mode 100644 data/roundtrip/roundtrip07.json create mode 100644 data/roundtrip/roundtrip08.json create mode 100644 data/roundtrip/roundtrip09.json create mode 100644 data/roundtrip/roundtrip10.json create mode 100644 data/roundtrip/roundtrip11.json create mode 100644 data/roundtrip/roundtrip12.json create mode 100644 data/roundtrip/roundtrip13.json create mode 100644 data/roundtrip/roundtrip14.json create mode 100644 data/roundtrip/roundtrip15.json create mode 100644 data/roundtrip/roundtrip16.json create mode 100644 data/roundtrip/roundtrip17.json 
create mode 100644 data/roundtrip/roundtrip18.json create mode 100644 data/roundtrip/roundtrip19.json create mode 100644 data/roundtrip/roundtrip20.json create mode 100644 data/roundtrip/roundtrip21.json create mode 100644 data/roundtrip/roundtrip22.json create mode 100644 data/roundtrip/roundtrip23.json create mode 100644 data/roundtrip/roundtrip24.json create mode 100644 data/roundtrip/roundtrip25.json create mode 100644 data/roundtrip/roundtrip26.json create mode 100644 data/roundtrip/roundtrip27.json create mode 100644 src/Common.jl create mode 100644 src/JSON.jl create mode 100644 src/Parser.jl create mode 100644 src/Serializations.jl create mode 100644 src/Writer.jl create mode 100644 src/bytes.jl create mode 100644 src/errors.jl create mode 100644 src/pushvector.jl create mode 100644 src/specialized.jl create mode 100644 test/async.jl create mode 100644 test/enum.jl create mode 100644 test/indentation.jl create mode 100644 test/json-checker.jl create mode 100644 test/json-samples.jl create mode 100644 test/lowering.jl create mode 100644 test/parser/dicttype.jl create mode 100644 test/parser/inttype.jl create mode 100644 test/parser/invalid-input.jl create mode 100644 test/parser/parsefile.jl create mode 100644 test/regression/issue021.jl create mode 100644 test/regression/issue026.jl create mode 100644 test/regression/issue057.jl create mode 100644 test/regression/issue109.jl create mode 100644 test/regression/issue152.jl create mode 100644 test/regression/issue163.jl create mode 100644 test/runtests.jl create mode 100644 test/serializer.jl create mode 100644 test/standard-serializer.jl diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..edc6d3b --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +*.cov +*.mem +data/*.json diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..1bc2f60 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,12 @@ +language: julia +os: + - osx + - linux +julia: + - 0.7 + - 1.0 + - nightly +notifications: 
+ email: false +after_success: + - julia -e 'import Pkg; Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())'; diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..d916e61 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,25 @@ +The Julia JSON package is licensed under the MIT Expat License: + +> Copyright (c) 2002: JSON.org, 2012โ€“2016: Avik Sengupta, Stefan Karpinski, +> David de Laat, Dirk Gadsen, Milo Yip and other contributors +> โ€“ https://github.com/JuliaLang/JSON.jl/contributors +> and https://github.com/miloyip/nativejson-benchmark/contributors +> +> Permission is hereby granted, free of charge, to any person obtaining +> a copy of this software and associated documentation files (the +> "Software"), to deal in the Software without restriction, including +> without limitation the rights to use, copy, modify, merge, publish, +> distribute, sublicense, and/or sell copies of the Software, and to +> permit persons to whom the Software is furnished to do so, subject to +> the following conditions: +> +> The above copyright notice and this permission notice shall be +> included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +> NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +> LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +> OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +> WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/Project.toml b/Project.toml new file mode 100644 index 0000000..2a0d1b3 --- /dev/null +++ b/Project.toml @@ -0,0 +1,22 @@ +name = "JSON" +uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +version = "0.20.1" + +[deps] +Dates = "ade2ca70-3891-5945-98fb-dc099432e06a" +Mmap = "a63ad114-7e13-5084-954f-fe012c677804" +Unicode = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[extras] +DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" +Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" +FixedPointNumbers = "53c48c17-4a7d-5ca2-90c5-79b7896eea93" +OffsetArrays = "6fe1bfb0-de20-5000-8ca7-80f57d26f881" +Sockets = "6462fe0b-24de-5631-8697-dd941f90decc" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[compat] +julia = "0.7, 1" + +[targets] +test = ["DataStructures", "Distributed", "FixedPointNumbers", "OffsetArrays", "Sockets", "Test"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..9ccbd6d --- /dev/null +++ b/README.md @@ -0,0 +1,108 @@ +# JSON.jl +### Parsing and printing JSON in pure Julia. 
+ +[![Build Status](https://travis-ci.org/JuliaIO/JSON.jl.svg)](https://travis-ci.org/JuliaIO/JSON.jl) +[![Build status](https://ci.appveyor.com/api/projects/status/2sfomjwl29k6y6oy)](https://ci.appveyor.com/project/staticfloat/json-jl) +[![codecov.io](http://codecov.io/github/JuliaIO/JSON.jl/coverage.svg?branch=master)](http://codecov.io/github/JuliaIO/JSON.jl?branch=master) + +[![JSON](http://pkg.julialang.org/badges/JSON_0.3.svg)](http://pkg.julialang.org/?pkg=JSON&ver=0.3) +[![JSON](http://pkg.julialang.org/badges/JSON_0.4.svg)](http://pkg.julialang.org/?pkg=JSON&ver=0.4) +[![JSON](http://pkg.julialang.org/badges/JSON_0.5.svg)](http://pkg.julialang.org/?pkg=JSON&ver=0.5) +[![JSON](http://pkg.julialang.org/badges/JSON_0.6.svg)](http://pkg.julialang.org/?pkg=JSON&ver=0.6) + +**Installation**: `julia> Pkg.add("JSON")` + + +## Basic Usage + + +```julia +import JSON + +# JSON.parse - string or stream to Julia data structures +s = "{\"a_number\" : 5.0, \"an_array\" : [\"string\", 9]}" +j = JSON.parse(s) +# Dict{AbstractString,Any} with 2 entries: +# "an_array" => {"string",9} +# "a_number" => 5.0 + +# JSON.json - Julia data structures to a string +JSON.json([2,3]) +# "[2,3]" +JSON.json(j) +# "{\"an_array\":[\"string\",9],\"a_number\":5.0}" +``` + +## Documentation + + +```julia +JSON.print(io::IO, s::AbstractString) +JSON.print(io::IO, s::Union{Integer, AbstractFloat}) +JSON.print(io::IO, n::Nothing) +JSON.print(io::IO, b::Bool) +JSON.print(io::IO, a::AbstractDict) +JSON.print(io::IO, v::AbstractVector) +JSON.print{T, N}(io::IO, v::Array{T, N}) +``` + +Writes a compact (no extra whitespace or indentation) JSON representation +to the supplied IO. + +```julia +JSON.print(a::AbstractDict, indent) +JSON.print(io::IO, a::AbstractDict, indent) +``` + +Writes a JSON representation with newlines, and indentation if specified. Non-zero `indent` will be applied recursively to nested elements. 
+ + +```julia +json(a::Any) +``` + +Returns a compact JSON representation as an `AbstractString`. + +```julia +JSON.parse(s::AbstractString; dicttype=Dict, inttype=Int64) +JSON.parse(io::IO; dicttype=Dict, inttype=Int64) +JSON.parsefile(filename::AbstractString; dicttype=Dict, inttype=Int64, use_mmap=true) +``` + +Parses a JSON `AbstractString` or IO stream into a nested `Array` or `Dict`. + +The `dicttype` indicates the dictionary type (`<: Associative`), or a function that +returns an instance of a dictionary type, +that JSON objects are parsed to. It defaults to `Dict` (the built-in Julia +dictionary), but a different type can be passed for additional functionality. +For example, if you `import DataStructures` +(assuming the [DataStructures +package](https://github.com/JuliaLang/DataStructures.jl) is +installed) + + - you can pass `dicttype=DataStructures.OrderedDict` to maintain the insertion order + of the items in the object; + - or you can pass `()->DefaultDict{String,Any}(Missing)` to having any non-found keys + return `missing` when you index the result. + + +The `inttype` argument controls how integers are parsed. If a number in a JSON +file is recognized to be an integer, it is parsed as one; otherwise it is parsed +as a `Float64`. The `inttype` defaults to `Int64`, but, for example, if you know +that your integer numbers are all small and want to save space, you can pass +`inttype=Int32`. Alternatively, if your JSON input has integers which are too large +for Int64, you can pass `inttype=Int128` or `inttype=BigInt`. `inttype` can be any +subtype of `Real`. + +```julia +JSONText(s::AbstractString) +``` +A wrapper around a Julia string representing JSON-formatted text, +which is inserted *as-is* in the JSON output of `JSON.print` and `JSON.json`. + +```julia +JSON.lower(p::Point2D) = [p.x, p.y] +``` + +Define a custom serialization rule for a particular data type. Must return a +value that can be directly serialized; see help for more details. 
diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000..912635f --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,43 @@ +environment: + matrix: + - julia_version: 0.7 + - julia_version: 1 + - julia_version: nightly + +platform: + - x86 # 32-bit + - x64 # 64-bit + +# # Uncomment the following lines to allow failures on nightly julia +# # (tests will run but not make your overall status red) +# matrix: +# allow_failures: +# - julia_version: nightly + +branches: + only: + - master + - /release-.*/ + +notifications: + - provider: Email + on_build_success: false + on_build_failure: false + on_build_status_changed: false + +install: + - ps: iex ((new-object net.webclient).DownloadString("https://raw.githubusercontent.com/JuliaCI/Appveyor.jl/version-1/bin/install.ps1")) + +build_script: + - echo "%JL_BUILD_SCRIPT%" + - C:\julia\bin\julia -e "%JL_BUILD_SCRIPT%" + +test_script: + - echo "%JL_TEST_SCRIPT%" + - C:\julia\bin\julia -e "%JL_TEST_SCRIPT%" + +# # Uncomment to support code coverage upload. 
Should only be enabled for packages +# # which would have coverage gaps without running on Windows +# on_success: +# - echo "%JL_CODECOV_SCRIPT%" +# - C:\julia\bin\julia -e "%JL_CODECOV_SCRIPT%" \ No newline at end of file diff --git a/bench/bench.jl b/bench/bench.jl new file mode 100644 index 0000000..a9b4be5 --- /dev/null +++ b/bench/bench.jl @@ -0,0 +1,92 @@ +#!/usr/bin/julia --color=yes + +using ArgParse +using JSON + + +function bench(f, simulate=false) + fp = joinpath(JSON_DATA_DIR, string(f, ".json")) + if !isfile(fp) + println("Downloading benchmark file...") + download(DATA_SOURCES[f], fp) + end + GC.gc() # run gc so it doesn't affect benchmarks + t = if args["parse"]["parse-file"] + @elapsed JSON.parsefile(fp) + else + data = read(fp, String) + @elapsed JSON.Parser.parse(data) + end + + if !simulate + printstyled(" [Bench$FLAGS] "; color=:yellow) + println(f, " ", t, " seconds") + end + t +end + + +const JSON_DATA_DIR = joinpath(dirname(dirname(@__FILE__)), "data") +const s = ArgParseSettings(description="Benchmark JSON.jl") + +const DATA_SOURCES = Dict( + "canada" => "https://raw.githubusercontent.com/miloyip/nativejson-benchmark/v1.0.0/data/canada.json", + "citm_catalog" => "https://raw.githubusercontent.com/miloyip/nativejson-benchmark/v1.0.0/data/citm_catalog.json", + "citylots" => "https://raw.githubusercontent.com/zemirco/sf-city-lots-json/master/citylots.json", + "twitter" => "https://raw.githubusercontent.com/miloyip/nativejson-benchmark/v1.0.0/data/twitter.json") + +@add_arg_table s begin + "parse" + action = :command + help = "Run a JSON parser benchmark" + "list" + action = :command + help = "List available JSON files for use" +end + +@add_arg_table s["parse"] begin + "--include-compile", "-c" + help = "If set, include the compile time in measurements" + action = :store_true + "--parse-file", "-f" + help = "If set, measure JSON.parsefile, hence including IO time" + action = :store_true + "file" + help = "The JSON file to benchmark (leave out to 
benchmark all)" + required = false +end + +const args = parse_args(ARGS, s) + +if args["%COMMAND%"] == "parse" + const FLAGS = string( + args["parse"]["include-compile"] ? "C" : "", + args["parse"]["parse-file"] ? "F" : "") + + if args["parse"]["file"] โ‰  nothing + const file = args["parse"]["file"] + + if !args["parse"]["include-compile"] + bench(file, true) + end + bench(file) + else + times = 1.0 + if args["parse"]["include-compile"] + error("Option --include-compile can only be used for single file.") + end + for k in sort(collect(keys(DATA_SOURCES))) + bench(k, true) # warm up compiler + end + for k in sort(collect(keys(DATA_SOURCES))) + times *= bench(k) # do benchmark + end + print_with_color(:yellow, " [Bench$FLAGS] ") + println("Total (G.M.) ", times^(1/length(DATA_SOURCES)), " seconds") + end +elseif args["%COMMAND%"] == "list" + println("Available benchmarks are:") + for k in sort(collect(keys(DATA_SOURCES))) + println(" โ€ข $k") + end +end diff --git a/bench/micro.jl b/bench/micro.jl new file mode 100644 index 0000000..9c3f653 --- /dev/null +++ b/bench/micro.jl @@ -0,0 +1,56 @@ +# JSON Microbenchmarks +# 0.6 required for running benchmarks + +using JSON +using BenchmarkTools +using Dates + +const suite = BenchmarkGroup() + +suite["print"] = BenchmarkGroup(["serialize"]) +suite["pretty-print"] = BenchmarkGroup(["serialize"]) + +struct CustomListType + x::Int + y::Float64 + z::Union{CustomListType, Nothing} +end + +struct CustomTreeType + x::String + y::Union{CustomTreeType, Nothing} + z::Union{CustomTreeType, Nothing} +end + +list(x) = x == 0 ? nothing : CustomListType(1, 1.0, list(x - 1)) +tree(x) = x == 0 ? 
nothing : CustomTreeType("!!!", tree(x - 1), tree(x - 1)) + +const micros = Dict( + "integer" => 88, + "float" => -88.8, + "ascii" => "Hello World!", + "ascii-1024" => "x" ^ 1024, + "unicode" => "เบชเบฐโ€‹เบšเบฒเบโ€‹เบ”เบตโ€‹เบŠเบฒเบงโ€‹เป‚เบฅเบ!", + "unicode-1024" => "โ„œ" ^ 1024, + "bool" => true, + "null" => nothing, + "flat-homogenous-array-16" => collect(1:16), + "flat-homogenous-array-1024" => collect(1:1024), + "heterogenous-array" => [ + 1, 2, 3, 7, "A", "C", "E", "N", "Q", "R", "Shuttle to Grand Central"], + "nested-array-16^2" => [collect(1:16) for _ in 1:16], + "nested-array-16^3" => [[collect(1:16) for _ in 1:16] for _ in 1:16], + "small-dict" => Dict( + :a => :b, :c => "๐Ÿ’™๐Ÿ’™๐Ÿ’™๐Ÿ’™๐Ÿ’™๐Ÿ’™", :e => 10, :f => Dict(:a => :b)), + "flat-dict-128" => Dict(zip(collect(1:128), collect(1:128))), + "date" => Date(2016, 08, 09), + "matrix-16" => [i == j ? 1.0 : 0.0 for i in 1:16, j in 1:16], + "custom-list-128" => list(128), + "custom-tree-8" => tree(8)) + +for (k, v) in micros + io = IOBuffer() + suite["print"][k] = @benchmarkable JSON.print($(IOBuffer()), $v) + suite["pretty-print"][k] = @benchmarkable JSON.print( + $(IOBuffer()), $v, 4) +end diff --git a/data/jsonchecker/fail01.json b/data/jsonchecker/fail01.json new file mode 100644 index 0000000..92a451e --- /dev/null +++ b/data/jsonchecker/fail01.json @@ -0,0 +1 @@ +fable diff --git a/data/jsonchecker/fail02.json b/data/jsonchecker/fail02.json new file mode 100644 index 0000000..6b7c11e --- /dev/null +++ b/data/jsonchecker/fail02.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/data/jsonchecker/fail03.json b/data/jsonchecker/fail03.json new file mode 100644 index 0000000..168c81e --- /dev/null +++ b/data/jsonchecker/fail03.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/data/jsonchecker/fail04.json b/data/jsonchecker/fail04.json new file mode 100644 index 0000000..9de168b --- /dev/null +++ b/data/jsonchecker/fail04.json 
@@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/data/jsonchecker/fail05.json b/data/jsonchecker/fail05.json new file mode 100644 index 0000000..ddf3ce3 --- /dev/null +++ b/data/jsonchecker/fail05.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/data/jsonchecker/fail06.json b/data/jsonchecker/fail06.json new file mode 100644 index 0000000..ed91580 --- /dev/null +++ b/data/jsonchecker/fail06.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/data/jsonchecker/fail07.json b/data/jsonchecker/fail07.json new file mode 100644 index 0000000..8a96af3 --- /dev/null +++ b/data/jsonchecker/fail07.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/data/jsonchecker/fail08.json b/data/jsonchecker/fail08.json new file mode 100644 index 0000000..b28479c --- /dev/null +++ b/data/jsonchecker/fail08.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/data/jsonchecker/fail09.json b/data/jsonchecker/fail09.json new file mode 100644 index 0000000..5815574 --- /dev/null +++ b/data/jsonchecker/fail09.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/data/jsonchecker/fail10.json b/data/jsonchecker/fail10.json new file mode 100644 index 0000000..5d8c004 --- /dev/null +++ b/data/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/data/jsonchecker/fail11.json b/data/jsonchecker/fail11.json new file mode 100644 index 0000000..76eb95b --- /dev/null +++ b/data/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/data/jsonchecker/fail12.json b/data/jsonchecker/fail12.json new file mode 100644 index 0000000..77580a4 --- /dev/null +++ b/data/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git 
a/data/jsonchecker/fail13.json b/data/jsonchecker/fail13.json new file mode 100644 index 0000000..379406b --- /dev/null +++ b/data/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/data/jsonchecker/fail14.json b/data/jsonchecker/fail14.json new file mode 100644 index 0000000..0ed366b --- /dev/null +++ b/data/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/data/jsonchecker/fail15.json b/data/jsonchecker/fail15.json new file mode 100644 index 0000000..fc8376b --- /dev/null +++ b/data/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/data/jsonchecker/fail16.json b/data/jsonchecker/fail16.json new file mode 100644 index 0000000..3fe21d4 --- /dev/null +++ b/data/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/data/jsonchecker/fail17.json b/data/jsonchecker/fail17.json new file mode 100644 index 0000000..62b9214 --- /dev/null +++ b/data/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/data/jsonchecker/fail18.json b/data/jsonchecker/fail18.json new file mode 100644 index 0000000..bd7f1d6 --- /dev/null +++ b/data/jsonchecker/fail18.json @@ -0,0 +1,2 @@ +"mutliple" +"things" diff --git a/data/jsonchecker/fail19.json b/data/jsonchecker/fail19.json new file mode 100644 index 0000000..3b9c46f --- /dev/null +++ b/data/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/data/jsonchecker/fail20.json b/data/jsonchecker/fail20.json new file mode 100644 index 0000000..27c1af3 --- /dev/null +++ b/data/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/data/jsonchecker/fail21.json b/data/jsonchecker/fail21.json new file mode 100644 index 0000000..6247457 --- /dev/null +++ 
b/data/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/data/jsonchecker/fail22.json b/data/jsonchecker/fail22.json new file mode 100644 index 0000000..a775258 --- /dev/null +++ b/data/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/data/jsonchecker/fail23.json b/data/jsonchecker/fail23.json new file mode 100644 index 0000000..494add1 --- /dev/null +++ b/data/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/data/jsonchecker/fail24.json b/data/jsonchecker/fail24.json new file mode 100644 index 0000000..caff239 --- /dev/null +++ b/data/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/data/jsonchecker/fail25.json b/data/jsonchecker/fail25.json new file mode 100644 index 0000000..8b7ad23 --- /dev/null +++ b/data/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/data/jsonchecker/fail26.json b/data/jsonchecker/fail26.json new file mode 100644 index 0000000..845d26a --- /dev/null +++ b/data/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/data/jsonchecker/fail27.json b/data/jsonchecker/fail27.json new file mode 100644 index 0000000..6b01a2c --- /dev/null +++ b/data/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/data/jsonchecker/fail28.json b/data/jsonchecker/fail28.json new file mode 100644 index 0000000..621a010 --- /dev/null +++ b/data/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/data/jsonchecker/fail29.json b/data/jsonchecker/fail29.json new file mode 100644 index 0000000..47ec421 --- /dev/null +++ b/data/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/data/jsonchecker/fail30.json 
b/data/jsonchecker/fail30.json new file mode 100644 index 0000000..8ab0bc4 --- /dev/null +++ b/data/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of file diff --git a/data/jsonchecker/fail31.json b/data/jsonchecker/fail31.json new file mode 100644 index 0000000..1cce602 --- /dev/null +++ b/data/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/data/jsonchecker/fail32.json b/data/jsonchecker/fail32.json new file mode 100644 index 0000000..cb1f560 --- /dev/null +++ b/data/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead of closing brace": true, diff --git a/data/jsonchecker/fail33.json b/data/jsonchecker/fail33.json new file mode 100644 index 0000000..ca5eb19 --- /dev/null +++ b/data/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/data/jsonchecker/fail34.json b/data/jsonchecker/fail34.json new file mode 100644 index 0000000..7ce16bd --- /dev/null +++ b/data/jsonchecker/fail34.json @@ -0,0 +1 @@ +{"garbage" before : "separator"} diff --git a/data/jsonchecker/fail35.json b/data/jsonchecker/fail35.json new file mode 100644 index 0000000..7a46973 --- /dev/null +++ b/data/jsonchecker/fail35.json @@ -0,0 +1 @@ +{"no separator" diff --git a/data/jsonchecker/fail36.json b/data/jsonchecker/fail36.json new file mode 100644 index 0000000..bf08400 --- /dev/null +++ b/data/jsonchecker/fail36.json @@ -0,0 +1 @@ +{"no closing brace": true diff --git a/data/jsonchecker/fail37.json b/data/jsonchecker/fail37.json new file mode 100644 index 0000000..558ed37 --- /dev/null +++ b/data/jsonchecker/fail37.json @@ -0,0 +1 @@ +[ diff --git a/data/jsonchecker/fail38.json b/data/jsonchecker/fail38.json new file mode 100644 index 0000000..98232c6 --- /dev/null +++ b/data/jsonchecker/fail38.json @@ -0,0 +1 @@ +{ diff --git a/data/jsonchecker/pass01.json b/data/jsonchecker/pass01.json new file mode 100644 index 0000000..2c10f22 --- /dev/null +++ b/data/jsonchecker/pass01.json @@ -0,0 +1,58 
@@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + "true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] diff --git a/data/jsonchecker/pass02.json b/data/jsonchecker/pass02.json new file mode 100644 index 0000000..fea5710 --- /dev/null +++ b/data/jsonchecker/pass02.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] diff --git a/data/jsonchecker/pass03.json b/data/jsonchecker/pass03.json new file mode 100644 index 0000000..4528d51 --- /dev/null +++ b/data/jsonchecker/pass03.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/data/jsonchecker/readme.txt b/data/jsonchecker/readme.txt new file mode 100644 index 0000000..321d89d --- /dev/null +++ b/data/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. 
+ +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/data/roundtrip/roundtrip01.json b/data/roundtrip/roundtrip01.json new file mode 100644 index 0000000..500db4a --- /dev/null +++ b/data/roundtrip/roundtrip01.json @@ -0,0 +1 @@ +[null] \ No newline at end of file diff --git a/data/roundtrip/roundtrip02.json b/data/roundtrip/roundtrip02.json new file mode 100644 index 0000000..de601e3 --- /dev/null +++ b/data/roundtrip/roundtrip02.json @@ -0,0 +1 @@ +[true] \ No newline at end of file diff --git a/data/roundtrip/roundtrip03.json b/data/roundtrip/roundtrip03.json new file mode 100644 index 0000000..67b2f07 --- /dev/null +++ b/data/roundtrip/roundtrip03.json @@ -0,0 +1 @@ +[false] \ No newline at end of file diff --git a/data/roundtrip/roundtrip04.json b/data/roundtrip/roundtrip04.json new file mode 100644 index 0000000..6e7ea63 --- /dev/null +++ b/data/roundtrip/roundtrip04.json @@ -0,0 +1 @@ +[0] \ No newline at end of file diff --git a/data/roundtrip/roundtrip05.json b/data/roundtrip/roundtrip05.json new file mode 100644 index 0000000..6dfd298 --- /dev/null +++ b/data/roundtrip/roundtrip05.json @@ -0,0 +1 @@ +["foo"] \ No newline at end of file diff --git a/data/roundtrip/roundtrip06.json b/data/roundtrip/roundtrip06.json new file mode 100644 index 0000000..0637a08 --- /dev/null +++ b/data/roundtrip/roundtrip06.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/data/roundtrip/roundtrip07.json b/data/roundtrip/roundtrip07.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/data/roundtrip/roundtrip07.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/data/roundtrip/roundtrip08.json b/data/roundtrip/roundtrip08.json new file mode 100644 index 0000000..bfa3412 --- /dev/null +++ b/data/roundtrip/roundtrip08.json @@ -0,0 +1 @@ +[0,1] \ No newline at end of file diff --git a/data/roundtrip/roundtrip09.json b/data/roundtrip/roundtrip09.json 
new file mode 100644 index 0000000..9f5dd4e --- /dev/null +++ b/data/roundtrip/roundtrip09.json @@ -0,0 +1 @@ +{"foo":"bar"} \ No newline at end of file diff --git a/data/roundtrip/roundtrip10.json b/data/roundtrip/roundtrip10.json new file mode 100644 index 0000000..2355b4d --- /dev/null +++ b/data/roundtrip/roundtrip10.json @@ -0,0 +1 @@ +{"a":null,"foo":"bar"} \ No newline at end of file diff --git a/data/roundtrip/roundtrip11.json b/data/roundtrip/roundtrip11.json new file mode 100644 index 0000000..99d21a2 --- /dev/null +++ b/data/roundtrip/roundtrip11.json @@ -0,0 +1 @@ +[-1] \ No newline at end of file diff --git a/data/roundtrip/roundtrip12.json b/data/roundtrip/roundtrip12.json new file mode 100644 index 0000000..56c78be --- /dev/null +++ b/data/roundtrip/roundtrip12.json @@ -0,0 +1 @@ +[-2147483648] \ No newline at end of file diff --git a/data/roundtrip/roundtrip13.json b/data/roundtrip/roundtrip13.json new file mode 100644 index 0000000..029580f --- /dev/null +++ b/data/roundtrip/roundtrip13.json @@ -0,0 +1 @@ +[-1234567890123456789] \ No newline at end of file diff --git a/data/roundtrip/roundtrip14.json b/data/roundtrip/roundtrip14.json new file mode 100644 index 0000000..d865800 --- /dev/null +++ b/data/roundtrip/roundtrip14.json @@ -0,0 +1 @@ +[-9223372036854775808] \ No newline at end of file diff --git a/data/roundtrip/roundtrip15.json b/data/roundtrip/roundtrip15.json new file mode 100644 index 0000000..bace2a0 --- /dev/null +++ b/data/roundtrip/roundtrip15.json @@ -0,0 +1 @@ +[1] \ No newline at end of file diff --git a/data/roundtrip/roundtrip16.json b/data/roundtrip/roundtrip16.json new file mode 100644 index 0000000..dfe696d --- /dev/null +++ b/data/roundtrip/roundtrip16.json @@ -0,0 +1 @@ +[2147483647] \ No newline at end of file diff --git a/data/roundtrip/roundtrip17.json b/data/roundtrip/roundtrip17.json new file mode 100644 index 0000000..6640b07 --- /dev/null +++ b/data/roundtrip/roundtrip17.json @@ -0,0 +1 @@ +[4294967295] \ No newline 
at end of file diff --git a/data/roundtrip/roundtrip18.json b/data/roundtrip/roundtrip18.json new file mode 100644 index 0000000..a3ab143 --- /dev/null +++ b/data/roundtrip/roundtrip18.json @@ -0,0 +1 @@ +[1234567890123456789] \ No newline at end of file diff --git a/data/roundtrip/roundtrip19.json b/data/roundtrip/roundtrip19.json new file mode 100644 index 0000000..8ab4a50 --- /dev/null +++ b/data/roundtrip/roundtrip19.json @@ -0,0 +1 @@ +[9223372036854775807] \ No newline at end of file diff --git a/data/roundtrip/roundtrip20.json b/data/roundtrip/roundtrip20.json new file mode 100644 index 0000000..92df1df --- /dev/null +++ b/data/roundtrip/roundtrip20.json @@ -0,0 +1 @@ +[0.0] \ No newline at end of file diff --git a/data/roundtrip/roundtrip21.json b/data/roundtrip/roundtrip21.json new file mode 100644 index 0000000..cfef815 --- /dev/null +++ b/data/roundtrip/roundtrip21.json @@ -0,0 +1 @@ +[-0.0] \ No newline at end of file diff --git a/data/roundtrip/roundtrip22.json b/data/roundtrip/roundtrip22.json new file mode 100644 index 0000000..a7b7eef --- /dev/null +++ b/data/roundtrip/roundtrip22.json @@ -0,0 +1 @@ +[1.2345] \ No newline at end of file diff --git a/data/roundtrip/roundtrip23.json b/data/roundtrip/roundtrip23.json new file mode 100644 index 0000000..b553e84 --- /dev/null +++ b/data/roundtrip/roundtrip23.json @@ -0,0 +1 @@ +[-1.2345] \ No newline at end of file diff --git a/data/roundtrip/roundtrip24.json b/data/roundtrip/roundtrip24.json new file mode 100644 index 0000000..f01efb6 --- /dev/null +++ b/data/roundtrip/roundtrip24.json @@ -0,0 +1 @@ +[5e-324] \ No newline at end of file diff --git a/data/roundtrip/roundtrip25.json b/data/roundtrip/roundtrip25.json new file mode 100644 index 0000000..cdef14d --- /dev/null +++ b/data/roundtrip/roundtrip25.json @@ -0,0 +1 @@ +[2.225073858507201e-308] \ No newline at end of file diff --git a/data/roundtrip/roundtrip26.json b/data/roundtrip/roundtrip26.json new file mode 100644 index 0000000..f4121b7 --- 
/dev/null +++ b/data/roundtrip/roundtrip26.json @@ -0,0 +1 @@ +[2.2250738585072014e-308] \ No newline at end of file diff --git a/data/roundtrip/roundtrip27.json b/data/roundtrip/roundtrip27.json new file mode 100644 index 0000000..17ce521 --- /dev/null +++ b/data/roundtrip/roundtrip27.json @@ -0,0 +1 @@ +[1.7976931348623157e308] \ No newline at end of file diff --git a/src/Common.jl b/src/Common.jl new file mode 100644 index 0000000..55b1fe5 --- /dev/null +++ b/src/Common.jl @@ -0,0 +1,11 @@ +""" +Internal implementation detail. +""" +module Common + +using Unicode + +include("bytes.jl") +include("errors.jl") + +end diff --git a/src/JSON.jl b/src/JSON.jl new file mode 100644 index 0000000..66fb855 --- /dev/null +++ b/src/JSON.jl @@ -0,0 +1,31 @@ +VERSION < v"0.7.0-beta2.199" && __precompile__() + +module JSON + +export json # returns a compact (or indented) JSON representation as a string +export JSONText # string wrapper to insert raw JSON into JSON output + +include("Common.jl") + +# Parser modules +include("Parser.jl") + +# Writer modules +include("Serializations.jl") +include("Writer.jl") + +# stuff to re-"export" +# note that this package does not actually export anything except `json` but +# all of the following are part of the public interface in one way or another +using .Parser: parse, parsefile +using .Writer: show_json, json, lower, print, StructuralContext, show_element, + show_string, show_key, show_pair, show_null, begin_array, + end_array, begin_object, end_object, indent, delimit, separate, + JSONText +using .Serializations: Serialization, CommonSerialization, + StandardSerialization + +# for pretty-printed (non-compact) output, JSONText must be re-parsed: +Writer.lower(json::JSONText) = parse(json.s) + +end # module diff --git a/src/Parser.jl b/src/Parser.jl new file mode 100644 index 0000000..b7556bb --- /dev/null +++ b/src/Parser.jl @@ -0,0 +1,444 @@ +module Parser # JSON + +using Mmap +using ..Common + +include("pushvector.jl") + +""" +Like 
`isspace`, but works on bytes and includes only the four whitespace +characters defined by the JSON standard: space, tab, line feed, and carriage +return. +""" +isjsonspace(b::UInt8) = b == SPACE || b == TAB || b == NEWLINE || b == RETURN + +""" +Like `isdigit`, but for bytes. +""" +isjsondigit(b::UInt8) = DIGIT_ZERO โ‰ค b โ‰ค DIGIT_NINE + +abstract type ParserState end + +mutable struct MemoryParserState <: ParserState + utf8::String + s::Int +end + +# it is convenient to access MemoryParserState like a Vector{UInt8} to avoid copies +Base.@propagate_inbounds Base.getindex(state::MemoryParserState, i::Int) = codeunit(state.utf8, i) +Base.length(state::MemoryParserState) = sizeof(state.utf8) +Base.unsafe_convert(::Type{Ptr{UInt8}}, state::MemoryParserState) = Base.unsafe_convert(Ptr{UInt8}, state.utf8) + +mutable struct StreamingParserState{T <: IO} <: ParserState + io::T + cur::UInt8 + used::Bool + utf8array::PushVector{UInt8, Vector{UInt8}} +end +StreamingParserState(io::IO) = StreamingParserState(io, 0x00, true, PushVector{UInt8}()) + +struct ParserContext{DictType, IntType} end + +""" +Return the byte at the current position of the `ParserState`. If there is no +byte (that is, the `ParserState` is done), then an error is thrown that the +input ended unexpectedly. +""" +@inline function byteat(ps::MemoryParserState) + @inbounds if hasmore(ps) + return ps[ps.s] + else + _error(E_UNEXPECTED_EOF, ps) + end +end + +@inline function byteat(ps::StreamingParserState) + if ps.used + ps.used = false + if eof(ps.io) + _error(E_UNEXPECTED_EOF, ps) + else + ps.cur = read(ps.io, UInt8) + end + end + ps.cur +end + +""" +Like `byteat`, but with no special bounds check and error message. Useful when +a current byte is known to exist. +""" +@inline current(ps::MemoryParserState) = ps[ps.s] +@inline current(ps::StreamingParserState) = byteat(ps) + +""" +Require the current byte of the `ParserState` to be the given byte, and then +skip past that byte. Otherwise, an error is thrown. 
+""" +@inline function skip!(ps::ParserState, c::UInt8) + if byteat(ps) == c + incr!(ps) + else + _error_expected_char(c, ps) + end +end +@noinline _error_expected_char(c, ps) = _error("Expected '$(Char(c))' here", ps) + +function skip!(ps::ParserState, cs::UInt8...) + for c in cs + skip!(ps, c) + end +end + +""" +Move the `ParserState` to the next byte. +""" +@inline incr!(ps::MemoryParserState) = (ps.s += 1) +@inline incr!(ps::StreamingParserState) = (ps.used = true) + +""" +Move the `ParserState` to the next byte, and return the value at the byte before +the advancement. If the `ParserState` is already done, then throw an error. +""" +@inline advance!(ps::ParserState) = (b = byteat(ps); incr!(ps); b) + +""" +Return `true` if there is a current byte, and `false` if all bytes have been +exhausted. +""" +@inline hasmore(ps::MemoryParserState) = ps.s โ‰ค length(ps) +@inline hasmore(ps::StreamingParserState) = true # no more now โ‰  no more ever + +""" +Remove as many whitespace bytes as possible from the `ParserState` starting from +the current byte. +""" +@inline function chomp_space!(ps::ParserState) + @inbounds while hasmore(ps) && isjsonspace(current(ps)) + incr!(ps) + end +end + + +# Used for line counts +function _count_before(haystack::AbstractString, needle::Char, _end::Int) + count = 0 + for (i,c) in enumerate(haystack) + i >= _end && return count + count += c == needle + end + return count +end + + +# Throws an error message with an indicator to the source +@noinline function _error(message::AbstractString, ps::MemoryParserState) + orig = ps.utf8 + lines = _count_before(orig, '\n', ps.s) + # Replace all special multi-line/multi-space characters with a space. + strnl = replace(orig, r"[\b\f\n\r\t\s]" => " ") + li = (ps.s > 20) ? ps.s - 9 : 1 # Left index + ri = min(lastindex(orig), ps.s + 20) # Right index + error(message * + "\nLine: " * string(lines) * + "\nAround: ..." * strnl[li:ri] * "..." 
* + "\n " * (" " ^ (ps.s - li)) * "^\n" + ) +end + +@noinline function _error(message::AbstractString, ps::StreamingParserState) + error("$message\n ...when parsing byte with value '$(current(ps))'") +end + +# PARSING + +""" +Given a `ParserState`, after possibly any amount of whitespace, return the next +parseable value. +""" +function parse_value(pc::ParserContext, ps::ParserState) + chomp_space!(ps) + + @inbounds byte = byteat(ps) + if byte == STRING_DELIM + parse_string(ps) + elseif isjsondigit(byte) || byte == MINUS_SIGN + parse_number(pc, ps) + elseif byte == OBJECT_BEGIN + parse_object(pc, ps) + elseif byte == ARRAY_BEGIN + parse_array(pc, ps) + else + parse_jsconstant(ps::ParserState) + end +end + +function parse_jsconstant(ps::ParserState) + c = advance!(ps) + if c == LATIN_T # true + skip!(ps, LATIN_R, LATIN_U, LATIN_E) + true + elseif c == LATIN_F # false + skip!(ps, LATIN_A, LATIN_L, LATIN_S, LATIN_E) + false + elseif c == LATIN_N # null + skip!(ps, LATIN_U, LATIN_L, LATIN_L) + nothing + else + _error(E_UNEXPECTED_CHAR, ps) + end +end + +function parse_array(pc::ParserContext, ps::ParserState) + result = Any[] + @inbounds incr!(ps) # Skip over opening '[' + chomp_space!(ps) + if byteat(ps) โ‰  ARRAY_END # special case for empty array + @inbounds while true + push!(result, parse_value(pc, ps)) + chomp_space!(ps) + byteat(ps) == ARRAY_END && break + skip!(ps, DELIMITER) + end + end + + @inbounds incr!(ps) + result +end + + +function parse_object(pc::ParserContext{DictType, <:Real}, ps::ParserState) where DictType + obj = DictType() + keyT = keytype(typeof(obj)) + + incr!(ps) # Skip over opening '{' + chomp_space!(ps) + if byteat(ps) โ‰  OBJECT_END # special case for empty object + @inbounds while true + # Read key + chomp_space!(ps) + byteat(ps) == STRING_DELIM || _error(E_BAD_KEY, ps) + key = parse_string(ps) + chomp_space!(ps) + skip!(ps, SEPARATOR) + # Read value + value = parse_value(pc, ps) + chomp_space!(ps) + obj[keyT === Symbol ? 
Symbol(key) : convert(keyT, key)] = value + byteat(ps) == OBJECT_END && break + skip!(ps, DELIMITER) + end + end + + incr!(ps) + obj +end + + +utf16_is_surrogate(c::UInt16) = (c & 0xf800) == 0xd800 +utf16_get_supplementary(lead::UInt16, trail::UInt16) = Char(UInt32(lead-0xd7f7)<<10 + trail) + +function read_four_hex_digits!(ps::ParserState) + local n::UInt16 = 0 + + for _ in 1:4 + b = advance!(ps) + n = n << 4 + if isjsondigit(b) + b - DIGIT_ZERO + elseif LATIN_A โ‰ค b โ‰ค LATIN_F + b - (LATIN_A - UInt8(10)) + elseif LATIN_UPPER_A โ‰ค b โ‰ค LATIN_UPPER_F + b - (LATIN_UPPER_A - UInt8(10)) + else + _error(E_BAD_ESCAPE, ps) + end + end + + n +end + +function read_unicode_escape!(ps) + u1 = read_four_hex_digits!(ps) + if utf16_is_surrogate(u1) + skip!(ps, BACKSLASH) + skip!(ps, LATIN_U) + u2 = read_four_hex_digits!(ps) + utf16_get_supplementary(u1, u2) + else + Char(u1) + end +end + +function parse_string(ps::ParserState) + b = IOBuffer() + incr!(ps) # skip opening quote + while true + c = advance!(ps) + + if c == BACKSLASH + c = advance!(ps) + if c == LATIN_U # Unicode escape + write(b, read_unicode_escape!(ps)) + else + c = get(ESCAPES, c, 0x00) + c == 0x00 && _error(E_BAD_ESCAPE, ps) + write(b, c) + end + continue + elseif c < SPACE + _error(E_BAD_CONTROL, ps) + elseif c == STRING_DELIM + return String(take!(b)) + end + + write(b, c) + end +end + +""" +Return `true` if the given bytes vector, starting at `from` and ending at `to`, +has a leading zero. +""" +function hasleadingzero(bytes, from::Int, to::Int) + c = bytes[from] + from + 1 < to && c == UInt8('-') && + bytes[from + 1] == DIGIT_ZERO && isjsondigit(bytes[from + 2]) || + from < to && to > from + 1 && c == DIGIT_ZERO && + isjsondigit(bytes[from + 1]) +end + +""" +Parse a float from the given bytes vector, starting at `from` and ending at the +byte before `to`. Bytes enclosed should all be ASCII characters. 
+""" +function float_from_bytes(bytes, from::Int, to::Int) + # The ccall is not ideal (Base.tryparse would be better), but it actually + # makes an 2ร— difference to performance + hasvalue, val = ccall(:jl_try_substrtod, Tuple{Bool, Float64}, + (Ptr{UInt8}, Csize_t, Csize_t), bytes, from - 1, to - from + 1) + hasvalue ? val : nothing +end + +""" +Parse an integer from the given bytes vector, starting at `from` and ending at +the byte before `to`. Bytes enclosed should all be ASCII characters. +""" +function int_from_bytes(pc::ParserContext{<:Any,IntType}, + ps::ParserState, + bytes, + from::Int, + to::Int) where IntType <: Real + @inbounds isnegative = bytes[from] == MINUS_SIGN ? (from += 1; true) : false + num = IntType(0) + @inbounds for i in from:to + c = bytes[i] + dig = c - DIGIT_ZERO + if dig < 0x10 + num = IntType(10) * num + IntType(dig) + else + _error(E_BAD_NUMBER, ps) + end + end + ifelse(isnegative, -num, num) +end + +function number_from_bytes(pc::ParserContext, + ps::ParserState, + isint::Bool, + bytes, + from::Int, + to::Int) + @inbounds if hasleadingzero(bytes, from, to) + _error(E_LEADING_ZERO, ps) + end + + if isint + @inbounds if to == from && bytes[from] == MINUS_SIGN + _error(E_BAD_NUMBER, ps) + end + int_from_bytes(pc, ps, bytes, from, to) + else + res = float_from_bytes(bytes, from, to) + res === nothing ? _error(E_BAD_NUMBER, ps) : res + end +end + + +function parse_number(pc::ParserContext, ps::ParserState) + # Determine the end of the floating point by skipping past ASCII values + # 0-9, +, -, e, E, and . 
+ number = ps.utf8array + isint = true + + @inbounds while hasmore(ps) + c = current(ps) + + if isjsondigit(c) || c == MINUS_SIGN + push!(number, UInt8(c)) + elseif c in (PLUS_SIGN, LATIN_E, LATIN_UPPER_E, DECIMAL_POINT) + push!(number, UInt8(c)) + isint = false + else + break + end + + incr!(ps) + end + + v = number_from_bytes(pc, ps, isint, number, 1, length(number)) + resize!(number, 0) + return v +end + +unparameterize_type(x) = x # Fallback for nontypes -- functions etc +function unparameterize_type(T::Type) + candidate = typeintersect(T, AbstractDict{String, Any}) + candidate <: Union{} ? T : candidate +end + +# Workaround for slow dynamic dispatch for creating objects +const DEFAULT_PARSERCONTEXT = ParserContext{Dict{String, Any}, Int64}() +function _get_parsercontext(dicttype, inttype) + if dicttype == Dict{String, Any} && inttype == Int64 + DEFAULT_PARSERCONTEXT + else + ParserContext{unparameterize_type(dicttype), inttype}.instance + end +end + +function parse(str::AbstractString; + dicttype=Dict{String,Any}, + inttype::Type{<:Real}=Int64) + pc = _get_parsercontext(dicttype, inttype) + ps = MemoryParserState(str, 1) + v = parse_value(pc, ps) + chomp_space!(ps) + if hasmore(ps) + _error(E_EXPECTED_EOF, ps) + end + v +end + +function parse(io::IO; + dicttype=Dict{String,Any}, + inttype::Type{<:Real}=Int64) + pc = _get_parsercontext(dicttype, inttype) + ps = StreamingParserState(io) + parse_value(pc, ps) +end + +function parsefile(filename::AbstractString; + dicttype=Dict{String, Any}, + inttype::Type{<:Real}=Int64, + use_mmap=true) + sz = filesize(filename) + open(filename) do io + s = use_mmap ? 
String(Mmap.mmap(io, Vector{UInt8}, sz)) : read(io, String) + parse(s; dicttype=dicttype, inttype=inttype) + end +end + +# Efficient implementations of some of the above for in-memory parsing +include("specialized.jl") + +end # module Parser diff --git a/src/Serializations.jl b/src/Serializations.jl new file mode 100644 index 0000000..e4398ce --- /dev/null +++ b/src/Serializations.jl @@ -0,0 +1,39 @@ +""" +JSON writer serialization contexts. + +This module defines the `Serialization` abstract type and several concrete +implementations, as they relate to JSON. +""" +module Serializations + +using ..Common + +""" +A `Serialization` defines how objects are lowered to JSON format. +""" +abstract type Serialization end + +""" +The `CommonSerialization` comes with a default set of rules for serializing +Julia types to their JSON equivalents. Additional rules are provided either by +packages explicitly defining `JSON.show_json` for this serialization, or by the +`JSON.lower` method. Most concrete implementations of serializers should subtype +`CommonSerialization`, unless it is desirable to bypass the `lower` system, in +which case `Serialization` should be subtyped. +""" +abstract type CommonSerialization <: Serialization end + +""" +The `StandardSerialization` defines a common, standard JSON serialization format +that is optimized to: + +- strictly follow the JSON standard +- be useful in the greatest number of situations + +All serializations defined for `CommonSerialization` are inherited by +`StandardSerialization`. It is therefore generally advised to add new +serialization behaviour to `CommonSerialization`. 
+""" +struct StandardSerialization <: CommonSerialization end + +end diff --git a/src/Writer.jl b/src/Writer.jl new file mode 100644 index 0000000..5c4cc63 --- /dev/null +++ b/src/Writer.jl @@ -0,0 +1,357 @@ +module Writer + +using Dates +using ..Common +using ..Serializations: Serialization, StandardSerialization, + CommonSerialization + +using Unicode + + +""" +Internal JSON.jl implementation detail; do not depend on this type. + +A JSON primitive that wraps around any composite type to enable `Dict`-like +serialization. +""" +struct CompositeTypeWrapper{T} + wrapped::T + fns::Vector{Symbol} +end + +CompositeTypeWrapper(x, syms) = CompositeTypeWrapper(x, collect(syms)) +CompositeTypeWrapper(x) = CompositeTypeWrapper(x, fieldnames(typeof(x))) + +""" + lower(x) + +Return a value of a JSON-encodable primitive type that `x` should be lowered +into before encoding as JSON. Supported types are: `AbstractDict` to JSON +objects, `Tuple` and `AbstractVector` to JSON arrays, `AbstractArray` to nested +JSON arrays, `AbstractString`, `Symbol`, `Enum`, or `Char` to JSON string, +`Integer` and `AbstractFloat` to JSON number, `Bool` to JSON boolean, and +`Nothing` to JSON null, or any other types with a `show_json` method defined. + +Extensions of this method should preserve the property that the return value is +one of the aforementioned types. If first lowering to some intermediate type is +required, then extensions should call `lower` before returning a value. + +Note that the return value need not be *recursively* loweredโ€”this function may +for instance return an `AbstractArray{Any, 1}` whose elements are not JSON +primitives. +""" +function lower(a) + if nfields(a) > 0 + CompositeTypeWrapper(a) + else + error("Cannot serialize type $(typeof(a))") + end +end + +# To avoid allocating an intermediate string, we directly define `show_json` +# for this type instead of lowering it to a string first (which would +# allocate). 
However, the `show_json` method does call `lower` so as to allow +# users to change the lowering of their `Enum` or even `AbstractString` +# subtypes if necessary. +const IsPrintedAsString = Union{ + Dates.TimeType, Char, Type, AbstractString, Enum, Symbol} +lower(x::IsPrintedAsString) = x + +lower(m::Module) = throw(ArgumentError("cannot serialize Module $m as JSON")) +lower(x::Real) = convert(Float64, x) +lower(x::Base.AbstractSet) = collect(x) + +""" +Abstract supertype of all JSON and JSON-like structural writer contexts. +""" +abstract type StructuralContext <: IO end + +""" +Internal implementation detail. + +A JSON structural context around an `IO` object. Structural writer contexts +define the behaviour of serializing JSON structural objects, such as objects, +arrays, and strings to JSON. The translation of Julia types to JSON structural +objects is not handled by a `JSONContext`, but by a `Serialization` wrapper +around it. Abstract supertype of `PrettyContext` and `CompactContext`. Data can +be written to a JSON context in the usual way, but often higher-level operations +such as `begin_array` or `begin_object` are preferred to directly writing bytes +to the stream. +""" +abstract type JSONContext <: StructuralContext end + +""" +Internal implementation detail. + +Keeps track of the current location in the array or object, which winds and +unwinds during serialization. +""" +mutable struct PrettyContext{T<:IO} <: JSONContext + io::T + step::Int # number of spaces to step + state::Int # number of steps at present + first::Bool # whether an object/array was just started +end +PrettyContext(io::IO, step) = PrettyContext(io, step, 0, false) + +""" +Internal implementation detail. + +For compact printing, which in JSON is fully recursive. +""" +mutable struct CompactContext{T<:IO} <: JSONContext + io::T + first::Bool +end +CompactContext(io::IO) = CompactContext(io, false) + +""" +Internal implementation detail. 
+ +Implements an IO context safe for printing into JSON strings. +""" +struct StringContext{T<:IO} <: IO + io::T +end + +# These aliases make defining additional methods on `show_json` easier. +const CS = CommonSerialization +const SC = StructuralContext + +# Low-level direct access +Base.write(io::JSONContext, byte::UInt8) = write(io.io, byte) +Base.write(io::StringContext, byte::UInt8) = + write(io.io, ESCAPED_ARRAY[byte + 0x01]) +#= turn on if there's a performance benefit +write(io::StringContext, char::Char) = + char <= '\x7f' ? write(io, ESCAPED_ARRAY[UInt8(c) + 0x01]) : + Base.print(io, c) +=# + +""" + indent(io::StructuralContext) + +If appropriate, write a newline to the given context, then indent it by the +appropriate number of spaces. Otherwise, do nothing. +""" +@inline function indent(io::PrettyContext) + write(io, NEWLINE) + for _ in 1:io.state + write(io, SPACE) + end +end +@inline indent(io::CompactContext) = nothing + +""" + separate(io::StructuralContext) + +Write a colon, followed by a space if appropriate, to the given context. +""" +@inline separate(io::PrettyContext) = write(io, SEPARATOR, SPACE) +@inline separate(io::CompactContext) = write(io, SEPARATOR) + +""" + delimit(io::StructuralContext) + +If this is not the first item written in a collection, write a comma in the +structural context. Otherwise, do not write a comma, but set a flag that the +first element has been written already. 
+""" +@inline function delimit(io::JSONContext) + if !io.first + write(io, DELIMITER) + end + io.first = false +end + +for kind in ("object", "array") + beginfn = Symbol("begin_", kind) + beginsym = Symbol(uppercase(kind), "_BEGIN") + endfn = Symbol("end_", kind) + endsym = Symbol(uppercase(kind), "_END") + # Begin and end objects + @eval function $beginfn(io::PrettyContext) + write(io, $beginsym) + io.state += io.step + io.first = true + end + @eval $beginfn(io::CompactContext) = (write(io, $beginsym); io.first = true) + @eval function $endfn(io::PrettyContext) + io.state -= io.step + if !io.first + indent(io) + end + write(io, $endsym) + io.first = false + end + @eval $endfn(io::CompactContext) = (write(io, $endsym); io.first = false) +end + +""" + show_string(io::IO, str) + +Print `str` as a JSON string (that is, properly escaped and wrapped by double +quotes) to the given IO object `io`. +""" +function show_string(io::IO, x) + write(io, STRING_DELIM) + Base.print(StringContext(io), x) + write(io, STRING_DELIM) +end + +""" + show_null(io::IO) + +Print the string `null` to the given IO object `io`. +""" +show_null(io::IO) = Base.print(io, "null") + +""" + show_element(io::StructuralContext, s, x) + +Print object `x` as an element of a JSON array to context `io` using rules +defined by serialization `s`. +""" +function show_element(io::JSONContext, s, x) + delimit(io) + indent(io) + show_json(io, s, x) +end + +""" + show_key(io::StructuralContext, k) + +Print string `k` as the key of a JSON key-value pair to context `io`. +""" +function show_key(io::JSONContext, k) + delimit(io) + indent(io) + show_string(io, k) + separate(io) +end + +""" + show_pair(io::StructuralContext, s, k, v) + +Print the key-value pair defined by `k => v` as JSON to context `io`, using +rules defined by serialization `s`. 
+""" +function show_pair(io::JSONContext, s, k, v) + show_key(io, k) + show_json(io, s, v) +end +show_pair(io::JSONContext, s, kv) = show_pair(io, s, first(kv), last(kv)) + +# Default serialization rules for CommonSerialization (CS) +function show_json(io::SC, s::CS, x::IsPrintedAsString) + # We need this check to allow `lower(x::Enum)` overrides to work if needed; + # it should be optimized out if `lower` is a no-op + lx = lower(x) + if x === lx + show_string(io, x) + else + show_json(io, s, lx) + end +end + +function show_json(io::SC, s::CS, x::Union{Integer, AbstractFloat}) + if isfinite(x) + Base.print(io, x) + else + show_null(io) + end +end + +show_json(io::SC, ::CS, ::Nothing) = show_null(io) +show_json(io::SC, ::CS, ::Missing) = show_null(io) + +function show_json(io::SC, s::CS, a::AbstractDict) + begin_object(io) + for kv in a + show_pair(io, s, kv) + end + end_object(io) +end + +function show_json(io::SC, s::CS, kv::Pair) + begin_object(io) + show_pair(io, s, kv) + end_object(io) +end + +function show_json(io::SC, s::CS, x::CompositeTypeWrapper) + begin_object(io) + for fn in x.fns + show_pair(io, s, fn, getfield(x.wrapped, fn)) + end + end_object(io) +end + +function show_json(io::SC, s::CS, x::Union{AbstractVector, Tuple}) + begin_array(io) + for elt in x + show_element(io, s, elt) + end + end_array(io) +end + +""" +Serialize a multidimensional array to JSON in column-major format. That is, +`json([1 2 3; 4 5 6]) == "[[1,4],[2,5],[3,6]]"`. 
+""" +function show_json(io::SC, s::CS, A::AbstractArray{<:Any,n}) where n + begin_array(io) + newdims = ntuple(_ -> :, n - 1) + for j in axes(A, n) + show_element(io, s, view(A, newdims..., j)) + end + end_array(io) +end + +# special case for 0-dimensional arrays +show_json(io::SC, s::CS, A::AbstractArray{<:Any,0}) = show_json(io, s, A[]) + +show_json(io::SC, s::CS, a) = show_json(io, s, lower(a)) + +# Fallback show_json for non-SC types +""" +Serialize Julia object `obj` to IO `io` using the behaviour described by `s`. If +`indent` is provided, then the JSON will be pretty-printed; otherwise it will be +printed on one line. If pretty-printing is enabled, then a trailing newline will +be printed; otherwise there will be no trailing newline. +""" +function show_json(io::IO, s::Serialization, obj; indent=nothing) + ctx = indent === nothing ? CompactContext(io) : PrettyContext(io, indent) + show_json(ctx, s, obj) + if indent !== nothing + println(io) + end +end + +""" + JSONText(s::AbstractString) + +`JSONText` is a wrapper around a Julia string representing JSON-formatted +text, which is inserted *as-is* in the JSON output of `JSON.print` and `JSON.json` +for compact output, and is otherwise re-parsed for pretty-printed output. + +`s` *must* contain valid JSON text. Otherwise compact output will contain +the malformed `s` and other serialization output will throw a parsing exception. 
+""" +struct JSONText + s::String +end +show_json(io::CompactContext, s::CS, json::JSONText) = write(io, json.s) +# other contexts for JSONText are handled by lower(json) = parse(json.s) + +print(io::IO, obj, indent) = + show_json(io, StandardSerialization(), obj; indent=indent) +print(io::IO, obj) = show_json(io, StandardSerialization(), obj) + +print(a, indent) = print(stdout, a, indent) +print(a) = print(stdout, a) + +json(a) = sprint(print, a) +json(a, indent) = sprint(print, a, indent) + +end diff --git a/src/bytes.jl b/src/bytes.jl new file mode 100644 index 0000000..57b92a8 --- /dev/null +++ b/src/bytes.jl @@ -0,0 +1,67 @@ +# The following bytes have significant meaning in JSON +const BACKSPACE = UInt8('\b') +const TAB = UInt8('\t') +const NEWLINE = UInt8('\n') +const FORM_FEED = UInt8('\f') +const RETURN = UInt8('\r') +const SPACE = UInt8(' ') +const STRING_DELIM = UInt8('"') +const PLUS_SIGN = UInt8('+') +const DELIMITER = UInt8(',') +const MINUS_SIGN = UInt8('-') +const DECIMAL_POINT = UInt8('.') +const SOLIDUS = UInt8('/') +const DIGIT_ZERO = UInt8('0') +const DIGIT_NINE = UInt8('9') +const SEPARATOR = UInt8(':') +const LATIN_UPPER_A = UInt8('A') +const LATIN_UPPER_E = UInt8('E') +const LATIN_UPPER_F = UInt8('F') +const ARRAY_BEGIN = UInt8('[') +const BACKSLASH = UInt8('\\') +const ARRAY_END = UInt8(']') +const LATIN_A = UInt8('a') +const LATIN_B = UInt8('b') +const LATIN_E = UInt8('e') +const LATIN_F = UInt8('f') +const LATIN_L = UInt8('l') +const LATIN_N = UInt8('n') +const LATIN_R = UInt8('r') +const LATIN_S = UInt8('s') +const LATIN_T = UInt8('t') +const LATIN_U = UInt8('u') +const OBJECT_BEGIN = UInt8('{') +const OBJECT_END = UInt8('}') + +const ESCAPES = Dict( + STRING_DELIM => STRING_DELIM, + BACKSLASH => BACKSLASH, + SOLIDUS => SOLIDUS, + LATIN_B => BACKSPACE, + LATIN_F => FORM_FEED, + LATIN_N => NEWLINE, + LATIN_R => RETURN, + LATIN_T => TAB) + +const REVERSE_ESCAPES = Dict(reverse(p) for p in ESCAPES) +const ESCAPED_ARRAY = 
Vector{Vector{UInt8}}(undef, 256) +for c in 0x00:0xFF + ESCAPED_ARRAY[c + 1] = if c == SOLIDUS + [SOLIDUS] # don't escape this one + elseif c โ‰ฅ 0x80 + [c] # UTF-8 character copied verbatim + elseif haskey(REVERSE_ESCAPES, c) + [BACKSLASH, REVERSE_ESCAPES[c]] + elseif iscntrl(Char(c)) || !isprint(Char(c)) + UInt8[BACKSLASH, LATIN_U, string(c, base=16, pad=4)...] + else + [c] + end +end + +export BACKSPACE, TAB, NEWLINE, FORM_FEED, RETURN, SPACE, STRING_DELIM, + PLUS_SIGN, DELIMITER, MINUS_SIGN, DECIMAL_POINT, SOLIDUS, DIGIT_ZERO, + DIGIT_NINE, SEPARATOR, LATIN_UPPER_A, LATIN_UPPER_E, LATIN_UPPER_F, + ARRAY_BEGIN, BACKSLASH, ARRAY_END, LATIN_A, LATIN_B, LATIN_E, LATIN_F, + LATIN_L, LATIN_N, LATIN_R, LATIN_S, LATIN_T, LATIN_U, OBJECT_BEGIN, + OBJECT_END, ESCAPES, REVERSE_ESCAPES, ESCAPED_ARRAY diff --git a/src/errors.jl b/src/errors.jl new file mode 100644 index 0000000..c9c1c87 --- /dev/null +++ b/src/errors.jl @@ -0,0 +1,12 @@ +# The following errors may be thrown by the parser +const E_EXPECTED_EOF = "Expected end of input" +const E_UNEXPECTED_EOF = "Unexpected end of input" +const E_UNEXPECTED_CHAR = "Unexpected character" +const E_BAD_KEY = "Invalid object key" +const E_BAD_ESCAPE = "Invalid escape sequence" +const E_BAD_CONTROL = "ASCII control character in string" +const E_LEADING_ZERO = "Invalid leading zero in number" +const E_BAD_NUMBER = "Invalid number" + +export E_EXPECTED_EOF, E_UNEXPECTED_EOF, E_UNEXPECTED_CHAR, E_BAD_KEY, + E_BAD_ESCAPE, E_BAD_CONTROL, E_LEADING_ZERO, E_BAD_NUMBER diff --git a/src/pushvector.jl b/src/pushvector.jl new file mode 100644 index 0000000..01399f1 --- /dev/null +++ b/src/pushvector.jl @@ -0,0 +1,33 @@ +# This is a vector wrapper that we use as a workaround for `push!` +# being slow (it always calls into the runtime even if the underlying buffer, +# has enough space). 
Here we keep track of the length using an extra field +mutable struct PushVector{T, A<:AbstractVector{T}} <: AbstractVector{T} + v::A + l::Int +end + +# Default length of 20 should be enough to never need to grow in most cases +PushVector{T}() where {T} = PushVector(Vector{T}(undef, 20), 0) + +Base.unsafe_convert(::Type{Ptr{UInt8}}, v::PushVector) = pointer(v.v) +Base.length(v::PushVector) = v.l +Base.size(v::PushVector) = (v.l,) +@inline function Base.getindex(v::PushVector, i) + @boundscheck checkbounds(v, i) + @inbounds v.v[i] +end + +function Base.push!(v::PushVector, i) + v.l += 1 + if v.l > length(v.v) + resize!(v.v, v.l * 2) + end + v.v[v.l] = i + return v +end + +function Base.resize!(v::PushVector, l::Integer) + # Only support shrinking for now, since that is all we need + @assert l <= v.l + v.l = l +end diff --git a/src/specialized.jl b/src/specialized.jl new file mode 100644 index 0000000..e204299 --- /dev/null +++ b/src/specialized.jl @@ -0,0 +1,144 @@ +function maxsize_buffer(maxsize::Int) + IOBuffer(maxsize=maxsize) +end + +# Specialized functions for increased performance when JSON is in-memory +function parse_string(ps::MemoryParserState) + # "Dry Run": find length of string so we can allocate the right amount of + # memory from the start. Does not do full error checking. + fastpath, len = predict_string(ps) + + # Now read the string itself: + + # Fast path occurs when the string has no escaped characters. This is quite + # often the case in real-world data, especially when keys are short strings. + # We can just copy the data from the buffer in this case. + if fastpath + s = ps.s + ps.s = s + len + 2 # byte after closing quote + return unsafe_string(pointer(ps.utf8)+s, len) + else + String(take!(parse_string(ps, maxsize_buffer(len)))) + end +end + +""" +Scan through a string at the current parser state and return a tuple containing +information about the string. This function avoids memory allocation where +possible. 
+ +The first element of the returned tuple is a boolean indicating whether the +string may be copied directly from the parser state. Special casing string +parsing when there are no escaped characters leads to substantially increased +performance in common situations. + +The second element of the returned tuple is an integer representing the exact +length of the string, in bytes when encoded as UTF-8. This information is useful +for pre-sizing a buffer to contain the parsed string. + +This function will throw an error if: + + - invalid control characters are found + - an invalid unicode escape is read + - the string is not terminated + +No error is thrown when other invalid backslash escapes are encountered. +""" +function predict_string(ps::MemoryParserState) + e = length(ps) + fastpath = true # true if no escapes in this string, so it can be copied + len = 0 # the number of UTF8 bytes the string contains + + s = ps.s + 1 # skip past opening string character " + @inbounds while s <= e + c = ps[s] + if c == BACKSLASH + fastpath = false + (s += 1) > e && break + if ps[s] == LATIN_U # Unicode escape + t = ps.s + ps.s = s + 1 + len += write(devnull, read_unicode_escape!(ps)) + s = ps.s + ps.s = t + continue + end + elseif c == STRING_DELIM + return fastpath, len + elseif c < SPACE + ps.s = s + _error(E_BAD_CONTROL, ps) + end + len += 1 + s += 1 + end + + ps.s = s + _error(E_UNEXPECTED_EOF, ps) +end + +""" +Parse the string starting at the parser stateโ€™s current location into the given +pre-sized IOBuffer. The only correctness checking is for escape sequences, so the +passed-in buffer must exactly represent the amount of space needed for parsing. 
+""" +function parse_string(ps::MemoryParserState, b::IOBuffer) + s = ps.s + e = length(ps) + + s += 1 # skip past opening string character " + len = b.maxsize + @inbounds while b.size < len + c = ps[s] + if c == BACKSLASH + s += 1 + s > e && break + c = ps[s] + if c == LATIN_U # Unicode escape + ps.s = s + 1 + write(b, read_unicode_escape!(ps)) + s = ps.s + continue + else + c = get(ESCAPES, c, 0x00) + if c == 0x00 + ps.s = s + _error(E_BAD_ESCAPE, ps) + end + end + end + + # UTF8-encoded non-ascii characters will be copied verbatim, which is + # the desired behaviour + write(b, c) + s += 1 + end + + # don't worry about non-termination or other edge cases; those should have + # been caught in the dry run. + ps.s = s + 1 + b +end + +function parse_number(pc::ParserContext, ps::MemoryParserState) + s = p = ps.s + e = length(ps) + isint = true + + # Determine the end of the floating point by skipping past ASCII values + # 0-9, +, -, e, E, and . + while p โ‰ค e + @inbounds c = ps[p] + if isjsondigit(c) || MINUS_SIGN == c # no-op + elseif PLUS_SIGN == c || LATIN_E == c || LATIN_UPPER_E == c || + DECIMAL_POINT == c + isint = false + else + break + end + p += 1 + end + ps.s = p + + number_from_bytes(pc, ps, isint, ps, s, p - 1) +end diff --git a/test/async.jl b/test/async.jl new file mode 100644 index 0000000..1612a6e --- /dev/null +++ b/test/async.jl @@ -0,0 +1,109 @@ +finished_async_tests = RemoteChannel() + +using Sockets + +@async begin + s = listen(7777) + s = accept(s) + + Base.start_reading(s) + + @test JSON.parse(s) != nothing # a + @test JSON.parse(s) != nothing # b + validate_c(s) # c + @test JSON.parse(s) != nothing # d + validate_svg_tviewer_menu(s) # svg_tviewer_menu + @test JSON.parse(s) != nothing # gmaps + @test JSON.parse(s) != nothing # colors1 + @test JSON.parse(s) != nothing # colors2 + @test JSON.parse(s) != nothing # colors3 + @test JSON.parse(s) != nothing # twitter + @test JSON.parse(s) != nothing # facebook + validate_flickr(s) # flickr + @test 
JSON.parse(s) != nothing # youtube + @test JSON.parse(s) != nothing # iphone + @test JSON.parse(s) != nothing # customer + @test JSON.parse(s) != nothing # product + @test JSON.parse(s) != nothing # interop + validate_unicode(s) # unicode + @test JSON.parse(s) != nothing # issue5 + @test JSON.parse(s) != nothing # dollars + @test JSON.parse(s) != nothing # brackets + + put!(finished_async_tests, nothing) +end + +w = connect("localhost", 7777) + +@test JSON.parse(a) != nothing +write(w, a) + +@test JSON.parse(b) != nothing +write(w, b) + +validate_c(c) +write(w, c) + +@test JSON.parse(d) != nothing +write(w, d) + +validate_svg_tviewer_menu(svg_tviewer_menu) +write(w, svg_tviewer_menu) + +@test JSON.parse(gmaps) != nothing +write(w, gmaps) + +@test JSON.parse(colors1) != nothing +write(w, colors1) + +@test JSON.parse(colors2) != nothing +write(w, colors2) + +@test JSON.parse(colors3) != nothing +write(w, colors3) + +@test JSON.parse(twitter) != nothing +write(w, twitter) + +@test JSON.parse(facebook) != nothing +write(w, facebook) + +validate_flickr(flickr) +write(w, flickr) + +@test JSON.parse(youtube) != nothing +write(w, youtube) + +@test JSON.parse(iphone) != nothing +write(w, iphone) + +@test JSON.parse(customer) != nothing +write(w, customer) + +@test JSON.parse(product) != nothing +write(w, product) + +@test JSON.parse(interop) != nothing +write(w, interop) + +validate_unicode(unicode) +write(w, unicode) + +# issue #5 +issue5 = "[\"A\",\"B\",\"C\\n\"]" +JSON.parse(issue5) +write(w, issue5) + +# $ escaping issue +dollars = ["all of the \$s", "ยตniรงรธโˆ‚\$"] +json_dollars = json(dollars) +@test JSON.parse(json_dollars) != nothing +write(w, json_dollars) + +# unmatched brackets +brackets = Dict("foo"=>"ba}r", "be}e]p"=>"boo{p") +json_brackets = json(brackets) +@test JSON.parse(json_brackets) != nothing +write(w, json_dollars) + +fetch(finished_async_tests) diff --git a/test/enum.jl b/test/enum.jl new file mode 100644 index 0000000..ead3d99 --- /dev/null +++ 
b/test/enum.jl @@ -0,0 +1,4 @@ +@enum Animal zebra aardvark horse +@test json(zebra) == "\"zebra\"" +@test json([aardvark, horse, Dict("z" => zebra)]) == + "[\"aardvark\",\"horse\",{\"z\":\"zebra\"}]" diff --git a/test/indentation.jl b/test/indentation.jl new file mode 100644 index 0000000..98fa5f0 --- /dev/null +++ b/test/indentation.jl @@ -0,0 +1,10 @@ +# check indented json has same final value as non indented +fb = JSON.parse(facebook) +fbjson1 = json(fb, 2) +fbjson2 = json(fb) +@test JSON.parse(fbjson1) == JSON.parse(fbjson2) + +ev = JSON.parse(svg_tviewer_menu) +ejson1 = json(ev, 2) +ejson2 = json(ev) +@test JSON.parse(ejson1) == JSON.parse(ejson2) diff --git a/test/json-checker.jl b/test/json-checker.jl new file mode 100644 index 0000000..7d0594b --- /dev/null +++ b/test/json-checker.jl @@ -0,0 +1,28 @@ +# Run modified JSON checker tests + +const JSON_DATA_DIR = joinpath(dirname(@__DIR__), "data") + +for i in 1:38 + file = "fail$(lpad(string(i), 2, "0")).json" + filepath = joinpath(JSON_DATA_DIR, "jsonchecker", file) + + @test_throws ErrorException JSON.parsefile(filepath) +end + +for i in 1:3 + # Test that the files parse successfully and match streaming parser + tf = joinpath(JSON_DATA_DIR, "jsonchecker", "pass$(lpad(string(i), 2, "0")).json") + @test JSON.parsefile(tf) == open(JSON.parse, tf) +end + +# Run JSON roundtrip tests (check consistency of .json) + +roundtrip(data) = JSON.json(JSON.Parser.parse(data)) + +for i in 1:27 + file = "roundtrip$(lpad(string(i), 2, "0")).json" + filepath = joinpath(JSON_DATA_DIR, "roundtrip", file) + + rt = roundtrip(read(filepath, String)) + @test rt == roundtrip(rt) +end diff --git a/test/json-samples.jl b/test/json-samples.jl new file mode 100644 index 0000000..2df326f --- /dev/null +++ b/test/json-samples.jl @@ -0,0 +1,644 @@ +#Examples from http://json.org/example.html +a="{\"menu\": { + \"id\": \"file\", + \"value\": \"File\", + \"popup\": { + \"menuitem\": [ + {\"value\": \"New\", \"onclick\": \"CreateNewDoc()\"}, 
+ {\"value\": \"Open\", \"onclick\": \"OpenDoc()\"}, + {\"value\": \"Close\", \"onclick\": \"CloseDoc()\"} + ] + } + }} + " + + +b="{ + \"glossary\": { + \"title\": \"example glossary\", + \"GlossDiv\": { + \"title\": \"S\", + \"GlossList\": { + \"GlossEntry\": { + \"ID\": \"SGML\", + \"SortAs\": \"SGML\", + \"GlossTerm\": \"Standard Generalized Markup Language\", + \"Acronym\": \"SGML\", + \"Abbrev\": \"ISO 8879:1986\", + \"GlossDef\": { + \"para\": \"A meta-markup language, used to create markup languages such as DocBook.\", + \"GlossSeeAlso\": [\"GML\", \"XML\"] + }, + \"GlossSee\": \"markup\" + } + } + } + } +} +" + +const c = """ +{"widget": { + "debug": "on", + "window": { + "title": "Sample Konfabulator Widget", + "name": "main_window", + "width": 500, + "height": 500 + }, + "image": { + "src": "Images/Sun.png", + "name": "sun1", + "hOffset": 250, + "vOffset": 250, + "alignment": "center" + }, + "text": { + "data": "Click Here", + "size": 36.5, + "style": "bold", + "name": "text1", + "hOffset": 250, + "vOffset": 100, + "alignment": "center", + "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" + } +}}""" +function validate_c(c) + j = JSON.parse(c) + @test j != nothing + @test typeof(j["widget"]["image"]["hOffset"]) == Int64 + @test j["widget"]["image"]["hOffset"] == 250 + @test typeof(j["widget"]["text"]["size"]) == Float64 + @test j["widget"]["text"]["size"] == 36.5 +end + +d = "{\"web-app\": { + \"servlet\": [ + { + \"servlet-name\": \"cofaxCDS\", + \"servlet-class\": \"org.cofax.cds.CDSServlet\", + \"init-param\": { + \"configGlossary:installationAt\": \"Philadelphia, PA\", + \"configGlossary:adminEmail\": \"ksm@pobox.com\", + \"configGlossary:poweredBy\": \"Cofax\", + \"configGlossary:poweredByIcon\": \"/images/cofax.gif\", + \"configGlossary:staticPath\": \"/content/static\", + \"templateProcessorClass\": \"org.cofax.WysiwygTemplate\", + \"templateLoaderClass\": \"org.cofax.FilesTemplateLoader\", + \"templatePath\": \"templates\", + 
\"templateOverridePath\": \"\", + \"defaultListTemplate\": \"listTemplate.htm\", + \"defaultFileTemplate\": \"articleTemplate.htm\", + \"useJSP\": false, + \"jspListTemplate\": \"listTemplate.jsp\", + \"jspFileTemplate\": \"articleTemplate.jsp\", + \"cachePackageTagsTrack\": 200, + \"cachePackageTagsStore\": 200, + \"cachePackageTagsRefresh\": 60, + \"cacheTemplatesTrack\": 100, + \"cacheTemplatesStore\": 50, + \"cacheTemplatesRefresh\": 15, + \"cachePagesTrack\": 200, + \"cachePagesStore\": 100, + \"cachePagesRefresh\": 10, + \"cachePagesDirtyRead\": 10, + \"searchEngineListTemplate\": \"forSearchEnginesList.htm\", + \"searchEngineFileTemplate\": \"forSearchEngines.htm\", + \"searchEngineRobotsDb\": \"WEB-INF/robots.db\", + \"useDataStore\": true, + \"dataStoreClass\": \"org.cofax.SqlDataStore\", + \"redirectionClass\": \"org.cofax.SqlRedirection\", + \"dataStoreName\": \"cofax\", + \"dataStoreDriver\": \"com.microsoft.jdbc.sqlserver.SQLServerDriver\", + \"dataStoreUrl\": \"jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon\", + \"dataStoreUser\": \"sa\", + \"dataStorePassword\": \"dataStoreTestQuery\", + \"dataStoreTestQuery\": \"SET NOCOUNT ON;select test='test';\", + \"dataStoreLogFile\": \"/usr/local/tomcat/logs/datastore.log\", + \"dataStoreInitConns\": 10, + \"dataStoreMaxConns\": 100, + \"dataStoreConnUsageLimit\": 100, + \"dataStoreLogLevel\": \"debug\", + \"maxUrlLength\": 500}}, + { + \"servlet-name\": \"cofaxEmail\", + \"servlet-class\": \"org.cofax.cds.EmailServlet\", + \"init-param\": { + \"mailHost\": \"mail1\", + \"mailHostOverride\": \"mail2\"}}, + { + \"servlet-name\": \"cofaxAdmin\", + \"servlet-class\": \"org.cofax.cds.AdminServlet\"}, + + { + \"servlet-name\": \"fileServlet\", + \"servlet-class\": \"org.cofax.cds.FileServlet\"}, + { + \"servlet-name\": \"cofaxTools\", + \"servlet-class\": \"org.cofax.cms.CofaxToolsServlet\", + \"init-param\": { + \"templatePath\": \"toolstemplates/\", + \"log\": 1, + \"logLocation\": 
\"/usr/local/tomcat/logs/CofaxTools.log\", + \"logMaxSize\": \"\", + \"dataLog\": 1, + \"dataLogLocation\": \"/usr/local/tomcat/logs/dataLog.log\", + \"dataLogMaxSize\": \"\", + \"removePageCache\": \"/content/admin/remove?cache=pages&id=\", + \"removeTemplateCache\": \"/content/admin/remove?cache=templates&id=\", + \"fileTransferFolder\": \"/usr/local/tomcat/webapps/content/fileTransferFolder\", + \"lookInContext\": 1, + \"adminGroupID\": 4, + \"betaServer\": true}}], + \"servlet-mapping\": { + \"cofaxCDS\": \"/\", + \"cofaxEmail\": \"/cofaxutil/aemail/*\", + \"cofaxAdmin\": \"/admin/*\", + \"fileServlet\": \"/static/*\", + \"cofaxTools\": \"/tools/*\"}, + + \"taglib\": { + \"taglib-uri\": \"cofax.tld\", + \"taglib-location\": \"/WEB-INF/tlds/cofax.tld\"}}}" + +const svg_tviewer_menu = """ +{"menu": { + "header": "SVG\\tViewer\\u03b1", + "items": [ + {"id": "Open"}, + {"id": "OpenNew", "label": "Open New"}, + null, + {"id": "ZoomIn", "label": "Zoom In"}, + {"id": "ZoomOut", "label": "Zoom Out"}, + {"id": "OriginalView", "label": "Original View"}, + null, + {"id": "Quality"}, + {"id": "Pause"}, + {"id": "Mute"}, + null, + {"id": "Find", "label": "Find..."}, + {"id": "FindAgain", "label": "Find Again"}, + {"id": "Copy"}, + {"id": "CopyAgain", "label": "Copy Again"}, + {"id": "CopySVG", "label": "Copy SVG"}, + {"id": "ViewSVG", "label": "View SVG"}, + {"id": "ViewSource", "label": "View Source"}, + {"id": "SaveAs", "label": "Save As"}, + null, + {"id": "Help"}, + {"id": "About", "label": "About Adobe SVG Viewer..."} + ] +}}""" +function validate_svg_tviewer_menu(str) + j = JSON.parse(str) + @test j != nothing + @test typeof(j) == Dict{String, Any} + @test length(j) == 1 + @test typeof(j["menu"]) == Dict{String, Any} + @test length(j["menu"]) == 2 + @test j["menu"]["header"] == "SVG\tViewerฮฑ" + @test isa(j["menu"]["items"], Vector{Any}) + @test length(j["menu"]["items"]) == 22 + @test j["menu"]["items"][3] == nothing + @test j["menu"]["items"][2]["id"] == "OpenNew" + 
@test j["menu"]["items"][2]["label"] == "Open New" +end + + +#Example JSON strings from http://www.jquery4u.com/json/10-example-json-files/ + +gmaps= "{\"markers\": [ + { + \"point\":\"new GLatLng(40.266044,-74.718479)\", + \"homeTeam\":\"Lawrence Library\", + \"awayTeam\":\"LUGip\", + \"markerImage\":\"images/red.png\", + \"information\": \"Linux users group meets second Wednesday of each month.\", + \"fixture\":\"Wednesday 7pm\", + \"capacity\":\"\", + \"previousScore\":\"\" + }, + { + \"point\":\"new GLatLng(40.211600,-74.695702)\", + \"homeTeam\":\"Hamilton Library\", + \"awayTeam\":\"LUGip HW SIG\", + \"markerImage\":\"images/white.png\", + \"information\": \"Linux users can meet the first Tuesday of the month to work out harward and configuration issues.\", + \"fixture\":\"Tuesday 7pm\", + \"capacity\":\"\", + \"tv\":\"\" + }, + { + \"point\":\"new GLatLng(40.294535,-74.682012)\", + \"homeTeam\":\"Applebees\", + \"awayTeam\":\"After LUPip Mtg Spot\", + \"markerImage\":\"images/newcastle.png\", + \"information\": \"Some of us go there after the main LUGip meeting, drink brews, and talk.\", + \"fixture\":\"Wednesday whenever\", + \"capacity\":\"2 to 4 pints\", + \"tv\":\"\" + } +] }" + +colors1 = "{ + \"colorsArray\":[{ + \"colorName\":\"red\", + \"hexValue\":\"#f00\" + }, + { + \"colorName\":\"green\", + \"hexValue\":\"#0f0\" + }, + { + \"colorName\":\"blue\", + \"hexValue\":\"#00f\" + }, + { + \"colorName\":\"cyan\", + \"hexValue\":\"#0ff\" + }, + { + \"colorName\":\"magenta\", + \"hexValue\":\"#f0f\" + }, + { + \"colorName\":\"yellow\", + \"hexValue\":\"#ff0\" + }, + { + \"colorName\":\"black\", + \"hexValue\":\"#000\" + } + ] +}" + +colors2 = "{ + \"colorsArray\":[{ + \"red\":\"#f00\", + \"green\":\"#0f0\", + \"blue\":\"#00f\", + \"cyan\":\"#0ff\", + \"magenta\":\"#f0f\", + \"yellow\":\"#ff0\", + \"black\":\"#000\" + } + ] +}" + +colors3 = "{ + \"red\":\"#f00\", + \"green\":\"#0f0\", + \"blue\":\"#00f\", + \"cyan\":\"#0ff\", + \"magenta\":\"#f0f\", + 
\"yellow\":\"#ff0\", + \"black\":\"#000\" +}" + +twitter = "{\"results\":[ + + {\"text\":\"@twitterapi http://tinyurl.com/ctrefg\", + \"to_user_id\":396524, + \"to_user\":\"TwitterAPI\", + \"from_user\":\"jkoum\", + \"metadata\": + { + \"result_type\":\"popular\", + \"recent_retweets\": 109 + }, + \"id\":1478555574, + \"from_user_id\":1833773, + \"iso_language_code\":\"nl\", + \"source\":\"twitter\", + \"profile_image_url\":\"http://s3.amazonaws.com/twitter_production/profile_images/118412707/2522215727_a5f07da155_b_normal.jpg\", + \"created_at\":\"Wed, 08 Apr 2009 19:22:10 +0000\"}], + \"since_id\":0, + \"max_id\":1480307926, + \"refresh_url\":\"?since_id=1480307926&q=%40twitterapi\", + \"results_per_page\":15, + \"next_page\":\"?page=2&max_id=1480307926&q=%40twitterapi\", + \"completed_in\":0.031704, + \"page\":1, + \"query\":\"%40twitterapi\"}" + +facebook= "{ + \"data\": [ + { + \"id\": \"X999_Y999\", + \"from\": { + \"name\": \"Tom Brady\", \"id\": \"X12\" + }, + \"message\": \"Looking forward to 2010!\", + \"actions\": [ + { + \"name\": \"Comment\", + \"link\": \"http://www.facebook.com/X999/posts/Y999\" + }, + { + \"name\": \"Like\", + \"link\": \"http://www.facebook.com/X999/posts/Y999\" + } + ], + \"type\": \"status\", + \"created_time\": \"2010-08-02T21:27:44+0000\", + \"updated_time\": \"2010-08-02T21:27:44+0000\" + }, + { + \"id\": \"X998_Y998\", + \"from\": { + \"name\": \"Peyton Manning\", \"id\": \"X18\" + }, + \"message\": \"Where's my contract?\", + \"actions\": [ + { + \"name\": \"Comment\", + \"link\": \"http://www.facebook.com/X998/posts/Y998\" + }, + { + \"name\": \"Like\", + \"link\": \"http://www.facebook.com/X998/posts/Y998\" + } + ], + \"type\": \"status\", + \"created_time\": \"2010-08-02T21:27:44+0000\", + \"updated_time\": \"2010-08-02T21:27:44+0000\" + } + ] +}" + +const flickr = """{ + "title": "Talk On Travel Pool", + "link": "http://www.flickr.com/groups/talkontravel/pool/", + "description": "Travel and vacation photos from around 
the world.", + "modified": "2009-02-02T11:10:27Z", + "generator": "http://www.flickr.com/", + "totalItems":222, + "items": [ + { + "title": "View from the hotel", + "link": "http://www.flickr.com/photos/33112458@N08/3081564649/in/pool-998875@N22", + "media": {"m":"http://farm4.static.flickr.com/3037/3081564649_4a6569750c_m.jpg"}, + "date_taken": "2008-12-04T04:43:03-08:00", + "description": "

Talk On Travel has added a photo to the pool:

\\"View

", + "published": "2008-12-04T12:43:03Z", + "author": "nobody@flickr.com (Talk On Travel)", + "author_id": "33112458@N08", + "tags": "spain dolphins tenerife canaries lagomera aqualand playadelasamericas junglepark losgigantos loscristines talkontravel" + } + ] +}""" +function validate_flickr(str) + k = JSON.parse(str) + @test k != nothing + @test k["totalItems"] == 222 + @test k["items"][1]["description"][12] == '\"' +end + +youtube = "{\"apiVersion\":\"2.0\", + \"data\":{ + \"updated\":\"2010-01-07T19:58:42.949Z\", + \"totalItems\":800, + \"startIndex\":1, + \"itemsPerPage\":1, + \"items\":[ + {\"id\":\"hYB0mn5zh2c\", + \"uploaded\":\"2007-06-05T22:07:03.000Z\", + \"updated\":\"2010-01-07T13:26:50.000Z\", + \"uploader\":\"GoogleDeveloperDay\", + \"category\":\"News\", + \"title\":\"Google Developers Day US - Maps API Introduction\", + \"description\":\"Google Maps API Introduction ...\", + \"tags\":[ + \"GDD07\",\"GDD07US\",\"Maps\" + ], + \"thumbnail\":{ + \"default\":\"http://i.ytimg.com/vi/hYB0mn5zh2c/default.jpg\", + \"hqDefault\":\"http://i.ytimg.com/vi/hYB0mn5zh2c/hqdefault.jpg\" + }, + \"player\":{ + \"default\":\"http://www.youtube.com/watch?v\u003dhYB0mn5zh2c\" + }, + \"content\":{ + \"1\":\"rtsp://v5.cache3.c.youtube.com/CiILENy.../0/0/0/video.3gp\", + \"5\":\"http://www.youtube.com/v/hYB0mn5zh2c?f...\", + \"6\":\"rtsp://v1.cache1.c.youtube.com/CiILENy.../0/0/0/video.3gp\" + }, + \"duration\":2840, + \"aspectRatio\":\"widescreen\", + \"rating\":4.63, + \"ratingCount\":68, + \"viewCount\":220101, + \"favoriteCount\":201, + \"commentCount\":22, + \"status\":{ + \"value\":\"restricted\", + \"reason\":\"limitedSyndication\" + }, + \"accessControl\":{ + \"syndicate\":\"allowed\", + \"commentVote\":\"allowed\", + \"rate\":\"allowed\", + \"list\":\"allowed\", + \"comment\":\"allowed\", + \"embed\":\"allowed\", + \"videoRespond\":\"moderated\" + } + } + ] + } +}" + +iphone = "{ + \"menu\": { + \"header\": \"xProgress SVG Viewer\", + \"items\": [ + { + \"id\": 
\"Open\" + }, + { + \"id\": \"OpenNew\", + \"label\": \"Open New\" + }, + null, + { + \"id\": \"ZoomIn\", + \"label\": \"Zoom In\" + }, + { + \"id\": \"ZoomOut\", + \"label\": \"Zoom Out\" + }, + { + \"id\": \"OriginalView\", + \"label\": \"Original View\" + }, + null, + { + \"id\": \"Quality\" + }, + { + \"id\": \"Pause\" + }, + { + \"id\": \"Mute\" + }, + null, + { + \"id\": \"Find\", + \"label\": \"Find...\" + }, + { + \"id\": \"FindAgain\", + \"label\": \"Find Again\" + }, + { + \"id\": \"Copy\" + }, + { + \"id\": \"CopyAgain\", + \"label\": \"Copy Again\" + }, + { + \"id\": \"CopySVG\", + \"label\": \"Copy SVG\" + }, + { + \"id\": \"ViewSVG\", + \"label\": \"View SVG\" + }, + { + \"id\": \"ViewSource\", + \"label\": \"View Source\" + }, + { + \"id\": \"SaveAs\", + \"label\": \"Save As\" + }, + null, + { + \"id\": \"Help\" + }, + { + \"id\": \"About\", + \"label\": \"About xProgress CVG Viewer...\" + } + ] + } +}" + +customer = "{ + \"firstName\": \"John\", + \"lastName\": \"Smith\", + \"age\": 25, + \"address\": + { + \"streetAddress\": \"21 2nd Street\", + \"city\": \"New York\", + \"state\": \"NY\", + \"postalCode\": \"10021\" + }, + \"phoneNumber\": + [ + { + \"type\": \"home\", + \"number\": \"212 555-1234\" + }, + { + \"type\": \"fax\", + \"number\": \"646 555-4567\" + } + ] + }" + + product = "{ + \"name\":\"Product\", + \"properties\": + { + \"id\": + { + \"type\":\"number\", + \"description\":\"Product identifier\", + \"required\":true + }, + \"name\": + { + \"description\":\"Name of the product\", + \"type\":\"string\", + \"required\":true + }, + \"price\": + { + \"type\":\"number\", + \"minimum\":0, + \"required\":true + }, + \"tags\": + { + \"type\":\"array\", + \"items\": + { + \"type\":\"string\" + } + } + } +}" + +interop = "{ + \"ResultSet\": { + \"totalResultsAvailable\": \"1827221\", + \"totalResultsReturned\": 2, + \"firstResultPosition\": 1, + \"Result\": [ + { + \"Title\": \"potato jpg\", + \"Summary\": \"Kentang Si bungsu dari keluarga 
Solanum tuberosum L ini ternyata memiliki khasiat untuk mengurangi kerutan jerawat bintik hitam dan kemerahan pada kulit Gunakan seminggu sekali sebagai\", + \"Url\": \"http://www.mediaindonesia.com/spaw/uploads/images/potato.jpg\", + \"ClickUrl\": \"http://www.mediaindonesia.com/spaw/uploads/images/potato.jpg\", + \"RefererUrl\": \"http://www.mediaindonesia.com/mediaperempuan/index.php?ar_id=Nzkw\", + \"FileSize\": 22630, + \"FileFormat\": \"jpeg\", + \"Height\": \"362\", + \"Width\": \"532\", + \"Thumbnail\": { + \"Url\": \"http://thm-a01.yimg.com/nimage/557094559c18f16a\", + \"Height\": \"98\", + \"Width\": \"145\" + } + }, + { + \"Title\": \"potato jpg\", + \"Summary\": \"Introduction of puneri aloo This is a traditional potato preparation flavoured with curry leaves and peanuts and can be eaten on fasting day Preparation time 10 min\", + \"Url\": \"http://www.infovisual.info/01/photo/potato.jpg\", + \"ClickUrl\": \"http://www.infovisual.info/01/photo/potato.jpg\", + \"RefererUrl\": \"http://sundayfood.com/puneri-aloo-indian-%20recipe\", + \"FileSize\": 119398, + \"FileFormat\": \"jpeg\", + \"Height\": \"685\", + \"Width\": \"1024\", + \"Thumbnail\": { + \"Url\": \"http://thm-a01.yimg.com/nimage/7fa23212efe84b64\", + \"Height\": \"107\", + \"Width\": \"160\" + } + } + ] + } +}" + +const unicode = """ +{"เฆ…เฆฒเฆฟเฆฎเงเฆชเฆฟเฆ•เฆธ": { + "เฆ…เงเฆฏเฆพเฆฅเฆฒเง‡เฆŸ": "เงจเงจเฆŸเฆฟ เฆฆเง‡เฆถ เฆฅเง‡เฆ•เง‡ เงจ,เงฆเงฉเงซ เฆœเฆจ เฆชเงเฆฐเฆคเฆฟเฆฏเง‹เฆ—เง€", + "เฆ‡เฆญเง‡เฆจเงเฆŸ": "เงจเงจเฆŸเฆฟ เฆ‡เฆญเง‡เฆจเงเฆŸเง‡เฆฐ เฆฎเฆงเงเฆฏเง‡ เฆ›เฆฟเฆฒ เฆฆเงœเฆฟ เฆŸเฆพเฆจเฆพเฆŸเฆพเฆจเฆฟ", + "เฆฐเง‡เฆ•เฆฐเงเฆก": [ + {"เงงเงฆเงฆเฆฎเฆฟ. 
เฆธเงเฆชเงเฆฐเฆฟเฆจเงเฆŸ": "เฆฐเง‡เฆœเฆฟ เฆ“เงŸเฆพเฆ•เฆพเฆฐ, เฆฆเฆ•เงเฆทเฆฟเฆฃ เฆ†เฆซเงเฆฐเฆฟเฆ•เฆพ"}, + {"Marathon": "เฆœเฆจเฆฟ เฆนเง‡เฆ‡เฆธ"}, + {" เฆซเงเฆฐเฆฟ-เฆธเงเฆŸเฆพเฆ‡เฆฒ เฆธเฆพเฆเฆคเฆพเฆฐ": "Henry Taylor, Britain"} + ] +}} +""" +function validate_unicode(str) + u = JSON.parse(str) + @test u != nothing + @test u["เฆ…เฆฒเฆฟเฆฎเงเฆชเฆฟเฆ•เฆธ"]["เฆฐเง‡เฆ•เฆฐเงเฆก"][2]["Marathon"] == "เฆœเฆจเฆฟ เฆนเง‡เฆ‡เฆธ" +end diff --git a/test/lowering.jl b/test/lowering.jl new file mode 100644 index 0000000..388cff1 --- /dev/null +++ b/test/lowering.jl @@ -0,0 +1,37 @@ +module TestLowering + +using JSON +using Test +using Dates +using FixedPointNumbers: Fixed + +@test JSON.json(Date(2016, 8, 3)) == "\"2016-08-03\"" + +@test JSON.json(:x) == "\"x\"" +@test_throws ArgumentError JSON.json(Base) + +struct Type151{T} + x::T +end + +@test JSON.parse(JSON.json(Type151)) == string(Type151) + +JSON.lower(v::Type151{T}) where {T} = Dict(:type => T, :value => v.x) +@test JSON.parse(JSON.json(Type151(1.0))) == Dict( + "type" => "Float64", + "value" => 1.0) + +fixednum = Fixed{Int16, 15}(0.1234) +@test JSON.parse(JSON.json(fixednum)) == convert(Float64, fixednum) + +# test that the default string-serialization of enums can be overriden by +# `lower` if needed +@enum Fruit apple orange banana +JSON.lower(x::Fruit) = string("Fruit: ", x) +@test JSON.json(apple) == "\"Fruit: apple\"" + +@enum Vegetable carrot tomato potato +JSON.lower(x::Vegetable) = Dict(string(x) => Int(x)) +@test JSON.json(potato) == "{\"potato\":2}" + +end diff --git a/test/parser/dicttype.jl b/test/parser/dicttype.jl new file mode 100644 index 0000000..6e4d328 --- /dev/null +++ b/test/parser/dicttype.jl @@ -0,0 +1,22 @@ +MissingDict() = DataStructures.DefaultDict{String,Any}(Missing) + +@testset for T in [ + DataStructures.OrderedDict, + Dict{Symbol, Int32}, + MissingDict +] + val = JSON.parse("{\"x\": 3}", dicttype=T) + @test length(val) == 1 + key = collect(keys(val))[1] + @test string(key) == "x" + 
@test val[key] == 3 + + if T == MissingDict + @test val isa DataStructures.DefaultDict{String} + @test val["y"] === missing + else + @test val isa T + @test_throws KeyError val["y"] + end +end + diff --git a/test/parser/inttype.jl b/test/parser/inttype.jl new file mode 100644 index 0000000..30e9ca1 --- /dev/null +++ b/test/parser/inttype.jl @@ -0,0 +1,16 @@ +@testset for T in [Int32, Int64, Int128, BigInt] + val = JSON.parse("{\"x\": 3}", inttype=T) + @test isa(val, Dict{String, Any}) + @test length(val) == 1 + key = collect(keys(val))[1] + @test string(key) == "x" + value = val[key] + @test value == 3 + @test typeof(value) == T +end + +@testset begin + teststr = """{"201736327611975630": 18005722827070440994}""" + val = JSON.parse(teststr, inttype=Int128) + @test val == Dict{String,Any}("201736327611975630"=> 18005722827070440994) +end diff --git a/test/parser/invalid-input.jl b/test/parser/invalid-input.jl new file mode 100644 index 0000000..924f225 --- /dev/null +++ b/test/parser/invalid-input.jl @@ -0,0 +1,33 @@ +const FAILURES = [ + # Unexpected character in array + "[1,2,3/4,5,6,7]", + # Unexpected character in object + "{\"1\":2, \"2\":3 _ \"4\":5}", + # Invalid escaped character + "[\"alpha\\ฮฑ\"]", + "[\"\\u05AG\"]", + # Invalid 'simple' and 'unknown value' + "[tXXe]", + "[fail]", + "โˆž", + # Invalid number + "[5,2,-]", + "[5,2,+ฮฒ]", + # Incomplete escape + "\"\\", + # Control character + "\"\0\"", + # Issue #99 + "[\"๐Ÿ•\"_\"๐Ÿ•\"", + # Issue #260 + "1997-03-03", + "1997.1-", +] + +@testset for fail in FAILURES + # Test memory parser + @test_throws ErrorException JSON.parse(fail) + + # Test streaming parser + @test_throws ErrorException JSON.parse(IOBuffer(fail)) +end diff --git a/test/parser/parsefile.jl b/test/parser/parsefile.jl new file mode 100644 index 0000000..f5b9f6c --- /dev/null +++ b/test/parser/parsefile.jl @@ -0,0 +1,10 @@ +tmppath, io = mktemp() +write(io, facebook) +close(io) +if Sys.iswindows() + # don't use mmap on Windows, to avoid 
ERROR: unlink: operation not permitted (EPERM) + @test haskey(JSON.parsefile(tmppath; use_mmap=false), "data") +else + @test haskey(JSON.parsefile(tmppath), "data") +end +rm(tmppath) diff --git a/test/regression/issue021.jl b/test/regression/issue021.jl new file mode 100644 index 0000000..856f820 --- /dev/null +++ b/test/regression/issue021.jl @@ -0,0 +1,4 @@ +test21 = "[\r\n{\r\n\"a\": 1,\r\n\"b\": 2\r\n},\r\n{\r\n\"a\": 3,\r\n\"b\": 4\r\n}\r\n]" +a = JSON.parse(test21) +@test isa(a, Vector{Any}) +@test length(a) == 2 diff --git a/test/regression/issue026.jl b/test/regression/issue026.jl new file mode 100644 index 0000000..ff9ea6d --- /dev/null +++ b/test/regression/issue026.jl @@ -0,0 +1,2 @@ +obj = JSON.parse("{\"a\":2e10}") +@test obj["a"] == 2e10 diff --git a/test/regression/issue057.jl b/test/regression/issue057.jl new file mode 100644 index 0000000..1797a8a --- /dev/null +++ b/test/regression/issue057.jl @@ -0,0 +1,2 @@ +obj = JSON.parse("{\"\U0001d712\":\"\\ud835\\udf12\"}") +@test(obj["๐œ’"] == "๐œ’") diff --git a/test/regression/issue109.jl b/test/regression/issue109.jl new file mode 100644 index 0000000..6dc2d9d --- /dev/null +++ b/test/regression/issue109.jl @@ -0,0 +1,8 @@ +mutable struct t109 + i::Int +end + +let iob = IOBuffer() + JSON.print(iob, t109(1)) + @test get(JSON.parse(String(take!(iob))), "i", 0) == 1 +end diff --git a/test/regression/issue152.jl b/test/regression/issue152.jl new file mode 100644 index 0000000..5b4a01b --- /dev/null +++ b/test/regression/issue152.jl @@ -0,0 +1,2 @@ +@test json([Int64[] Int64[]]) == "[[],[]]" +@test json([Int64[] Int64[]]') == "[]" diff --git a/test/regression/issue163.jl b/test/regression/issue163.jl new file mode 100644 index 0000000..5ace4fa --- /dev/null +++ b/test/regression/issue163.jl @@ -0,0 +1 @@ +@test Float32(JSON.parse(json(2.1f-8))) == 2.1f-8 diff --git a/test/runtests.jl b/test/runtests.jl new file mode 100644 index 0000000..e732e5d --- /dev/null +++ b/test/runtests.jl @@ -0,0 +1,80 @@ +using 
JSON +using Test +using Dates +using Distributed: RemoteChannel +using OffsetArrays + +import DataStructures + +include("json-samples.jl") + +@testset "Parser" begin + @testset "Parser Failures" begin + include("parser/invalid-input.jl") + end + + @testset "parsefile" begin + include("parser/parsefile.jl") + end + + @testset "dicttype" begin + include("parser/dicttype.jl") + end + + @testset "inttype" begin + include("parser/inttype.jl") + end + + @testset "Miscellaneous" begin + # test for single values + @test JSON.parse("true") == true + @test JSON.parse("null") == nothing + @test JSON.parse("\"hello\"") == "hello" + @test JSON.parse("\"a\"") == "a" + @test JSON.parse("1") == 1 + @test JSON.parse("1.5") == 1.5 + @test JSON.parse("[true]") == [true] + end +end + +@testset "Serializer" begin + @testset "Standard Serializer" begin + include("standard-serializer.jl") + end + + @testset "Lowering" begin + include("lowering.jl") + end + + @testset "Custom Serializer" begin + include("serializer.jl") + end +end + +@testset "Integration" begin + # ::Nothing values should be encoded as null + testDict = Dict("a" => nothing) + nothingJson = JSON.json(testDict) + nothingDict = JSON.parse(nothingJson) + @test testDict == nothingDict + + @testset "async" begin + include("async.jl") + end + + @testset "indentation" begin + include("indentation.jl") + end + + @testset "JSON Checker" begin + include("json-checker.jl") + end +end + +@testset "Regression" begin + @testset "for issue #$i" for i in [21, 26, 57, 109, 152, 163] + include("regression/issue$(lpad(string(i), 3, "0")).jl") + end +end + +# Check that printing to the default stdout doesn't fail diff --git a/test/serializer.jl b/test/serializer.jl new file mode 100644 index 0000000..87927fe --- /dev/null +++ b/test/serializer.jl @@ -0,0 +1,95 @@ +module TestSerializer + +using JSON +using Test + +# to define a new serialization behaviour, import these first +import JSON.Serializations: CommonSerialization, 
StandardSerialization +import JSON: StructuralContext + +# those names are long so we can define some type aliases +const CS = CommonSerialization +const SC = StructuralContext + +# for test harness purposes +function sprint_kwarg(f, args...; kwargs...) + b = IOBuffer() + f(b, args...; kwargs...) + String(take!(b)) +end + +# issue #168: Print NaN and Inf as Julia would +struct NaNSerialization <: CS end +JSON.show_json(io::SC, ::NaNSerialization, f::AbstractFloat) = Base.print(io, f) + +@test sprint(JSON.show_json, NaNSerialization(), [NaN, Inf, -Inf, 0.0]) == + "[NaN,Inf,-Inf,0.0]" + +@test sprint_kwarg( + JSON.show_json, + NaNSerialization(), + [NaN, Inf, -Inf, 0.0]; + indent=4 +) == """ +[ + NaN, + Inf, + -Inf, + 0.0 +] +""" + +# issue #170: Print JavaScript functions directly +struct JSSerialization <: CS end +struct JSFunction + data::String +end + +function JSON.show_json(io::SC, ::JSSerialization, f::JSFunction) + first = true + for line in split(f.data, '\n') + if !first + JSON.indent(io) + end + first = false + Base.print(io, line) + end +end + +@test sprint_kwarg(JSON.show_json, JSSerialization(), Any[ + 1, + 2, + JSFunction("function test() {\n return 1;\n}") +]; indent=2) == """ +[ + 1, + 2, + function test() { + return 1; + } +] +""" + +# test serializing a type without any fields +struct SingletonType end +@test_throws ErrorException json(SingletonType()) + +# test printing to stdout +let filename = tempname() + open(filename, "w") do f + redirect_stdout(f) do + JSON.print(Any[1, 2, 3.0]) + end + end + @test read(filename, String) == "[1,2,3.0]" + rm(filename) +end + +# issue #184: serializing a 0-dimensional array +@test sprint(JSON.show_json, JSON.StandardSerialization(), view([184], 1)) == "184" + +# test serializing with a JSONText object +@test json([JSONText("{\"bar\": [3,4,5]}"),314159]) == "[{\"bar\": [3,4,5]},314159]" +@test json([JSONText("{\"bar\": [3,4,5]}"),314159], 1) == "[\n {\n \"bar\": [\n 3,\n 4,\n 5\n ]\n },\n 314159\n]\n" + +end 
diff --git a/test/standard-serializer.jl b/test/standard-serializer.jl new file mode 100644 index 0000000..034bfc4 --- /dev/null +++ b/test/standard-serializer.jl @@ -0,0 +1,72 @@ +@testset "Symbol" begin + symtest = Dict(:symbolarray => [:apple, :pear], :symbolsingleton => :hello) + @test (JSON.json(symtest) == "{\"symbolarray\":[\"apple\",\"pear\"],\"symbolsingleton\":\"hello\"}" + || JSON.json(symtest) == "{\"symbolsingleton\":\"hello\",\"symbolarray\":[\"apple\",\"pear\"]}") +end + +@testset "Floats" begin + @test sprint(JSON.print, [NaN]) == "[null]" + @test sprint(JSON.print, [Inf]) == "[null]" +end + +@testset "Union{Nothing,T} (old Nullable)" begin + @test sprint(JSON.print, Union{Any,Nothing}[nothing]) == "[null]" + @test sprint(JSON.print, Union{Int64,Nothing}[nothing]) == "[null]" + @test sprint(JSON.print, Union{Int64,Nothing}[1]) == "[1]" +end + +@testset "Char" begin + @test json('a') == "\"a\"" + @test json('\\') == "\"\\\\\"" + @test json('\n') == "\"\\n\"" + @test json('๐Ÿฉ') =="\"๐Ÿฉ\"" +end + +@testset "Enum" begin + include("enum.jl") +end + +@testset "Type" begin + @test sprint(JSON.print, Float64) == string("\"Float64\"") +end + +@testset "Module" begin + @test_throws ArgumentError sprint(JSON.print, JSON) +end + +@testset "Dates" begin + @test json(Date("2016-04-13")) == "\"2016-04-13\"" + @test json([Date("2016-04-13"), Date("2016-04-12")]) == "[\"2016-04-13\",\"2016-04-12\"]" + @test json(DateTime("2016-04-13T00:00:00")) == "\"2016-04-13T00:00:00\"" + @test json([DateTime("2016-04-13T00:00:00"), DateTime("2016-04-12T00:00:00")]) == "[\"2016-04-13T00:00:00\",\"2016-04-12T00:00:00\"]" +end + +@testset "Null bytes" begin + zeros = Dict("\0" => "\0") + json_zeros = json(zeros) + @test occursin("\\u0000", json_zeros) + @test !occursin("\\0", json_zeros) + @test JSON.parse(json_zeros) == zeros +end + +@testset "Arrays" begin + # Printing an empty array or Dict shouldn't cause a BoundsError + @test json(String[]) == "[]" + @test json(Dict()) == 
"{}" + + #Multidimensional arrays + @test json([0 1; 2 0]) == "[[0,2],[1,0]]" + @test json(OffsetArray([0 1; 2 0], 0:1, 10:11)) == "[[0,2],[1,0]]" +end + +@testset "Pairs" begin + @test json(1 => 2) == "{\"1\":2}" + @test json(:foo => 2) == "{\"foo\":2}" + @test json([1, 2] => [3, 4]) == "{\"$([1, 2])\":[3,4]}" + @test json([1 => 2]) == "[{\"1\":2}]" +end + +@testset "Sets" begin + @test json(Set()) == "[]" + @test json(Set([1, 2])) in ["[1,2]", "[2,1]"] +end From 4141403a2dc8210c08b045e14772639c40da5e98 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:26:50 -0800 Subject: [PATCH 11/24] Squashed 'packages/CancellationTokens/' content from commit 9130370 git-subtree-dir: packages/CancellationTokens git-subtree-split: 9130370b57161b38d5a8b2bb0f947f60d7adc988 --- .../workflows/jlpkgbutler-butler-workflow.yml | 20 ++++ .../jlpkgbutler-ci-master-workflow.yml | 40 +++++++ .../workflows/jlpkgbutler-ci-pr-workflow.yml | 38 +++++++ .../jlpkgbutler-codeformat-pr-workflow.yml | 21 ++++ .../jlpkgbutler-compathelper-workflow.yml | 19 ++++ .../workflows/jlpkgbutler-tagbot-workflow.yml | 13 +++ .gitignore | 8 ++ .jlpkgbutler.toml | 1 + LICENSE.md | 7 ++ Project.toml | 13 +++ README.md | 5 + src/CancellationTokens.jl | 106 ++++++++++++++++++ src/augment_base.jl | 19 ++++ src/event.jl | 45 ++++++++ test/runtests.jl | 37 ++++++ 15 files changed, 392 insertions(+) create mode 100644 .github/workflows/jlpkgbutler-butler-workflow.yml create mode 100644 .github/workflows/jlpkgbutler-ci-master-workflow.yml create mode 100644 .github/workflows/jlpkgbutler-ci-pr-workflow.yml create mode 100644 .github/workflows/jlpkgbutler-codeformat-pr-workflow.yml create mode 100644 .github/workflows/jlpkgbutler-compathelper-workflow.yml create mode 100644 .github/workflows/jlpkgbutler-tagbot-workflow.yml create mode 100644 .gitignore create mode 100644 .jlpkgbutler.toml create mode 100644 LICENSE.md create mode 100644 Project.toml create mode 100644 README.md create mode 100644 
src/CancellationTokens.jl create mode 100644 src/augment_base.jl create mode 100644 src/event.jl create mode 100644 test/runtests.jl diff --git a/.github/workflows/jlpkgbutler-butler-workflow.yml b/.github/workflows/jlpkgbutler-butler-workflow.yml new file mode 100644 index 0000000..f7894d9 --- /dev/null +++ b/.github/workflows/jlpkgbutler-butler-workflow.yml @@ -0,0 +1,20 @@ +name: Run the Julia Package Butler + +on: + push: + branches: + - master + schedule: + - cron: '0 */1 * * *' + +jobs: + butler: + name: "Run Package Butler" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: davidanthoff/julia-pkgbutler@releases/v1 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + ssh-private-key: ${{ secrets.JLPKGBUTLER_TOKEN }} + channel: stable diff --git a/.github/workflows/jlpkgbutler-ci-master-workflow.yml b/.github/workflows/jlpkgbutler-ci-master-workflow.yml new file mode 100644 index 0000000..dba8521 --- /dev/null +++ b/.github/workflows/jlpkgbutler-ci-master-workflow.yml @@ -0,0 +1,40 @@ +name: Run CI on master + +on: + push: + branches: + - master + +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + julia-version: ['1.0', '1.1', '1.2', '1.3', '1.4', '1.5', '1.6'] + julia-arch: [x64, x86] + os: [ubuntu-latest, windows-latest, macOS-latest] + exclude: + - os: macOS-latest + julia-arch: x86 + + steps: + - uses: actions/checkout@v2 + - uses: julia-actions/setup-julia@latest + with: + version: ${{ matrix.julia-version }} + arch: ${{ matrix.julia-arch }} + - uses: julia-actions/julia-buildpkg@latest + env: + PYTHON: "" + - uses: julia-actions/julia-runtest@latest + env: + PYTHON: "" + - uses: julia-actions/julia-processcoverage@v1 + - uses: codecov/codecov-action@v1 + with: + file: ./lcov.info + flags: unittests + name: codecov-umbrella + fail_ci_if_error: false + token: ${{ secrets.CODECOV_TOKEN }} + \ No newline at end of file diff --git a/.github/workflows/jlpkgbutler-ci-pr-workflow.yml 
b/.github/workflows/jlpkgbutler-ci-pr-workflow.yml new file mode 100644 index 0000000..339da96 --- /dev/null +++ b/.github/workflows/jlpkgbutler-ci-pr-workflow.yml @@ -0,0 +1,38 @@ +name: Run CI on PR + +on: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + test: + runs-on: ${{ matrix.os }} + strategy: + matrix: + julia-version: ['1.0', '1.1', '1.2', '1.3', '1.4', '1.5', '1.6'] + julia-arch: [x64, x86] + os: [ubuntu-latest, windows-latest, macOS-latest] + exclude: + - os: macOS-latest + julia-arch: x86 + + steps: + - uses: actions/checkout@v2 + - uses: julia-actions/setup-julia@latest + with: + version: ${{ matrix.julia-version }} + arch: ${{ matrix.julia-arch }} + - uses: julia-actions/julia-buildpkg@latest + env: + PYTHON: "" + - uses: julia-actions/julia-runtest@latest + env: + PYTHON: "" + - uses: julia-actions/julia-processcoverage@v1 + - uses: codecov/codecov-action@v1 + with: + file: ./lcov.info + flags: unittests + name: codecov-umbrella + fail_ci_if_error: false + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/jlpkgbutler-codeformat-pr-workflow.yml b/.github/workflows/jlpkgbutler-codeformat-pr-workflow.yml new file mode 100644 index 0000000..88b7a00 --- /dev/null +++ b/.github/workflows/jlpkgbutler-codeformat-pr-workflow.yml @@ -0,0 +1,21 @@ +name: Code Formatting + +on: + push: + branches: + - master + +jobs: + format: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: julia-actions/julia-codeformat@releases/v1 + - name: Create Pull Request + uses: peter-evans/create-pull-request@v3 + with: + token: ${{ secrets.GITHUB_TOKEN }} + commit-message: Format files using DocumentFormat + title: '[AUTO] Format files using DocumentFormat' + body: '[DocumentFormat.jl](https://github.com/julia-vscode/DocumentFormat.jl) would suggest these formatting changes' + labels: no changelog diff --git a/.github/workflows/jlpkgbutler-compathelper-workflow.yml b/.github/workflows/jlpkgbutler-compathelper-workflow.yml 
new file mode 100644 index 0000000..e41d211 --- /dev/null +++ b/.github/workflows/jlpkgbutler-compathelper-workflow.yml @@ -0,0 +1,19 @@ +name: Run CompatHelper + +on: + schedule: + - cron: '00 * * * *' + issues: + types: [opened, reopened] + +jobs: + compathelper: + name: "Run CompatHelper.jl" + runs-on: ubuntu-latest + steps: + - name: Pkg.add("CompatHelper") + run: julia -e 'using Pkg; Pkg.add("CompatHelper")' + - name: CompatHelper.main() + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: julia -e 'using CompatHelper; CompatHelper.main()' diff --git a/.github/workflows/jlpkgbutler-tagbot-workflow.yml b/.github/workflows/jlpkgbutler-tagbot-workflow.yml new file mode 100644 index 0000000..8c6b404 --- /dev/null +++ b/.github/workflows/jlpkgbutler-tagbot-workflow.yml @@ -0,0 +1,13 @@ +name: TagBot +on: + schedule: + - cron: 0 * * * * +jobs: + TagBot: + runs-on: ubuntu-latest + steps: + - uses: JuliaRegistries/TagBot@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + ssh: ${{ secrets.JLPKGBUTLER_TOKEN }} + branches: true diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d788581 --- /dev/null +++ b/.gitignore @@ -0,0 +1,8 @@ +*.jl.cov +*.jl.*.cov +*.jl.*.mem +*.junk.jl +.vscode +*.ignore.* +Manifest.toml +lcov.info diff --git a/.jlpkgbutler.toml b/.jlpkgbutler.toml new file mode 100644 index 0000000..b72304f --- /dev/null +++ b/.jlpkgbutler.toml @@ -0,0 +1 @@ +template = "bach" diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..d7eb1ee --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,7 @@ +Copyright 2020-2021 David Anthoff + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject 
to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Project.toml b/Project.toml new file mode 100644 index 0000000..7519a81 --- /dev/null +++ b/Project.toml @@ -0,0 +1,13 @@ +name = "CancellationTokens" +uuid = "2e8d271d-f2e2-407b-a864-17eb2156783e" +authors = ["David Anthoff "] +version = "1.0.0" + +[extras] +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[compat] +julia = "1" + +[targets] +test = ["Test"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..86931f5 --- /dev/null +++ b/README.md @@ -0,0 +1,5 @@ +# CancellationTokens + +A Julia implementation of .Net's Cancellation Framework. See [here](https://devblogs.microsoft.com/pfxteam/net-4-cancellation-framework/) and [here](https://docs.microsoft.com/en-us/dotnet/standard/threading/cancellation-in-managed-threads) for details. + +The package is currently _not_ thread safe, so it should only be used with single threaded tasks for now. 
diff --git a/src/CancellationTokens.jl b/src/CancellationTokens.jl new file mode 100644 index 0000000..6e51e5c --- /dev/null +++ b/src/CancellationTokens.jl @@ -0,0 +1,106 @@ +module CancellationTokens + +export CancellationTokenSource, get_token, is_cancellation_requested, cancel, OperationCanceledException + +include("event.jl") + +@enum CancellationTokenSourceStates NotCanceledState=1 NotifyingState=2 NotifyingCompleteState=3 + +mutable struct CancellationTokenSource + _state::CancellationTokenSourceStates + _timer::Union{Nothing,Timer} + _kernel_event::Union{Nothing,Event} # TODO Event is Julia > 1.1, make it work on 1.0 + + function CancellationTokenSource() + return new(NotCanceledState, nothing, nothing) + end +end + +function CancellationTokenSource(timespan_in_seconds::Real) + x = CancellationTokenSource() + + x._timer = Timer(timespan_in_seconds) do _ + _internal_notify(x) + end + + return x +end + +function _internal_notify(x::CancellationTokenSource) + if x._state==NotCanceledState + x._state = NotifyingState + + if x._timer!==nothing + close(x._timer) + x._timer = nothing + end + + if x._kernel_event!==nothing + notify(x._kernel_event) + x._kernel_event = nothing + end + + x._state = NotifyingCompleteState + end +end + +function cancel(x::CancellationTokenSource) + _internal_notify(x) + + return +end + +is_cancellation_requested(x::CancellationTokenSource) = x._state > NotCanceledState + +function _waithandle(x::CancellationTokenSource) + if x._kernel_event===nothing + x._kernel_event = Event() + end + + return x._kernel_event +end + +# CancellationToken + +struct CancellationToken + _source::CancellationTokenSource +end + +get_token(x::CancellationTokenSource) = CancellationToken(x) + +is_cancellation_requested(x::CancellationToken) = is_cancellation_requested(x._source) + +_waithandle(x::CancellationToken) = _waithandle(x._source) + +function Base.wait(x::CancellationToken) + if is_cancellation_requested(x) + return + else + wait(_waithandle(x)) + 
end +end + +# OperationCanceledException + +struct OperationCanceledException <: Exception + _token::CancellationToken +end + +get_token(x::OperationCanceledException) = x._token + +function CancellationTokenSource(tokens::CancellationToken...) + x = CancellationTokenSource() + + for t in tokens + @async begin + wait(t) + _internal_notify(x) + end + end + + return x +end + +include("augment_base.jl") + +end # module diff --git a/src/augment_base.jl b/src/augment_base.jl new file mode 100644 index 0000000..a3d4cb1 --- /dev/null +++ b/src/augment_base.jl @@ -0,0 +1,19 @@ +function Base.sleep(sec::Real, token::CancellationToken) + # Create a cancel source with a timeout + timer_src = CancellationTokenSource(sec) + + timer_token = get_token(timer_src) + + # Create a cancel source that cancels either if the timeout source cancels, + # or when the passed token cancels + combined = CancellationTokenSource(timer_token, token) + + # Wait for the combined source to cancel + wait(get_token(combined)) + + if is_cancellation_requested(timer_src) + return + else + throw(OperationCanceledException(token)) + end +end diff --git a/src/event.jl b/src/event.jl new file mode 100644 index 0000000..73ed9ff --- /dev/null +++ b/src/event.jl @@ -0,0 +1,45 @@ +@static if VERSION < v"1.1" + mutable struct Event + lock::Base.Threads.Mutex + q::Vector{Task} + set::Bool + # TODO: use a Condition with its paired lock + Event() = new(Base.Threads.Mutex(), Task[], false) + end + + function Base.wait(e::Event) + e.set && return + lock(e.lock) + while !e.set + ct = current_task() + push!(e.q, ct) + unlock(e.lock) + try + wait() + catch + filter!(x->x!==ct, e.q) + rethrow() + end + lock(e.lock) + end + unlock(e.lock) + return nothing + end + + function Base.notify(e::Event) + lock(e.lock) + if !e.set + e.set = true + for t in e.q + schedule(t) + end + empty!(e.q) + end + unlock(e.lock) + return nothing + end +elseif VERSION < v"1.2" + using Base.Threads: Event +else + using Base: Event +end diff 
--git a/test/runtests.jl b/test/runtests.jl new file mode 100644 index 0000000..3eecdce --- /dev/null +++ b/test/runtests.jl @@ -0,0 +1,37 @@ +using Test: get_test_counts +using CancellationTokens +using Test + +@testset "CancellationTokens" begin + + src = CancellationTokenSource() + cancel(src) + wait(get_token(src)) + + src = CancellationTokenSource() + @async begin + sleep(0.1) + cancel(src) + end + wait(get_token(src)) + + src = CancellationTokenSource(0.1) + wait(get_token(src)) + + src = CancellationTokenSource() + @async begin + sleep(0.1) + cancel(src) + end + wait(get_token(src)) + + src = CancellationTokenSource() + sleep(0.1, get_token(src)) + + src = CancellationTokenSource() + @async begin + sleep(0.1) + cancel(src) + end + @test_throws OperationCanceledException sleep(20.0, get_token(src)) +end From cdcc2dae99d390df68544228aed1b230a85e3f88 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:32:15 -0800 Subject: [PATCH 12/24] Add dynamic analysis process scaffolding --- .../JuliaDynamicAnalysisProcess/Project.toml | 30 +++++++++++++++++++ .../src/JuliaDynamicAnalysisProcess.jl | 2 ++ scripts/update_app_environments.jl | 10 +++---- scripts/update_vendored_packages.jl | 2 +- 4 files changed, 38 insertions(+), 6 deletions(-) create mode 100644 juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml create mode 100644 juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml new file mode 100644 index 0000000..12ea29a --- /dev/null +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml @@ -0,0 +1,30 @@ +name = "JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +authors = ["David Anthoff "] +version = "1.0.0" + +[deps] +Base64 = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +Dates = 
"ade2ca70-3891-5945-98fb-dc099432e06a" +Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" +FileWatching = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" +InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433" +Mmap = "a63ad114-7e13-5084-954f-fe012c677804" +Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +REPL = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +Unicode = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" +Sockets = "6462fe0b-24de-5631-8697-dd941f90decc" +Logging = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[compat] +julia = "1" + +[extras] +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[targets] +test = ["Test"] diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl new file mode 100644 index 0000000..b39f4c5 --- /dev/null +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl @@ -0,0 +1,2 @@ +module JuliaDynamicAnalysisProcess +end diff --git a/scripts/update_app_environments.jl b/scripts/update_app_environments.jl index 3e55450..7a0f51a 100644 --- a/scripts/update_app_environments.jl +++ b/scripts/update_app_environments.jl @@ -15,18 +15,18 @@ julia_versions = [ ] for i in julia_versions - version_path = normpath(joinpath(@__DIR__, "../testprocess/environments/v$i")) + version_path = normpath(joinpath(@__DIR__, "../juliadynamicanalysisprocess/environments/v$i")) mkpath(version_path) - run(Cmd(`julia +$i --project=. -e 'using Pkg; Pkg.develop(PackageSpec(path="../../TestItemServer"))'`, dir=version_path)) + run(Cmd(`julia +$i --project=. 
-e 'using Pkg; Pkg.develop(PackageSpec(path="../../JuliaDynamicAnalysisProcess"))'`, dir=version_path)) end -version_path = normpath(joinpath(@__DIR__, "../testprocess/environments/fallback")) +version_path = normpath(joinpath(@__DIR__, "../juliadynamicanalysisprocess/environments/fallback")) mkpath(version_path) -run(Cmd(`julia +nightly --project=. -e 'using Pkg; Pkg.develop(PackageSpec(path="../../TestItemServer"))'`, dir=version_path)) +run(Cmd(`julia +nightly --project=. -e 'using Pkg; Pkg.develop(PackageSpec(path="../../JuliaDynamicAnalysisProcess"))'`, dir=version_path)) function replace_backslash_in_manifest(version) - filename = joinpath(@__DIR__, "../testprocess/environments/v$version/Manifest.toml") + filename = joinpath(@__DIR__, "../juliadynamicanalysisprocess/environments/v$version/Manifest.toml") manifest_content = read(filename, String) new_content = replace(manifest_content, "\\\\"=>'/') diff --git a/scripts/update_vendored_packages.jl b/scripts/update_vendored_packages.jl index a053dea..f33c7f4 100644 --- a/scripts/update_vendored_packages.jl +++ b/scripts/update_vendored_packages.jl @@ -21,7 +21,7 @@ packages = Dict( # "Revise" => "timholy/Revise.jl", # "TestEnv" => "JuliaTesting/TestEnv.jl", # "URIParser" => "JuliaWeb/URIParser.jl", - # "CancellationTokens" => "davidanthoff/CancellationTokens.jl" + "CancellationTokens" => "davidanthoff/CancellationTokens.jl" ) latest_versions = Dict{String,VersionNumber}() From c54f405a5abc76af908e050f60175eb6b7c8d5f9 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:33:08 -0800 Subject: [PATCH 13/24] Update .gitignore --- .gitignore | 1 + scripts/Manifest.toml | 382 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 383 insertions(+) create mode 100644 scripts/Manifest.toml diff --git a/.gitignore b/.gitignore index 57e80b7..0e17992 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ docs/build bin/run_julia bin/docs cspell.json +!scripts/Manifest.toml diff --git 
a/scripts/Manifest.toml b/scripts/Manifest.toml new file mode 100644 index 0000000..ae4a0f3 --- /dev/null +++ b/scripts/Manifest.toml @@ -0,0 +1,382 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.12.2" +manifest_format = "2.0" +project_hash = "e2fa6d3ba1b7ebc2b382edb9947dbc8f7d9976a3" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" +version = "1.11.0" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +version = "1.11.0" + +[[deps.BitFlags]] +git-tree-sha1 = "0691e34b3bb8be9307330f88d1a3c3f25466c24d" +uuid = "d1d4a3ce-64b1-5f1a-9ba4-7e7e69966f35" +version = "0.1.9" + +[[deps.CodecZlib]] +deps = ["TranscodingStreams", "Zlib_jll"] +git-tree-sha1 = "962834c22b66e32aa10f7611c08c8ca4e20749a9" +uuid = "944b1d66-785c-5afd-91f1-9de20f533193" +version = "0.7.8" + +[[deps.Compat]] +deps = ["TOML", "UUIDs"] +git-tree-sha1 = "9d8a54ce4b17aa5bdce0ea5c34bc5e7c340d16ad" +uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" +version = "4.18.1" +weakdeps = ["Dates", "LinearAlgebra"] + + [deps.Compat.extensions] + CompatLinearAlgebraExt = "LinearAlgebra" + +[[deps.CompilerSupportLibraries_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" +version = "1.3.0+1" + +[[deps.ConcurrentUtilities]] +deps = ["Serialization", "Sockets"] +git-tree-sha1 = "d9d26935a0bcffc87d2613ce14c527c99fc543fd" +uuid = "f0e56b4a-5159-44fe-b623-3e5288b988bb" +version = "2.5.0" + +[[deps.DataAPI]] +git-tree-sha1 = "abe83f3a2f1b857aac70ef8b269080af17764bbe" +uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" +version = "1.16.0" + +[[deps.DataStructures]] +deps = ["Compat", "InteractiveUtils", "OrderedCollections"] +git-tree-sha1 = "4e1fe97fdaed23e9dc21d4d664bea76b65fc50a0" +uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" +version = "0.18.22" + +[[deps.DataValueInterfaces]] +git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" +uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464" +version = "1.0.0" + 
+[[deps.DataValues]] +deps = ["DataValueInterfaces", "Dates"] +git-tree-sha1 = "d88a19299eba280a6d062e135a43f00323ae70bf" +uuid = "e7dc6d0d-1eca-5fa6-8ad6-5aecde8b7ea5" +version = "0.4.13" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" +version = "1.11.0" + +[[deps.ExceptionUnwrapping]] +deps = ["Test"] +git-tree-sha1 = "d36f682e590a83d63d1c7dbd287573764682d12a" +uuid = "460bff9d-24e4-43bc-9d9f-a8973cb893f4" +version = "0.1.11" + +[[deps.GitHub]] +deps = ["Base64", "Dates", "HTTP", "JSON", "MbedTLS", "Sockets", "SodiumSeal", "URIs"] +git-tree-sha1 = "12d0b1886e60c9d2a3d42ef1612bdfbedec68b42" +uuid = "bc5e4493-9b4d-5f90-b8aa-2b2bcaad7a26" +version = "5.11.0" + +[[deps.HTTP]] +deps = ["Base64", "CodecZlib", "ConcurrentUtilities", "Dates", "ExceptionUnwrapping", "Logging", "LoggingExtras", "MbedTLS", "NetworkOptions", "OpenSSL", "PrecompileTools", "Random", "SimpleBufferStream", "Sockets", "URIs", "UUIDs"] +git-tree-sha1 = "5e6fe50ae7f23d171f44e311c2960294aaa0beb5" +uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3" +version = "1.10.19" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +version = "1.11.0" + +[[deps.IterableTables]] +deps = ["DataValues", "IteratorInterfaceExtensions", "Requires", "TableTraits", "TableTraitsUtils"] +git-tree-sha1 = "70300b876b2cebde43ebc0df42bc8c94a144e1b4" +uuid = "1c8ee90f-4401-5389-894e-7a04a3dc0f4d" +version = "1.0.0" + +[[deps.IteratorInterfaceExtensions]] +git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856" +uuid = "82899510-4779-5014-852e-03e436cf321d" +version = "1.0.0" + +[[deps.JLLWrappers]] +deps = ["Artifacts", "Preferences"] +git-tree-sha1 = "0533e564aae234aff59ab625543145446d8b6ec2" +uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" +version = "1.7.1" + +[[deps.JSON]] +deps = ["Dates", "Logging", "Parsers", "PrecompileTools", "StructUtils", "UUIDs", "Unicode"] +git-tree-sha1 = "5b6bb73f555bc753a6153deec3717b8904f5551c" +uuid = 
"682c06a0-de6a-54ab-a142-c8b1cf79cde6" +version = "1.3.0" + + [deps.JSON.extensions] + JSONArrowExt = ["ArrowTypes"] + + [deps.JSON.weakdeps] + ArrowTypes = "31f734f8-188a-4ce0-8406-c8a06bd891cd" + +[[deps.JuliaSyntaxHighlighting]] +deps = ["StyledStrings"] +uuid = "ac6e5ff7-fb65-4e79-a425-ec3bc9c03011" +version = "1.12.0" + +[[deps.LibGit2]] +deps = ["LibGit2_jll", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +version = "1.11.0" + +[[deps.LibGit2_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "OpenSSL_jll"] +uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5" +version = "1.9.0+0" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "OpenSSL_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.3+1" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" +version = "1.11.0" + +[[deps.LinearAlgebra]] +deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"] +uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" +version = "1.12.0" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +version = "1.11.0" + +[[deps.LoggingExtras]] +deps = ["Dates", "Logging"] +git-tree-sha1 = "f00544d95982ea270145636c181ceda21c4e2575" +uuid = "e6f89c97-d47a-5376-807f-9c37f3926c36" +version = "1.2.0" + +[[deps.MacroTools]] +git-tree-sha1 = "1e0228a030642014fe5cfe68c2c0a818f9e3f522" +uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" +version = "0.5.16" + +[[deps.Markdown]] +deps = ["Base64", "JuliaSyntaxHighlighting", "StyledStrings"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +version = "1.11.0" + +[[deps.MbedTLS]] +deps = ["Dates", "MbedTLS_jll", "MozillaCACerts_jll", "NetworkOptions", "Random", "Sockets"] +git-tree-sha1 = "c067a280ddc25f196b5e7df3877c6b226d390aaf" +uuid = "739be429-bea8-5141-9913-cc70e7f3736d" +version = "1.1.9" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "ff69a2b1330bcb730b9ac1ab7dd680176f5896b8" +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = 
"2.28.1010+0" + +[[deps.Missings]] +deps = ["DataAPI"] +git-tree-sha1 = "ec4f7fbeab05d7747bdf98eb74d130a2a2ed298d" +uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" +version = "1.2.0" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2025.5.20" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.3.0" + +[[deps.OpenBLAS_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] +uuid = "4536629a-c528-5b80-bd46-f80d51c5b363" +version = "0.3.29+0" + +[[deps.OpenSSL]] +deps = ["BitFlags", "Dates", "MozillaCACerts_jll", "NetworkOptions", "OpenSSL_jll", "Sockets"] +git-tree-sha1 = "1d1aaa7d449b58415f97d2839c318b70ffb525a0" +uuid = "4d8831e6-92b7-49fb-bdf8-b643e874388c" +version = "1.6.1" + +[[deps.OpenSSL_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" +version = "3.5.4+0" + +[[deps.OrderedCollections]] +git-tree-sha1 = "05868e21324cede2207c6f0f466b4bfef6d5e7ee" +uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" +version = "1.8.1" + +[[deps.Parsers]] +deps = ["Dates", "PrecompileTools", "UUIDs"] +git-tree-sha1 = "7d2f8f21da5db6a806faf7b9b292296da42b2810" +uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" +version = "2.8.3" + +[[deps.PrecompileTools]] +deps = ["Preferences"] +git-tree-sha1 = "07a921781cab75691315adc645096ed5e370cb77" +uuid = "aea7be01-6a6a-4083-8856-8a6e6704d82a" +version = "1.3.3" + +[[deps.Preferences]] +deps = ["TOML"] +git-tree-sha1 = "0f27480397253da18fe2c12a4ba4eb9eb208bf3d" +uuid = "21216c6a-2e73-6563-6e65-726566657250" +version = "1.5.0" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" +version = "1.11.0" + +[[deps.Query]] +deps = ["DataValues", "IterableTables", "MacroTools", "QueryOperators", "Statistics"] +git-tree-sha1 = "a66aa7ca6f5c29f0e303ccef5c8bd55067df9bbe" +uuid = "1a8c2f83-1ff3-5112-b086-8aa67b057ba1" +version = "1.0.0" + +[[deps.QueryOperators]] +deps = ["DataStructures", "DataValues", 
"IteratorInterfaceExtensions", "TableShowUtils"] +git-tree-sha1 = "911c64c204e7ecabfd1872eb93c49b4e7c701f02" +uuid = "2aef5ad7-51ca-5a8f-8e88-e75cf067b44b" +version = "0.9.3" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +version = "1.11.0" + +[[deps.Requires]] +deps = ["UUIDs"] +git-tree-sha1 = "62389eeff14780bfe55195b7204c0d8738436d64" +uuid = "ae029012-a4dd-5104-9daa-d747884805df" +version = "1.3.1" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +version = "1.11.0" + +[[deps.SimpleBufferStream]] +git-tree-sha1 = "f305871d2f381d21527c770d4788c06c097c9bc1" +uuid = "777ac1f9-54b0-4bf8-805c-2214025038e7" +version = "1.2.0" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +version = "1.11.0" + +[[deps.SodiumSeal]] +deps = ["Base64", "Libdl", "libsodium_jll"] +git-tree-sha1 = "80cef67d2953e33935b41c6ab0a178b9987b1c99" +uuid = "2133526b-2bfb-4018-ac12-889fb3908a75" +version = "0.1.1" + +[[deps.Statistics]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "ae3bb1eb3bba077cd276bc5cfc337cc65c3075c0" +uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" +version = "1.11.1" + + [deps.Statistics.extensions] + SparseArraysExt = ["SparseArrays"] + + [deps.Statistics.weakdeps] + SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + +[[deps.StructUtils]] +deps = ["Dates", "UUIDs"] +git-tree-sha1 = "79529b493a44927dd5b13dde1c7ce957c2d049e4" +uuid = "ec057cc2-7a8d-4b58-b3b3-92acb9f63b42" +version = "2.6.0" + + [deps.StructUtils.extensions] + StructUtilsMeasurementsExt = ["Measurements"] + StructUtilsTablesExt = ["Tables"] + + [deps.StructUtils.weakdeps] + Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7" + Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" + +[[deps.StyledStrings]] +uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b" +version = "1.11.0" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" 
+version = "1.0.3" + +[[deps.TableShowUtils]] +deps = ["DataValues", "Dates", "JSON", "Markdown", "Unicode"] +git-tree-sha1 = "d52a581bba952fcf5a2db7a416ab2834f7c9e208" +uuid = "5e66a065-1f0a-5976-b372-e0b8c017ca10" +version = "0.2.7" + +[[deps.TableTraits]] +deps = ["IteratorInterfaceExtensions"] +git-tree-sha1 = "c06b2f539df1c6efa794486abfb6ed2022561a39" +uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c" +version = "1.0.1" + +[[deps.TableTraitsUtils]] +deps = ["DataValues", "IteratorInterfaceExtensions", "Missings", "TableTraits"] +git-tree-sha1 = "78fecfe140d7abb480b53a44f3f85b6aa373c293" +uuid = "382cd787-c1b6-5bf2-a167-d5b971a19bda" +version = "1.0.2" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +version = "1.11.0" + +[[deps.TranscodingStreams]] +git-tree-sha1 = "0c45878dcfdcfa8480052b6ab162cdd138781742" +uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" +version = "0.11.3" + +[[deps.URIs]] +git-tree-sha1 = "bef26fb046d031353ef97a82e3fdb6afe7f21b1a" +uuid = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4" +version = "1.6.1" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +version = "1.11.0" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" +version = "1.11.0" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.3.1+2" + +[[deps.libblastrampoline_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" +version = "5.15.0+0" + +[[deps.libsodium_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "011b0a7331b41c25524b64dc42afc9683ee89026" +uuid = "a9144af2-ca23-56d9-984f-0d03f7b5ccf8" +version = "1.0.21+0" From f056176d0539351a0c2a48be24e7a07c93e4fc84 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:49:40 -0800 Subject: [PATCH 14/24] Update .gitignore --- .gitignore | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) 
diff --git a/.gitignore b/.gitignore index 0e17992..7ad80a8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,6 @@ *.jl.cov *.jl.*.cov *.jl.mem -cache/docs.cache .vscode -Manifest.toml +./Manifest.toml docs/build -bin/run_julia -bin/docs -cspell.json -!scripts/Manifest.toml From a72d98f33760281cc0fafc76d16ee2f552ca4ab2 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:50:02 -0800 Subject: [PATCH 15/24] Add docs manifest --- docs/Manifest.toml | 336 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 336 insertions(+) create mode 100644 docs/Manifest.toml diff --git a/docs/Manifest.toml b/docs/Manifest.toml new file mode 100644 index 0000000..5a008e3 --- /dev/null +++ b/docs/Manifest.toml @@ -0,0 +1,336 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.11.1" +manifest_format = "2.0" +project_hash = "5db6b303ca03509cc4e73ab148e05599c266554b" + +[[deps.ANSIColoredPrinters]] +git-tree-sha1 = "574baf8110975760d391c710b6341da1afa48d8c" +uuid = "a4c015fc-c6ff-483c-b24f-f7ea428134e9" +version = "0.0.1" + +[[deps.AbstractTrees]] +git-tree-sha1 = "2d9c9a55f9c93e8887ad391fbae72f8ef55e1177" +uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" +version = "0.4.5" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.2" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" +version = "1.11.0" + +[[deps.AutoHashEquals]] +git-tree-sha1 = "4ec6b48702dacc5994a835c1189831755e4e76ef" +uuid = "15f4f7f2-30c1-5605-9d31-71845cf9641f" +version = "2.2.0" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +version = "1.11.0" + +[[deps.CancellationTokens]] +git-tree-sha1 = "eacd96906188a977d9cd4b00c48552cdd46e5f47" +uuid = "2e8d271d-f2e2-407b-a864-17eb2156783e" +version = "1.0.0" + +[[deps.CodecZlib]] +deps = ["TranscodingStreams", "Zlib_jll"] +git-tree-sha1 = "bce6804e5e6044c6daab27bb533d1295e4a2e759" +uuid = "944b1d66-785c-5afd-91f1-9de20f533193" +version = 
"0.7.6" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" +version = "1.11.0" + +[[deps.DocStringExtensions]] +deps = ["LibGit2"] +git-tree-sha1 = "2fb1e02f2b635d0845df5d7c167fec4dd739b00d" +uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" +version = "0.9.3" + +[[deps.Documenter]] +deps = ["ANSIColoredPrinters", "AbstractTrees", "Base64", "CodecZlib", "Dates", "DocStringExtensions", "Downloads", "Git", "IOCapture", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "MarkdownAST", "Pkg", "PrecompileTools", "REPL", "RegistryInstances", "SHA", "TOML", "Test", "Unicode"] +git-tree-sha1 = "d0ea2c044963ed6f37703cead7e29f70cba13d7e" +uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4" +version = "1.8.0" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" + +[[deps.ExceptionUnwrapping]] +deps = ["Test"] +git-tree-sha1 = "d36f682e590a83d63d1c7dbd287573764682d12a" +uuid = "460bff9d-24e4-43bc-9d9f-a8973cb893f4" +version = "0.1.11" + +[[deps.Expat_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "cc5231d52eb1771251fbd37171dbc408bcc8a1b6" +uuid = "2e619515-83b5-522b-bb60-26c02a35a201" +version = "2.6.4+0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" +version = "1.11.0" + +[[deps.Git]] +deps = ["Git_jll"] +git-tree-sha1 = "04eff47b1354d702c3a85e8ab23d539bb7d5957e" +uuid = "d7ba0133-e1db-5d97-8f8c-041e4b3a1eb2" +version = "1.3.1" + +[[deps.Git_jll]] +deps = ["Artifacts", "Expat_jll", "JLLWrappers", "LibCURL_jll", "Libdl", "Libiconv_jll", "OpenSSL_jll", "PCRE2_jll", "Zlib_jll"] +git-tree-sha1 = "399f4a308c804b446ae4c91eeafadb2fe2c54ff9" +uuid = "f8c6e375-362e-5223-8a59-34ff63f689eb" +version = "2.47.1+0" + +[[deps.IOCapture]] +deps = ["Logging", "Random"] +git-tree-sha1 = "b6d6bfdd7ce25b0f9b2f6b3dd56b2673a66c8770" +uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89" +version = "0.2.5" + 
+[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +version = "1.11.0" + +[[deps.JLLWrappers]] +deps = ["Artifacts", "Preferences"] +git-tree-sha1 = "be3dc50a92e5a386872a493a10050136d4703f9b" +uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" +version = "1.6.1" + +[[deps.JSON]] +deps = ["Dates", "Mmap", "Parsers", "Unicode"] +git-tree-sha1 = "31e996f0a15c7b280ba9f76636b3ff9e2ae58c9a" +uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +version = "0.21.4" + +[[deps.JuliaSyntax]] +git-tree-sha1 = "937da4713526b96ac9a178e2035019d3b78ead4a" +uuid = "70703baa-626e-46a2-a12c-08ffd08c73b4" +version = "0.4.10" + +[[deps.JuliaWorkspaces]] +deps = ["AutoHashEquals", "CancellationTokens", "JuliaSyntax", "Pkg", "Salsa", "TestItemDetection", "UUIDs"] +path = ".." +uuid = "e554591c-7f10-434f-9f27-2097f62a04fd" +version = "4.6.1-DEV" + +[[deps.LazilyInitializedFields]] +git-tree-sha1 = "0f2da712350b020bc3957f269c9caad516383ee0" +uuid = "0e77f7df-68c5-4e49-93ce-4cd80f5598bf" +version = "1.3.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.4" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "8.6.0+0" + +[[deps.LibGit2]] +deps = ["Base64", "LibGit2_jll", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +version = "1.11.0" + +[[deps.LibGit2_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll"] +uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5" +version = "1.7.2+0" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.0+1" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" +version = "1.11.0" + +[[deps.Libiconv_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = 
"61dfdba58e585066d8bce214c5a51eaa0539f269" +uuid = "94ce4f54-9a6c-5748-9c1c-f9c7231a4531" +version = "1.17.0+1" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +version = "1.11.0" + +[[deps.MacroTools]] +deps = ["Markdown", "Random"] +git-tree-sha1 = "2fa9ee3e63fd3a4f7a9a4f4744a52f4856de82df" +uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" +version = "0.5.13" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +version = "1.11.0" + +[[deps.MarkdownAST]] +deps = ["AbstractTrees", "Markdown"] +git-tree-sha1 = "465a70f0fc7d443a00dcdc3267a497397b8a3899" +uuid = "d0879d2d-cac2-40c8-9cee-1863dc0c7391" +version = "0.1.2" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.6+0" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" +version = "1.11.0" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2023.12.12" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" + +[[deps.OpenSSL_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl"] +git-tree-sha1 = "7493f61f55a6cce7325f197443aa80d32554ba10" +uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" +version = "3.0.15+1" + +[[deps.PCRE2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "efcefdf7-47ab-520b-bdef-62a2eaa19f15" +version = "10.42.0+1" + +[[deps.Parsers]] +deps = ["Dates", "PrecompileTools", "UUIDs"] +git-tree-sha1 = "8489905bcdbcfac64d1daa51ca07c0d8f0283821" +uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" +version = "2.8.1" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "Random", "SHA", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.11.0" +weakdeps = ["REPL"] + + [deps.Pkg.extensions] + REPLExt = "REPL" + +[[deps.PrecompileTools]] +deps = ["Preferences"] +git-tree-sha1 = 
"5aa36f7049a63a1528fe8f7c3f2113413ffd4e1f" +uuid = "aea7be01-6a6a-4083-8856-8a6e6704d82a" +version = "1.2.1" + +[[deps.Preferences]] +deps = ["TOML"] +git-tree-sha1 = "9306f6085165d270f7e3db02af26a400d580f5c6" +uuid = "21216c6a-2e73-6563-6e65-726566657250" +version = "1.4.3" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" +version = "1.11.0" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "StyledStrings", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" +version = "1.11.0" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +version = "1.11.0" + +[[deps.RegistryInstances]] +deps = ["LazilyInitializedFields", "Pkg", "TOML", "Tar"] +git-tree-sha1 = "ffd19052caf598b8653b99404058fce14828be51" +uuid = "2792f1a3-b283-48e8-9a74-f99dce5104f3" +version = "0.1.0" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Salsa]] +deps = ["ExceptionUnwrapping", "MacroTools"] +git-tree-sha1 = "6df846d5b37e47e3dfb27c971ce58384c4b6618c" +uuid = "1fbf2c77-44e2-4d5d-8131-0fa618a5c278" +version = "2.2.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +version = "1.11.0" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +version = "1.11.0" + +[[deps.StyledStrings]] +uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b" +version = "1.11.0" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.3" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +version = "1.11.0" + +[[deps.TestItemDetection]] +deps = ["JuliaSyntax"] +git-tree-sha1 = "cc24849d7d685195215cd3c38ae3bc5a57b6deaf" +uuid = "76b0de8b-5c4b-48ef-a724-914b33ca988d" +version = "1.1.0" + +[[deps.TranscodingStreams]] +git-tree-sha1 = 
"0c45878dcfdcfa8480052b6ab162cdd138781742" +uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" +version = "0.11.3" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +version = "1.11.0" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" +version = "1.11.0" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.2.13+1" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.59.0+0" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+2" From 3f8665ac4913d7a725d43f843f5b3bb8de8e0a3c Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:50:13 -0800 Subject: [PATCH 16/24] Add environments --- .../environments/fallback/Manifest.toml | 201 ++++++++++++++++++ .../environments/fallback/Project.toml | 5 + .../environments/v1.0/Manifest.toml | 79 +++++++ .../environments/v1.0/Project.toml | 2 + .../environments/v1.1/Manifest.toml | 74 +++++++ .../environments/v1.1/Project.toml | 2 + .../environments/v1.10/Manifest.toml | 154 ++++++++++++++ .../environments/v1.10/Project.toml | 2 + .../environments/v1.11/Manifest.toml | 181 ++++++++++++++++ .../environments/v1.11/Project.toml | 2 + .../environments/v1.12/Manifest.toml | 191 +++++++++++++++++ .../environments/v1.12/Project.toml | 5 + .../environments/v1.2/Manifest.toml | 74 +++++++ .../environments/v1.2/Project.toml | 2 + .../environments/v1.3/Manifest.toml | 77 +++++++ .../environments/v1.3/Project.toml | 2 + .../environments/v1.4/Manifest.toml | 78 +++++++ .../environments/v1.4/Project.toml | 2 + .../environments/v1.5/Manifest.toml | 78 +++++++ .../environments/v1.5/Project.toml | 2 + .../environments/v1.6/Manifest.toml | 130 +++++++++++ .../environments/v1.6/Project.toml | 2 + .../environments/v1.7/Manifest.toml | 133 ++++++++++++ .../environments/v1.7/Project.toml | 2 + 
.../environments/v1.8/Manifest.toml | 149 +++++++++++++ .../environments/v1.8/Project.toml | 2 + .../environments/v1.9/Manifest.toml | 149 +++++++++++++ .../environments/v1.9/Project.toml | 2 + 28 files changed, 1782 insertions(+) create mode 100644 juliadynamicanalysisprocess/environments/fallback/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/fallback/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.0/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.0/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.1/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.1/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.10/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.10/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.11/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.11/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.12/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.12/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.2/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.2/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.3/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.3/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.4/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.4/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.5/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.5/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.6/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.6/Project.toml create mode 100644 
juliadynamicanalysisprocess/environments/v1.7/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.7/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.8/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.8/Project.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.9/Manifest.toml create mode 100644 juliadynamicanalysisprocess/environments/v1.9/Project.toml diff --git a/juliadynamicanalysisprocess/environments/fallback/Manifest.toml b/juliadynamicanalysisprocess/environments/fallback/Manifest.toml new file mode 100644 index 0000000..9abc5e8 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/fallback/Manifest.toml @@ -0,0 +1,201 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.14.0-DEV" +manifest_format = "2.1" +project_hash = "6021ff4916269ecda073eb2b70bf5c2084bb7929" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.2" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" +version = "1.11.0" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +version = "1.11.0" + +[[deps.CompilerSupportLibraries_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" +version = "1.3.0+1" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" +version = "1.11.0" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" +version = "1.11.0" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.7.0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" +version = "1.11.0" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +version = "1.11.0" + +[[deps.JuliaDynamicAnalysisProcess]] +deps = ["Base64", 
"Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[deps.JuliaSyntaxHighlighting]] +deps = ["StyledStrings"] +uuid = "ac6e5ff7-fb65-4e79-a425-ec3bc9c03011" +version = "1.12.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "1.0.0" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "LibSSH2_jll", "Libdl", "OpenSSL_jll", "Zlib_jll", "Zstd_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "8.17.0+0" + +[[deps.LibGit2]] +deps = ["LibGit2_jll", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +version = "1.11.0" + +[[deps.LibGit2_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "LibSSH2_jll", "Libdl", "OpenSSL_jll", "PCRE2_jll", "Zlib_jll"] +uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5" +version = "1.9.1+0" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl", "OpenSSL_jll", "Zlib_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.3+1" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" +version = "1.11.0" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +version = "1.11.0" + +[[deps.Markdown]] +deps = ["Base64", "JuliaSyntaxHighlighting", "StyledStrings"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +version = "1.11.0" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" +version = "1.11.0" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2025.11.4" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.3.0" + +[[deps.OpenSSL_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" 
+version = "3.5.4+0" + +[[deps.PCRE2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "efcefdf7-47ab-520b-bdef-62a2eaa19f15" +version = "10.47.0+0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "Random", "SHA", "TOML", "Tar", "UUIDs", "Zstd_jll", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.13.0" +weakdeps = ["REPL"] + + [deps.Pkg.extensions] + REPLExt = "REPL" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" +version = "1.11.0" + +[[deps.REPL]] +deps = ["Dates", "FileWatching", "InteractiveUtils", "JuliaSyntaxHighlighting", "Markdown", "Sockets", "StyledStrings", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" +version = "1.11.0" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +version = "1.11.0" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "1.0.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +version = "1.11.0" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +version = "1.11.0" + +[[deps.StyledStrings]] +uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b" +version = "1.11.0" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.3" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +version = "1.11.0" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +version = "1.11.0" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" +version = "1.11.0" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.3.1+2" + +[[deps.Zstd_jll]] +deps = ["CompilerSupportLibraries_jll", "Libdl"] +uuid = 
"3161d3a3-bdf6-5164-811a-617609db77b4" +version = "1.5.7+1" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.68.0+1" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.7.0+0" diff --git a/juliadynamicanalysisprocess/environments/fallback/Project.toml b/juliadynamicanalysisprocess/environments/fallback/Project.toml new file mode 100644 index 0000000..174e86d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/fallback/Project.toml @@ -0,0 +1,5 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" + +[sources] +JuliaDynamicAnalysisProcess = {path = "../../JuliaDynamicAnalysisProcess"} diff --git a/juliadynamicanalysisprocess/environments/v1.0/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.0/Manifest.toml new file mode 100644 index 0000000..f4ddd84 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.0/Manifest.toml @@ -0,0 +1,79 @@ +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[Distributed]] +deps = ["LinearAlgebra", "Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[InteractiveUtils]] +deps = ["LinearAlgebra", "Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[LibGit2]] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + 
+[[LinearAlgebra]] +deps = ["Libdl"] +uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[Pkg]] +deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = ["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[Test]] +deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[UUIDs]] +deps = ["Random"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" diff --git a/juliadynamicanalysisprocess/environments/v1.0/Project.toml b/juliadynamicanalysisprocess/environments/v1.0/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.0/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.1/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.1/Manifest.toml new file mode 100644 index 0000000..f78cb7f --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.1/Manifest.toml @@ -0,0 +1,74 @@ +# This file is machine-generated - editing it directly is not advised + +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" 
+ +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[LibGit2]] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[Pkg]] +deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = ["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[Test]] +deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" diff --git a/juliadynamicanalysisprocess/environments/v1.1/Project.toml b/juliadynamicanalysisprocess/environments/v1.1/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.1/Project.toml @@ 
-0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.10/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.10/Manifest.toml new file mode 100644 index 0000000..c7ca693 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.10/Manifest.toml @@ -0,0 +1,154 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.10.10" +manifest_format = "2.0" +project_hash = "53a1a45582b1157821940ccc09e27a8d1066029f" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.1" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[deps.JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.4" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "8.4.0+0" + +[[deps.LibGit2]] +deps = ["Base64", "LibGit2_jll", 
"NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[deps.LibGit2_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll"] +uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5" +version = "1.6.4+0" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.0+1" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.2+1" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2023.1.10" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.10.0" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.3" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.Test]] 
+deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.2.13+1" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.52.0+1" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+2" diff --git a/juliadynamicanalysisprocess/environments/v1.10/Project.toml b/juliadynamicanalysisprocess/environments/v1.10/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.10/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.11/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.11/Manifest.toml new file mode 100644 index 0000000..87f062c --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.11/Manifest.toml @@ -0,0 +1,181 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.11.7" +manifest_format = "2.0" +project_hash = "53a1a45582b1157821940ccc09e27a8d1066029f" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.2" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" +version = "1.11.0" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +version = "1.11.0" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" +version = "1.11.0" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" +version = "1.11.0" + +[[deps.Downloads]] +deps = ["ArgTools", 
"FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" +version = "1.11.0" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +version = "1.11.0" + +[[deps.JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.4" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "8.6.0+0" + +[[deps.LibGit2]] +deps = ["Base64", "LibGit2_jll", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +version = "1.11.0" + +[[deps.LibGit2_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll"] +uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5" +version = "1.7.2+0" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.0+1" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" +version = "1.11.0" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +version = "1.11.0" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +version = "1.11.0" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.6+0" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" +version = "1.11.0" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2023.12.12" + 
+[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "Random", "SHA", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.11.0" +weakdeps = ["REPL"] + + [deps.Pkg.extensions] + REPLExt = "REPL" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" +version = "1.11.0" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "StyledStrings", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" +version = "1.11.0" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +version = "1.11.0" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +version = "1.11.0" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +version = "1.11.0" + +[[deps.StyledStrings]] +uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b" +version = "1.11.0" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.3" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +version = "1.11.0" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +version = "1.11.0" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" +version = "1.11.0" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.2.13+1" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.59.0+0" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = 
"3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+2" diff --git a/juliadynamicanalysisprocess/environments/v1.11/Project.toml b/juliadynamicanalysisprocess/environments/v1.11/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.11/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.12/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.12/Manifest.toml new file mode 100644 index 0000000..f455c36 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.12/Manifest.toml @@ -0,0 +1,191 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.12.2" +manifest_format = "2.0" +project_hash = "6021ff4916269ecda073eb2b70bf5c2084bb7929" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.2" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" +version = "1.11.0" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +version = "1.11.0" + +[[deps.CompilerSupportLibraries_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" +version = "1.3.0+1" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" +version = "1.11.0" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" +version = "1.11.0" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.7.0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" +version = "1.11.0" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +version = "1.11.0" + +[[deps.JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", 
"InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[deps.JuliaSyntaxHighlighting]] +deps = ["StyledStrings"] +uuid = "ac6e5ff7-fb65-4e79-a425-ec3bc9c03011" +version = "1.12.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.4" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "OpenSSL_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "8.15.0+0" + +[[deps.LibGit2]] +deps = ["LibGit2_jll", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +version = "1.11.0" + +[[deps.LibGit2_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "OpenSSL_jll"] +uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5" +version = "1.9.0+0" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "OpenSSL_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.3+1" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" +version = "1.11.0" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +version = "1.11.0" + +[[deps.Markdown]] +deps = ["Base64", "JuliaSyntaxHighlighting", "StyledStrings"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +version = "1.11.0" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" +version = "1.11.0" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2025.5.20" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.3.0" + +[[deps.OpenSSL_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" +version = "3.5.4+0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "Random", "SHA", "TOML", "Tar", "UUIDs", 
"p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.12.0" +weakdeps = ["REPL"] + + [deps.Pkg.extensions] + REPLExt = "REPL" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" +version = "1.11.0" + +[[deps.REPL]] +deps = ["InteractiveUtils", "JuliaSyntaxHighlighting", "Markdown", "Sockets", "StyledStrings", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" +version = "1.11.0" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +version = "1.11.0" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +version = "1.11.0" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +version = "1.11.0" + +[[deps.StyledStrings]] +uuid = "f489334b-da3d-4c2e-b8f0-e476e12c162b" +version = "1.11.0" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.3" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +version = "1.11.0" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" +version = "1.11.0" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" +version = "1.11.0" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.3.1+2" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.64.0+1" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.7.0+0" diff --git a/juliadynamicanalysisprocess/environments/v1.12/Project.toml b/juliadynamicanalysisprocess/environments/v1.12/Project.toml new file mode 
100644 index 0000000..e27a75d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.12/Project.toml @@ -0,0 +1,5 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" + +[sources] +JuliaDynamicAnalysisProcess = {path = "../../JuliaDynamicAnalysisProcess"} diff --git a/juliadynamicanalysisprocess/environments/v1.2/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.2/Manifest.toml new file mode 100644 index 0000000..f78cb7f --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.2/Manifest.toml @@ -0,0 +1,74 @@ +# This file is machine-generated - editing it directly is not advised + +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[LibGit2]] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[Pkg]] +deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = 
["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[Test]] +deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" diff --git a/juliadynamicanalysisprocess/environments/v1.2/Project.toml b/juliadynamicanalysisprocess/environments/v1.2/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.2/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.3/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.3/Manifest.toml new file mode 100644 index 0000000..100e571 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.3/Manifest.toml @@ -0,0 +1,77 @@ +# This file is machine-generated - editing it directly is not advised + +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[LibGit2]] +uuid = 
"76f85450-5226-5b5a-8eaa-529ad045b433" + +[[Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[Pkg]] +deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Test", "UUIDs"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = ["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[Test]] +deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" diff --git a/juliadynamicanalysisprocess/environments/v1.3/Project.toml b/juliadynamicanalysisprocess/environments/v1.3/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.3/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.4/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.4/Manifest.toml new file mode 100644 index 0000000..bbeb099 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.4/Manifest.toml @@ -0,0 +1,78 @@ +# This file is machine-generated - editing it directly is not advised + +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[Dates]] +deps = 
["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[LibGit2]] +deps = ["Printf"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[Pkg]] +deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = ["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[Test]] +deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" diff --git a/juliadynamicanalysisprocess/environments/v1.4/Project.toml 
b/juliadynamicanalysisprocess/environments/v1.4/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.4/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.5/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.5/Manifest.toml new file mode 100644 index 0000000..bbeb099 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.5/Manifest.toml @@ -0,0 +1,78 @@ +# This file is machine-generated - editing it directly is not advised + +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[LibGit2]] +deps = ["Printf"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[Pkg]] +deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", 
"Sockets"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = ["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[Test]] +deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" diff --git a/juliadynamicanalysisprocess/environments/v1.5/Project.toml b/juliadynamicanalysisprocess/environments/v1.5/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.5/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.6/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.6/Manifest.toml new file mode 100644 index 0000000..b60d2c3 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.6/Manifest.toml @@ -0,0 +1,130 @@ +# This file is machine-generated - editing it directly is not advised + +[[ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" + +[[Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" + +[[Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[Downloads]] +deps = ["ArgTools", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" + +[[FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[JuliaDynamicAnalysisProcess]] +deps = 
["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" + +[[LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" + +[[LibGit2]] +deps = ["Base64", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" + +[[Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" + +[[Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" + +[[NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" + +[[Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[Random]] +deps = ["Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[TOML]] +deps = 
["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" + +[[Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" + +[[Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" + +[[nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" + +[[p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" diff --git a/juliadynamicanalysisprocess/environments/v1.6/Project.toml b/juliadynamicanalysisprocess/environments/v1.6/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.6/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.7/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.7/Manifest.toml new file mode 100644 index 0000000..dd17beb --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.7/Manifest.toml @@ -0,0 +1,133 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.7.3" +manifest_format = "2.0" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" + 
+[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[deps.JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" + +[[deps.LibGit2]] +deps = ["Base64", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + 
+[[deps.Random]] +deps = ["SHA", "Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" diff --git a/juliadynamicanalysisprocess/environments/v1.7/Project.toml b/juliadynamicanalysisprocess/environments/v1.7/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.7/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.8/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.8/Manifest.toml new file mode 100644 index 0000000..98ec150 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.8/Manifest.toml @@ -0,0 +1,149 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.8.5" +manifest_format = "2.0" +project_hash = "53a1a45582b1157821940ccc09e27a8d1066029f" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.1" + +[[deps.Artifacts]] +uuid = 
"56f22d72-fd6d-98f1-02f0-08ddc0907c33" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[deps.JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.3" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "7.84.0+0" + +[[deps.LibGit2]] +deps = ["Base64", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.10.2+0" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.0+0" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[deps.MozillaCACerts_jll]] +uuid = 
"14a3606d-f60d-562e-9121-12d972cd8159" +version = "2022.2.1" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.8.0" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[deps.Random]] +deps = ["SHA", "Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.0" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.1" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.2.12+3" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.48.0+0" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+0" diff --git a/juliadynamicanalysisprocess/environments/v1.8/Project.toml b/juliadynamicanalysisprocess/environments/v1.8/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ 
b/juliadynamicanalysisprocess/environments/v1.8/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" diff --git a/juliadynamicanalysisprocess/environments/v1.9/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.9/Manifest.toml new file mode 100644 index 0000000..2f6b061 --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.9/Manifest.toml @@ -0,0 +1,149 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.9.4" +manifest_format = "2.0" +project_hash = "53a1a45582b1157821940ccc09e27a8d1066029f" + +[[deps.ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.1" + +[[deps.Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" + +[[deps.Dates]] +deps = ["Printf"] +uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" + +[[deps.Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + +[[deps.Downloads]] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" + +[[deps.JuliaDynamicAnalysisProcess]] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +path = "../../JuliaDynamicAnalysisProcess" +uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" +version = "1.0.0" + +[[deps.LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.4" + +[[deps.LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "8.4.0+0" + 
+[[deps.LibGit2]] +deps = ["Base64", "NetworkOptions", "Printf", "SHA"] +uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" + +[[deps.LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.11.0+1" + +[[deps.Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" + +[[deps.MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.2+0" + +[[deps.Mmap]] +uuid = "a63ad114-7e13-5084-954f-fe012c677804" + +[[deps.MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2022.10.11" + +[[deps.NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" + +[[deps.Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.9.2" + +[[deps.Printf]] +deps = ["Unicode"] +uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" + +[[deps.REPL]] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] +uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" + +[[deps.Random]] +deps = ["SHA", "Serialization"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" + +[[deps.Sockets]] +uuid = "6462fe0b-24de-5631-8697-dd941f90decc" + +[[deps.TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.3" + +[[deps.Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = 
"8dfed614-e22c-5e08-85e1-65c5234f0b40" + +[[deps.UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + +[[deps.Unicode]] +uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[deps.Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" +version = "1.2.13+0" + +[[deps.nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.52.0+1" + +[[deps.p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+0" diff --git a/juliadynamicanalysisprocess/environments/v1.9/Project.toml b/juliadynamicanalysisprocess/environments/v1.9/Project.toml new file mode 100644 index 0000000..d58e04d --- /dev/null +++ b/juliadynamicanalysisprocess/environments/v1.9/Project.toml @@ -0,0 +1,2 @@ +[deps] +JuliaDynamicAnalysisProcess = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" From ac372081caec449e4984ec5a0aa47a1b034119cc Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 13:50:23 -0800 Subject: [PATCH 17/24] Progress on jdap --- .../src/JuliaDynamicAnalysisProcess.jl | 36 +++++ .../src/pkg_imports.jl | 9 ++ .../julia_dynamic_analysis_process_main.jl | 43 ++++++ ...julia_dynamic_analysis_process_protocol.jl | 136 ++++++++++++++++++ 4 files changed, 224 insertions(+) create mode 100644 juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/pkg_imports.jl create mode 100644 juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl create mode 100644 shared/julia_dynamic_analysis_process_protocol.jl diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl index b39f4c5..ed91d65 100644 --- a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl @@ -1,2 
+1,38 @@ module JuliaDynamicAnalysisProcess + +include("pkg_imports.jl") +include("../../../shared/julia_dynamic_analysis_process_protocol.jl") + +JSONRPC.@message_dispatcher dispatch_msg begin + TestItemServerProtocol.testserver_revise_request_type => revise_request + TestItemServerProtocol.testserver_activate_env_request_type => activate_env_request + TestItemServerProtocol.configure_testrun_request_type => configure_test_run_request + TestItemServerProtocol.testserver_run_testitems_batch_request_type => run_testitems_batch_request + TestItemServerProtocol.testserver_steal_testitems_request_type => steal_testitems_request + TestItemServerProtocol.testserver_shutdown_request_type => shutdown_request +end + +function serve(pipename, error_handler=nothing) + conn = Sockets.connect(pipename) + + endpoint = JSONRPC.JSONRPCEndpoint(conn, conn) + + run(endpoint) + + while true + msg = JSONRPC.get_next_message(endpoint) + + if msg.method == "testserver/shutdown" + dispatch_msg(endpoint, msg, state) + break + else + @async try + dispatch_msg(endpoint, msg, state) + catch err + Base.display_error(err, catch_backtrace()) + end + end + end +end + end diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/pkg_imports.jl b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/pkg_imports.jl new file mode 100644 index 0000000..8ee2318 --- /dev/null +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/pkg_imports.jl @@ -0,0 +1,9 @@ +include("../../../packages/JSON/src/JSON.jl") +include("../../../packages/CancellationTokens/src/CancellationTokens.jl") + +module JSONRPC +import ..CancellationTokens +import ..JSON +import UUIDs +include("../../../packages/JSONRPC/src/packagedef.jl") +end diff --git a/juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl b/juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl new file mode 100644 index 0000000..e110ea1 --- /dev/null +++ 
b/juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl @@ -0,0 +1,43 @@ +@info "Julia dynamic analysis process launching" + +import Pkg +version_specific_env_path = joinpath(@__DIR__, "../environments", "v$(VERSION.major).$(VERSION.minor)") +if isdir(version_specific_env_path) + @static if VERSION >= v"1.6" + Pkg.activate(version_specific_env_path, io=devnull) + else + Pkg.activate(version_specific_env_path) + end +else + @static if VERSION >= v"1.6" + Pkg.activate(joinpath(@__DIR__, "../environments", "fallback"), io=devnull) + else + Pkg.activate(joinpath(@__DIR__, "../environments", "fallback")) + end +end + +let + has_error_handler = false + + try + + if length(ARGS) > 2 + include(ARGS[3]) + has_error_handler = true + end + + using JuliaDynamicAnalysisProcess + + JuliaDynamicAnalysisProcess.serve( + ARGS[1], + ARGS[2], + has_error_handler ? (err, bt) -> global_err_handler(err, bt, Base.ARGS[4], "Julia Dynamic Analysis Process") : nothing) + catch err + bt = catch_backtrace() + if has_error_handler + global_err_handler(err, bt, Base.ARGS[4], "Julia Dynamic Analysis Process") + else + Base.display_error(err, bt) + end + end +end diff --git a/shared/julia_dynamic_analysis_process_protocol.jl b/shared/julia_dynamic_analysis_process_protocol.jl new file mode 100644 index 0000000..869469e --- /dev/null +++ b/shared/julia_dynamic_analysis_process_protocol.jl @@ -0,0 +1,136 @@ +module JuliaDynamicAnalysisProtocol + +import ..JSONRPC +import ..JSONRPC.JSON + +using ..JSONRPC: @dict_readable, RequestType, NotificationType, Outbound + +@dict_readable struct Position <: JSONRPC.Outbound + line::Int + character::Int +end + +struct Range + start::Position + stop::Position +end +function Range(d::Dict) + Range(Position(d["start"]), Position(d["end"])) +end +function JSON.lower(a::Range) + Dict("start" => a.start, "end" => a.stop) +end + +@dict_readable struct Location <: JSONRPC.Outbound + uri::String + position::Position +end + +@dict_readable struct 
TestMessage <: JSONRPC.Outbound + message::String + expectedOutput::Union{String,Missing} + actualOutput::Union{String,Missing} + location::Location +end + +TestMessage(message, location) = TestMessage(message, missing, missing, location) + +@dict_readable struct RunTestItem <: JSONRPC.Outbound + id::String + uri::String + name::String + packageName::String + packageUri::String + useDefaultUsings::Bool + testSetups::Vector{String} + line::Int + column::Int + code::String +end + +struct FileCoverage <: JSONRPC.Outbound + uri::String + coverage::Vector{Union{Int,Nothing}} +end + +function FileCoverage(d::Dict) + return FileCoverage( + d["uri"], + Union{Int,Nothing}[i for i in d["coverage"]] + ) +end + +@dict_readable struct TestsetupDetails <: JSONRPC.Outbound + packageUri::String + name::String + kind::String + uri::String + line::Int + column::Int + code::String +end + +@dict_readable struct ConfigureTestRunRequestParams <: JSONRPC.Outbound + mode::String + coverageRootUris::Union{Missing,Vector{String}} + testSetups::Union{Missing,Vector{TestsetupDetails}} +end + +@dict_readable struct RunTestItemsRequestParams <: JSONRPC.Outbound + mode::String + coverageRootUris::Union{Vector{String},Missing} + testItems::Vector{RunTestItem} +end + +@dict_readable struct StealTestItemsRequestParams <: JSONRPC.Outbound + testItemIds::Vector{String} +end + +@dict_readable struct ActivateEnvParams <: JSONRPC.Outbound + projectUri::Union{Missing,String} + packageUri::String + packageName::String +end + +@dict_readable struct StartedParams <: JSONRPC.Outbound + testItemId::String +end + +@dict_readable struct PassedParams <: JSONRPC.Outbound + testItemId::String + duration::Float64 + coverage::Union{Missing,Vector{FileCoverage}} +end + +@dict_readable struct ErroredParams <: JSONRPC.Outbound + testItemId::String + messages::Vector{TestMessage} + duration::Union{Float64,Missing} +end + +@dict_readable struct FailedParams <: JSONRPC.Outbound + testItemId::String + 
messages::Vector{TestMessage} + duration::Union{Float64,Missing} +end + +@dict_readable struct SkippedStolenParams <: JSONRPC.Outbound + testItemId::String +end + +# Messages from the controller to the test process +const testserver_revise_request_type = JSONRPC.RequestType("testserver/revise", Nothing, String) +const testserver_activate_env_request_type = JSONRPC.RequestType("activateEnv", ActivateEnvParams, Nothing) +const configure_testrun_request_type = JSONRPC.RequestType("testserver/ConfigureTestRun", ConfigureTestRunRequestParams, Nothing) +const testserver_run_testitems_batch_request_type = JSONRPC.RequestType("testserver/runTestItems", RunTestItemsRequestParams, Nothing) +const testserver_steal_testitems_request_type = JSONRPC.RequestType("testserver/stealTestItems", StealTestItemsRequestParams, Nothing) +const testserver_shutdown_request_type = JSONRPC.RequestType("testserver/shutdown", Nothing, Nothing) + +# Messages from the test process to the controller +const started_notification_type = JSONRPC.NotificationType("started", StartedParams) +const passed_notification_type = JSONRPC.NotificationType("passed", PassedParams) +const errored_notification_type = JSONRPC.NotificationType("errored", ErroredParams) +const failed_notification_type = JSONRPC.NotificationType("failed", FailedParams) +const skipped_stolen_notification_type = JSONRPC.NotificationType("skippedStolen", SkippedStolenParams) + +end From d9c7e1c1654eba24c82827cbeaf169fe5e333837 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 15:48:26 -0800 Subject: [PATCH 18/24] Progress --- Project.toml | 2 + .../JuliaDynamicAnalysisProcess/Project.toml | 1 + .../src/JuliaDynamicAnalysisProcess.jl | 28 +- .../src/symbolserver.jl | 117 +++++++++ .../julia_dynamic_analysis_process_main.jl | 23 +- ...julia_dynamic_analysis_process_protocol.jl | 146 ++--------- .../symbolserver}/faketypes.jl | 0 .../symbolserver}/serialize.jl | 0 .../symbolserver}/symbols.jl | 0 .../symbolserver}/utils.jl | 
8 +- src/JuliaWorkspaces.jl | 2 +- src/SymbolServer/SymbolServer.jl | 22 +- src/dynamic_feature.jl | 239 +++++++++++++++++- src/fileio.jl | 6 +- src/types.jl | 2 +- 15 files changed, 423 insertions(+), 173 deletions(-) create mode 100644 juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl rename {src/SymbolServer => shared/symbolserver}/faketypes.jl (100%) rename {src/SymbolServer => shared/symbolserver}/serialize.jl (100%) rename {src/SymbolServer => shared/symbolserver}/symbols.jl (100%) rename {src/SymbolServer => shared/symbolserver}/utils.jl (98%) diff --git a/Project.toml b/Project.toml index de44da3..be204b6 100644 --- a/Project.toml +++ b/Project.toml @@ -17,6 +17,7 @@ Sockets = "6462fe0b-24de-5631-8697-dd941f90decc" REPL = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" LibGit2 = "76f85450-5226-5b5a-8eaa-529ad045b433" InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +JSONRPC = "b9b8584e-8fd3-41f9-ad0c-7255d428e418" [extras] TestItemRunner = "f8b46487-2199-4994-9208-9a1283c18c0a" @@ -35,6 +36,7 @@ Sockets = "<0.0.1, 1" REPL = "<0.0.1, 1" LibGit2 = "<0.0.1, 1" InteractiveUtils = "<0.0.1, 1" +JSONRPC = "2" [targets] test = ["Test", "TestItemRunner"] diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml index 12ea29a..c53f5a2 100644 --- a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/Project.toml @@ -19,6 +19,7 @@ UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" Unicode = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" Sockets = "6462fe0b-24de-5631-8697-dd941f90decc" Logging = "56ddb016-857b-54e1-b83d-db4d58db5568" +SHA = "ea8e919c-243c-51af-8825-aaa63cd721ce" [compat] julia = "1" diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl 
b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl index ed91d65..900e974 100644 --- a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl @@ -2,35 +2,35 @@ module JuliaDynamicAnalysisProcess include("pkg_imports.jl") include("../../../shared/julia_dynamic_analysis_process_protocol.jl") +include("symbolserver.jl") + +struct JuliaDynamicAnalysisProcessState +end + +function get_store_request(params::JuliaDynamicAnalysisProtocol.GetStoreParams, state::JuliaDynamicAnalysisProcessState, token) + Pkg.activate(uri2filepath(params.projectUri)) + + SymbolServer.get_store(params.storePath, nothing) +end JSONRPC.@message_dispatcher dispatch_msg begin - TestItemServerProtocol.testserver_revise_request_type => revise_request - TestItemServerProtocol.testserver_activate_env_request_type => activate_env_request - TestItemServerProtocol.configure_testrun_request_type => configure_test_run_request - TestItemServerProtocol.testserver_run_testitems_batch_request_type => run_testitems_batch_request - TestItemServerProtocol.testserver_steal_testitems_request_type => steal_testitems_request - TestItemServerProtocol.testserver_shutdown_request_type => shutdown_request + JuliaDynamicAnalysisProtocol.get_store_request_type => get_store_request end function serve(pipename, error_handler=nothing) conn = Sockets.connect(pipename) endpoint = JSONRPC.JSONRPCEndpoint(conn, conn) - run(endpoint) + state = JuliaDynamicAnalysisProcessState() + while true msg = JSONRPC.get_next_message(endpoint) + dispatch_msg(endpoint, msg, state) if msg.method == "testserver/shutdown" - dispatch_msg(endpoint, msg, state) break - else - @async try - dispatch_msg(endpoint, msg, state) - catch err - Base.display_error(err, catch_backtrace()) - end end end end diff --git 
a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl new file mode 100644 index 0000000..c15b7fb --- /dev/null +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl @@ -0,0 +1,117 @@ +module SymbolServer + +# !in("@stdlib", LOAD_PATH) && push!(LOAD_PATH, "@stdlib") # Make sure we can load stdlibs + +start_time = time_ns() + + +module LoadingBay +end + +using Pkg, SHA +using Base: UUID + +include("../../../shared/symbolserver/faketypes.jl") +include("../../../shared/symbolserver/symbols.jl") +include("../../../shared/symbolserver/utils.jl") +include("../../../shared/symbolserver/serialize.jl") +using .CacheStore + +# Add some methods to check whether a package is part of the standard library and so +# won't need recaching. +@static if isdefined(Pkg.Types, :is_stdlib) + is_stdlib(uuid::UUID) = Pkg.Types.is_stdlib(uuid) +else + is_stdlib(uuid::UUID) = uuid in keys(ctx.stdlibs) +end + +function get_store(store_path::String, progress_callback) + ctx = try + Pkg.Types.Context() + catch err + @info "Package environment can't be read." + exit() + end + + server = Server(store_path, ctx, Dict{UUID,Package}()) + + written_caches = String[] # List of caches that have already been written + toplevel_pkgs = deps(project(ctx)) # First get a list of all package UUIds that we want to cache + packages_to_load = [] + + # Obtain the directory containing the active Manifest.toml. Any 'develop'ed dependencies + # will contain a path that is relative to this directory. + manifest_dir = dirname(ctx.env.manifest_file) + + # Next make sure the cache is up-to-date for all of these. + for (pk_name, uuid) in toplevel_pkgs + uuid isa UUID || (uuid = UUID(uuid)) + if !isinmanifest(ctx, uuid) + @info "$pk_name not in manifest, skipping." 
+ continue + end + pe = frommanifest(manifest(ctx), uuid) + cache_path = joinpath(server.storedir, SymbolServer.get_cache_path(manifest(ctx), uuid)...) + + if isfile(cache_path) + if is_package_deved(manifest(ctx), uuid) + try + cached_version = open(cache_path) do io + CacheStore.read(io) + end + if sha_pkg(manifest_dir, frommanifest(manifest(ctx), uuid)) != cached_version.sha + @info "Outdated sha, will recache package $pk_name ($uuid)" + push!(packages_to_load, uuid) + else + @info "Package $pk_name ($uuid) is cached." + end + catch err + @info "Couldn't load $pk_name ($uuid) from file, will recache." + end + else + @info "Package $pk_name ($uuid) is cached." + end + else + @info "Will cache package $pk_name ($uuid)" + push!(packages_to_load, uuid) + end + end + + # Load all packages together + # This is important, or methods added to functions in other packages that are loaded earlier would not be in the cache + for (i, uuid) in enumerate(packages_to_load) + load_package(ctx, uuid, progress_callback, LoadingBay, round(Int, 100*(i - 1)/length(packages_to_load))) + end + + # Create image of whole package env. This creates the module structure only. + env_symbols = getenvtree() + + # Populate the above with symbols, skipping modules that don't need caching. + # symbols (env_symbols) + visited = Base.IdSet{Module}([Base, Core]) + + for (pid, m) in Base.loaded_modules + if pid.uuid !== nothing && is_stdlib(pid.uuid) && + isinmanifest(ctx, pid.uuid) && + isfile(joinpath(server.storedir, SymbolServer.get_cache_path(manifest(ctx), pid.uuid)...)) + push!(visited, m) + delete!(env_symbols, Symbol(pid.name)) + end + end + + symbols(env_symbols, nothing, getallns(), visited) + + # Wrap the `ModuleStore`s as `Package`s. 
+ for (pkg_name, cache) in env_symbols + !isinmanifest(ctx, String(pkg_name)) && continue + uuid = packageuuid(ctx, String(pkg_name)) + pe = frommanifest(ctx, uuid) + server.depot[uuid] = Package(String(pkg_name), cache, uuid, sha_pkg(manifest_dir, pe)) + end + + write_depot(server, server.context, written_caches) + + @info "Symbol server indexing took $((time_ns() - start_time) / 1e9) seconds." +end + +end diff --git a/juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl b/juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl index e110ea1..040eb38 100644 --- a/juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl +++ b/juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl @@ -17,12 +17,26 @@ else end let + # Try to lower the priority of this process so that it doesn't block the + # user system. + @static if Sys.iswindows() + # Get process handle + p_handle = ccall(:GetCurrentProcess, stdcall, Ptr{Cvoid}, ()) + + # Set BELOW_NORMAL_PRIORITY_CLASS + ret = ccall(:SetPriorityClass, stdcall, Cint, (Ptr{Cvoid}, Culong), p_handle, 0x00004000) + ret != 1 && @warn "Something went wrong when setting BELOW_NORMAL_PRIORITY_CLASS." + else + ret = ccall(:nice, Cint, (Cint,), 1) + # We don't check the return value because it doesn't really matter + end + has_error_handler = false try - if length(ARGS) > 2 - include(ARGS[3]) + if length(ARGS) > 1 + include(ARGS[2]) has_error_handler = true end @@ -30,12 +44,11 @@ let JuliaDynamicAnalysisProcess.serve( ARGS[1], - ARGS[2], - has_error_handler ? (err, bt) -> global_err_handler(err, bt, Base.ARGS[4], "Julia Dynamic Analysis Process") : nothing) + has_error_handler ? 
(err, bt) -> global_err_handler(err, bt, Base.ARGS[3], "Julia Dynamic Analysis Process") : nothing) catch err bt = catch_backtrace() if has_error_handler - global_err_handler(err, bt, Base.ARGS[4], "Julia Dynamic Analysis Process") + global_err_handler(err, bt, Base.ARGS[3], "Julia Dynamic Analysis Process") else Base.display_error(err, bt) end diff --git a/shared/julia_dynamic_analysis_process_protocol.jl b/shared/julia_dynamic_analysis_process_protocol.jl index 869469e..fed40dd 100644 --- a/shared/julia_dynamic_analysis_process_protocol.jl +++ b/shared/julia_dynamic_analysis_process_protocol.jl @@ -5,132 +5,24 @@ import ..JSONRPC.JSON using ..JSONRPC: @dict_readable, RequestType, NotificationType, Outbound -@dict_readable struct Position <: JSONRPC.Outbound - line::Int - character::Int -end - -struct Range - start::Position - stop::Position -end -function Range(d::Dict) - Range(Position(d["start"]), Position(d["end"])) -end -function JSON.lower(a::Range) - Dict("start" => a.start, "end" => a.stop) -end - -@dict_readable struct Location <: JSONRPC.Outbound - uri::String - position::Position -end - -@dict_readable struct TestMessage <: JSONRPC.Outbound - message::String - expectedOutput::Union{String,Missing} - actualOutput::Union{String,Missing} - location::Location -end - -TestMessage(message, location) = TestMessage(message, missing, missing, location) - -@dict_readable struct RunTestItem <: JSONRPC.Outbound - id::String - uri::String - name::String - packageName::String - packageUri::String - useDefaultUsings::Bool - testSetups::Vector{String} - line::Int - column::Int - code::String -end - -struct FileCoverage <: JSONRPC.Outbound - uri::String - coverage::Vector{Union{Int,Nothing}} -end - -function FileCoverage(d::Dict) - return FileCoverage( - d["uri"], - Union{Int,Nothing}[i for i in d["coverage"]] - ) -end - -@dict_readable struct TestsetupDetails <: JSONRPC.Outbound - packageUri::String - name::String - kind::String - uri::String - line::Int - column::Int 
- code::String -end - -@dict_readable struct ConfigureTestRunRequestParams <: JSONRPC.Outbound - mode::String - coverageRootUris::Union{Missing,Vector{String}} - testSetups::Union{Missing,Vector{TestsetupDetails}} -end - -@dict_readable struct RunTestItemsRequestParams <: JSONRPC.Outbound - mode::String - coverageRootUris::Union{Vector{String},Missing} - testItems::Vector{RunTestItem} -end - -@dict_readable struct StealTestItemsRequestParams <: JSONRPC.Outbound - testItemIds::Vector{String} -end - -@dict_readable struct ActivateEnvParams <: JSONRPC.Outbound - projectUri::Union{Missing,String} - packageUri::String - packageName::String -end - -@dict_readable struct StartedParams <: JSONRPC.Outbound - testItemId::String -end - -@dict_readable struct PassedParams <: JSONRPC.Outbound - testItemId::String - duration::Float64 - coverage::Union{Missing,Vector{FileCoverage}} -end - -@dict_readable struct ErroredParams <: JSONRPC.Outbound - testItemId::String - messages::Vector{TestMessage} - duration::Union{Float64,Missing} -end - -@dict_readable struct FailedParams <: JSONRPC.Outbound - testItemId::String - messages::Vector{TestMessage} - duration::Union{Float64,Missing} -end - -@dict_readable struct SkippedStolenParams <: JSONRPC.Outbound - testItemId::String -end - -# Messages from the controller to the test process -const testserver_revise_request_type = JSONRPC.RequestType("testserver/revise", Nothing, String) -const testserver_activate_env_request_type = JSONRPC.RequestType("activateEnv", ActivateEnvParams, Nothing) -const configure_testrun_request_type = JSONRPC.RequestType("testserver/ConfigureTestRun", ConfigureTestRunRequestParams, Nothing) -const testserver_run_testitems_batch_request_type = JSONRPC.RequestType("testserver/runTestItems", RunTestItemsRequestParams, Nothing) -const testserver_steal_testitems_request_type = JSONRPC.RequestType("testserver/stealTestItems", StealTestItemsRequestParams, Nothing) -const testserver_shutdown_request_type = 
JSONRPC.RequestType("testserver/shutdown", Nothing, Nothing) - -# Messages from the test process to the controller -const started_notification_type = JSONRPC.NotificationType("started", StartedParams) -const passed_notification_type = JSONRPC.NotificationType("passed", PassedParams) -const errored_notification_type = JSONRPC.NotificationType("errored", ErroredParams) -const failed_notification_type = JSONRPC.NotificationType("failed", FailedParams) -const skipped_stolen_notification_type = JSONRPC.NotificationType("skippedStolen", SkippedStolenParams) +@dict_readable struct GetStoreParams <: JSONRPC.Outbound + projectUri::String + storePath::String +end + +# Messages to the dynamic analysis process +const get_store_request_type = JSONRPC.RequestType("juliadynamicanalysisprocess/getStore", GetStoreParams, Nothing) +# const testserver_activate_env_request_type = JSONRPC.RequestType("activateEnv", ActivateEnvParams, Nothing) +# const configure_testrun_request_type = JSONRPC.RequestType("testserver/ConfigureTestRun", ConfigureTestRunRequestParams, Nothing) +# const testserver_run_testitems_batch_request_type = JSONRPC.RequestType("testserver/runTestItems", RunTestItemsRequestParams, Nothing) +# const testserver_steal_testitems_request_type = JSONRPC.RequestType("testserver/stealTestItems", StealTestItemsRequestParams, Nothing) +# const testserver_shutdown_request_type = JSONRPC.RequestType("testserver/shutdown", Nothing, Nothing) + +# Messages from the dynamic analysis process +# const started_notification_type = JSONRPC.NotificationType("started", StartedParams) +# const passed_notification_type = JSONRPC.NotificationType("passed", PassedParams) +# const errored_notification_type = JSONRPC.NotificationType("errored", ErroredParams) +# const failed_notification_type = JSONRPC.NotificationType("failed", FailedParams) +# const skipped_stolen_notification_type = JSONRPC.NotificationType("skippedStolen", SkippedStolenParams) end diff --git a/src/SymbolServer/faketypes.jl 
b/shared/symbolserver/faketypes.jl similarity index 100% rename from src/SymbolServer/faketypes.jl rename to shared/symbolserver/faketypes.jl diff --git a/src/SymbolServer/serialize.jl b/shared/symbolserver/serialize.jl similarity index 100% rename from src/SymbolServer/serialize.jl rename to shared/symbolserver/serialize.jl diff --git a/src/SymbolServer/symbols.jl b/shared/symbolserver/symbols.jl similarity index 100% rename from src/SymbolServer/symbols.jl rename to shared/symbolserver/symbols.jl diff --git a/src/SymbolServer/utils.jl b/shared/symbolserver/utils.jl similarity index 98% rename from src/SymbolServer/utils.jl rename to shared/symbolserver/utils.jl index bdcb47a..853f8f3 100644 --- a/src/SymbolServer/utils.jl +++ b/shared/symbolserver/utils.jl @@ -636,20 +636,20 @@ function get_pkg_path(pkg::Base.PkgId, env, depot_path) return nothing end -function load_package(c::Pkg.Types.Context, uuid, conn, loadingbay, percentage = missing) +function load_package(c::Pkg.Types.Context, uuid, progress_callback, loadingbay, percentage = missing) isinmanifest(c, uuid isa String ? Base.UUID(uuid) : uuid) || return pe_name = packagename(c, uuid) pid = Base.PkgId(uuid isa String ? 
Base.UUID(uuid) : uuid, pe_name) if pid in keys(Base.loaded_modules) - conn !== nothing && println(conn, "PROCESSPKG;$pe_name;$uuid;noversion;$percentage") + progress_callback !== nothing && progress_callback(:PROCESSPKG, pe_name, uuid, :noversion, percentage) loadingbay.eval(:($(Symbol(pe_name)) = $(Base.loaded_modules[pid]))) m = Base.invokelatest(() -> getfield(loadingbay, Symbol(pe_name))) else m = try - conn !== nothing && println(conn, "STARTLOAD;$pe_name;$uuid;noversion;$percentage") + progress_callback !== nothing && progress_callback(:STARTLOAD, pe_name, uuid, :noversion, percentage) loadingbay.eval(:(import $(Symbol(pe_name)))) - conn !== nothing && println(conn, "STOPLOAD;$pe_name") + progress_callback !== nothing && progress_callback(:STOPLOAD, pe_name) m = Base.invokelatest(() -> getfield(loadingbay, Symbol(pe_name))) catch return diff --git a/src/JuliaWorkspaces.jl b/src/JuliaWorkspaces.jl index b64a24f..5319b01 100644 --- a/src/JuliaWorkspaces.jl +++ b/src/JuliaWorkspaces.jl @@ -1,6 +1,6 @@ module JuliaWorkspaces -import UUIDs, JuliaSyntax, TestItemDetection, CSTParser +import UUIDs, JuliaSyntax, TestItemDetection, CSTParser, JSONRPC, Sockets using UUIDs: UUID, uuid4 using JuliaSyntax: SyntaxNode using Salsa diff --git a/src/SymbolServer/SymbolServer.jl b/src/SymbolServer/SymbolServer.jl index 78bd0c5..286bf72 100644 --- a/src/SymbolServer/SymbolServer.jl +++ b/src/SymbolServer/SymbolServer.jl @@ -10,10 +10,10 @@ import Sockets, UUIDs # moved there using REPL -include("faketypes.jl") -include("symbols.jl") -include("utils.jl") -include("serialize.jl") +include("../../shared/symbolserver/faketypes.jl") +include("../../shared/symbolserver/symbols.jl") +include("../../shared/symbolserver/utils.jl") +include("../../shared/symbolserver/serialize.jl") using .CacheStore mutable struct SymbolServerInstance @@ -299,7 +299,7 @@ function pipe_name() return pipename end -function load_project_packages_into_store!(ssi::SymbolServerInstance, environment_path, 
store, progress_callback = nothing) +function load_project_packages_into_store!(store_path, depot_path, environment_path, store, progress_callback = nothing) project_filename = isfile(joinpath(environment_path, "JuliaProject.toml")) ? joinpath(environment_path, "JuliaProject.toml") : joinpath(environment_path, "Project.toml") project = try Pkg.API.read_project(project_filename) @@ -315,18 +315,18 @@ function load_project_packages_into_store!(ssi::SymbolServerInstance, environmen num_uuids = length(values(deps(project))) t0 = time() for (i, uuid) in enumerate(uuids) - load_package_from_cache_into_store!(ssi, uuid isa UUID ? uuid : UUID(uuid), environment_path, manifest, store, progress_callback, round(Int, 100 * (i - 1) / num_uuids)) + load_package_from_cache_into_store!(store_path, depot_path, uuid isa UUID ? uuid : UUID(uuid), environment_path, manifest, store, progress_callback, round(Int, 100 * (i - 1) / num_uuids)) end took = round(time() - t0, sigdigits = 2) progress_callback("Loaded all packages into cache in $(took)s", 100) end """ - load_package_from_cache_into_store!(ssp::SymbolServerInstance, uuid, store) + load_package_from_cache_into_store!(store_path, depot_path, uuid, store) Tries to load the on-disc stored cache for a package (uuid). Attempts to generate (and save to disc) a new cache if the file does not exist or is unopenable. """ -function load_package_from_cache_into_store!(ssi::SymbolServerInstance, uuid::UUID, environment_path, manifest, store, progress_callback = nothing, percentage = missing) +function load_package_from_cache_into_store!(store_path, depot_path, uuid::UUID, environment_path, manifest, store, progress_callback = nothing, percentage = missing) yield() isinmanifest(manifest, uuid) || return pe = frommanifest(manifest, uuid) @@ -335,7 +335,7 @@ function load_package_from_cache_into_store!(ssi::SymbolServerInstance, uuid::UU # further existence checks needed? 
- cache_path = joinpath(ssi.store_path, get_cache_path(manifest, uuid)...) + cache_path = joinpath(store_path, get_cache_path(manifest, uuid)...) if isfile(cache_path) t0 = time() progress_callback("Loading $pe_name from cache...", percentage) @@ -346,7 +346,7 @@ function load_package_from_cache_into_store!(ssi::SymbolServerInstance, uuid::UU pkg_path = Base.locate_package(Base.PkgId(uuid, pe_name)) if pkg_path === nothing || !isfile(pkg_path) - pkg_path = get_pkg_path(Base.PkgId(uuid, pe_name), environment_path, ssi.depot_path) + pkg_path = get_pkg_path(Base.PkgId(uuid, pe_name), environment_path, depot_path) end if pkg_path !== nothing modify_dirs(package_data.val, f -> modify_dir(f, r"^PLACEHOLDER", joinpath(pkg_path, "src"))) @@ -361,7 +361,7 @@ function load_package_from_cache_into_store!(ssi::SymbolServerInstance, uuid::UU progress_callback(msg, percentage) t0 = time() for dep in deps(pe) - load_package_from_cache_into_store!(ssi, packageuuid(dep), environment_path, manifest, store, progress_callback, percentage) + load_package_from_cache_into_store!(store_path, depot_path, packageuuid(dep), environment_path, manifest, store, progress_callback, percentage) end catch err Base.display_error(stderr, err, catch_backtrace()) diff --git a/src/dynamic_feature.jl b/src/dynamic_feature.jl index 40a5b30..0620086 100644 --- a/src/dynamic_feature.jl +++ b/src/dynamic_feature.jl @@ -1,16 +1,216 @@ -struct DynamicJuliaProcess - project::String +mutable struct DynamicJuliaProcess + project_uri::String proc::Union{Nothing, Base.Process} + endpoint::Union{Nothing, JSONRPC.JSONRPCEndpoint} - function DynamicJuliaProcess(project::String) + function DynamicJuliaProcess(project_uri::String) return new( - project, + project_uri, + nothing, nothing ) end end -function Base.start(djp::DynamicJuliaProcess) +function get_store(djp::DynamicJuliaProcess, store_path::String, depot_path) + @info "Calling get_store" store_path, depot_path + JSONRPC.send( + djp.endpoint, + 
JuliaDynamicAnalysisProtocol.get_store_request_type, + JuliaDynamicAnalysisProtocol.GetStoreParams( + projectUri = djp.project_uri, + storePath = store_path + ) + ) + + new_store = SymbolServer.recursive_copy(SymbolServer.stdlibs) + SymbolServer.load_project_packages_into_store!(store_path, depot_path, djp.project_uri, new_store, nothing) + + return new_store +end + +function start(djp::DynamicJuliaProcess) + pipe_name = JSONRPC.generate_pipe_name() + server = Sockets.listen(pipe_name) + + julia_dynamic_analysis_process_script = joinpath(@__DIR__, "../juliadynamicanalysisprocess/app/julia_dynamic_analysis_process_main.jl") + + # pipe_out = Pipe() + + # jlArgs = copy(env.juliaArgs) + + # if env.juliaNumThreads!==missing && env.juliaNumThreads == "auto" + # push!(jlArgs, "--threads=auto") + # end + + # jlEnv = copy(ENV) + + # for (k,v) in pairs(env.env) + # if v!==nothing + # jlEnv[k] = v + # elseif haskey(jlEnv, k) + # delete!(jlEnv, k) + # end + # end + + # if env.juliaNumThreads!==missing && env.juliaNumThreads!="auto" && env.juliaNumThreads!="" + # jlEnv["JULIA_NUM_THREADS"] = env.juliaNumThreads + # end + + error_handler_file = nothing + crash_reporting_pipename = nothing + + error_handler_file = error_handler_file === nothing ? [] : [error_handler_file] + crash_reporting_pipename = crash_reporting_pipename === nothing ? [] : [crash_reporting_pipename] + + @debug "Launch proc" + djp.proc = open( + pipeline( + Cmd(`julia --startup-file=no --history-file=no --depwarn=no $julia_dynamic_analysis_process_script $pipe_name $(error_handler_file...) 
$(crash_reporting_pipename...)`, detach=false), + # stdout = pipe_out, + # stderr = pipe_out + ) + ) + + # @async try + # begin_marker = "\x1f3805a0ad41b54562a46add40be31ca27" + # end_marker = "\x1f4031af828c3d406ca42e25628bb0aa77" + # buffer = "" + # current_output_testitem_id = nothing + # while !eof(pipe_out) + # data = readavailable(pipe_out) + # data_as_string = String(data) + + # buffer *= data_as_string + + # output_for_test_proc = IOBuffer() + # output_for_test_items = Pair{Union{Nothing,String},IOBuffer}[] + + # i = 1 + # while i<=length(buffer) + # might_be_begin_marker = false + # might_be_end_marker = false + + # if current_output_testitem_id === nothing + # j = 1 + # might_be_begin_marker = true + # while i + j - 1<=length(buffer) && j <= length(begin_marker) + # if buffer[i + j - 1] != begin_marker[j] || nextind(buffer, i + j - 1) != i + j + # might_be_begin_marker = false + # break + # end + # j += 1 + # end + # is_begin_marker = might_be_begin_marker && length(buffer) - i + 1 >= length(begin_marker) + + # if is_begin_marker + # ti_id_end_index = findfirst("\"", SubString(buffer, i)) + # if ti_id_end_index === nothing + # break + # else + # current_output_testitem_id = SubString(buffer, i + length(begin_marker), i + ti_id_end_index.start - 2) + # i = nextind(buffer, i + ti_id_end_index.start - 1) + # end + # elseif might_be_begin_marker + # break + # end + # else + # j = 1 + # might_be_end_marker = true + # while i + j - 1<=length(buffer) && j <= length(end_marker) + # if buffer[i + j - 1] != end_marker[j] || nextind(buffer, i + j - 1) != i + j + # might_be_end_marker = false + # break + # end + # j += 1 + # end + # is_end_marker = might_be_end_marker && length(buffer) - i + 1 >= length(end_marker) + + # if is_end_marker + # current_output_testitem_id = nothing + # i = i + length(end_marker) + # elseif might_be_end_marker + # break + # end + # end + + # if !might_be_begin_marker && !might_be_end_marker + # print(output_for_test_proc, buffer[i]) + + # 
if length(output_for_test_items) == 0 || output_for_test_items[end].first != current_output_testitem_id + # push!(output_for_test_items, current_output_testitem_id => IOBuffer()) + # end + + # output_for_ti = output_for_test_items[end].second + # if !CancellationTokens.is_cancellation_requested(token) + # print(output_for_ti, buffer[i]) + # end + + # i = nextind(buffer, i) + # end + # end + + # buffer = buffer[i:end] + + # output_for_test_proc_as_string = String(take!(output_for_test_proc)) + + # if length(output_for_test_proc_as_string) > 0 + # put!( + # controller_msg_channel, + # ( + # event = :testprocess_output, + # id = testprocess_id, + # output = output_for_test_proc_as_string + # ) + # ) + # end + + # for (k,v) in output_for_test_items + # output_for_ti_as_string = String(take!(v)) + + # if length(output_for_ti_as_string) > 0 + # put!( + # testprocess_msg_channel, + # ( + # event = :append_output, + # testitem_id = something(k, missing), + # output = replace(output_for_ti_as_string, "\n"=>"\r\n") + # ) + # ) + # end + # end + # end + # catch err + # bt = catch_backtrace() + # if controller.err_handler !== nothing + # controller.err_handler(err, bt) + # else + # Base.display_error(err, bt) + # end + # end + + @debug "Waiting for connection from test process" + socket = Sockets.accept(server) + @debug "Connection established" + + djp.endpoint = JSONRPC.JSONRPCEndpoint(socket, socket) + + run(djp.endpoint) + + # while true + # msg = try + # JSONRPC.get_next_message(endpoint) + # catch err + # if CancellationTokens.is_cancellation_requested(token) + # break + # else + # rethrow(err) + # end + # end + # # @info "Processing msg from test process" msg + + # dispatch_testprocess_msg(endpoint, msg, testprocess_msg_channel) + # end end function Base.kill(djp::DynamicJuliaProcess) @@ -30,11 +230,13 @@ struct DynamicFeature end end -function Base.start(df::DynamicFeature) - Threads.@async begin +function start(df::DynamicFeature) + Threads.@async try while true msg = 
take!(df.in_channel) + @info "Processing message" msg + if msg.command == :set_environments # Delete Julia procs we no longer need foreach(setdiff(keys(df.procs), msg.environments)) do i @@ -43,14 +245,35 @@ function Base.start(df::DynamicFeature) end # Add new required procs - foreach(msg.environments, setdiff(keys(df.procs), )) do i + foreach(setdiff(msg.environments, keys(df.procs))) do i djp = DynamicJuliaProcess(i) df.procs[i] = djp + + @info "Adding proc for" i start(djp) end + + for i in msg.environments + env = get_store(df.procs[i], joinpath(homeidr(), "djpstore"), joinpath(homedir(), ".julia")) + + @info "WE GOT ENV FOR $i" + end else error("Unknown message: $msg") end end + catch err + flush(stderr) + bt = catch_backtrace() + Base.display_error(err, bt) + flush(stderr) + end +end + +function update_dynamic(jw::JuliaWorkspace) + projects = uri2filepath.(derived_project_folders(jw.runtime)) + + if jw.dynamic_feature !== nothing + put!(jw.dynamic_feature.in_channel, (command = :set_environments, environments = projects)) end end diff --git a/src/fileio.jl b/src/fileio.jl index 2b9e950..552a274 100644 --- a/src/fileio.jl +++ b/src/fileio.jl @@ -133,12 +133,14 @@ function add_folder_from_disc!(jw::JuliaWorkspace, path; ignore_io_errors=false) end end -function workspace_from_folders(workspace_folders::Vector{String}) - jw = JuliaWorkspace() +function workspace_from_folders(workspace_folders::Vector{String}; dynamic=false) + jw = JuliaWorkspace(;dynamic=dynamic) for folder in workspace_folders add_folder_from_disc!(jw, folder) end + update_dynamic(jw) + return jw end diff --git a/src/types.jl b/src/types.jl index 39003ab..59b1882 100644 --- a/src/types.jl +++ b/src/types.jl @@ -276,7 +276,7 @@ struct JuliaWorkspace runtime::Salsa.Runtime dynamic_feature::Union{Nothing,DynamicFeature} - function JuliaWorkspace(dynamic=false) + function JuliaWorkspace(;dynamic=false) rt = Salsa.Runtime() set_input_files!(rt, Set{URI}()) From 
dfbc2bd04133b5e7a04fb5eabc654e3111f54e2a Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 15:49:01 -0800 Subject: [PATCH 19/24] Update env --- juliadynamicanalysisprocess/environments/v1.0/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.1/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.10/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.11/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.12/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.2/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.3/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.4/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.5/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.6/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.7/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.8/Manifest.toml | 2 +- juliadynamicanalysisprocess/environments/v1.9/Manifest.toml | 2 +- 13 files changed, 13 insertions(+), 13 deletions(-) diff --git a/juliadynamicanalysisprocess/environments/v1.0/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.0/Manifest.toml index f4ddd84..b47aa75 100644 --- a/juliadynamicanalysisprocess/environments/v1.0/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.0/Manifest.toml @@ -17,7 +17,7 @@ deps = ["LinearAlgebra", "Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git 
a/juliadynamicanalysisprocess/environments/v1.1/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.1/Manifest.toml index f78cb7f..b558ce4 100644 --- a/juliadynamicanalysisprocess/environments/v1.1/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.1/Manifest.toml @@ -19,7 +19,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.10/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.10/Manifest.toml index c7ca693..0056c0e 100644 --- a/juliadynamicanalysisprocess/environments/v1.10/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.10/Manifest.toml @@ -35,7 +35,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[deps.JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.11/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.11/Manifest.toml index 87f062c..f833982 100644 --- a/juliadynamicanalysisprocess/environments/v1.11/Manifest.toml +++ 
b/juliadynamicanalysisprocess/environments/v1.11/Manifest.toml @@ -41,7 +41,7 @@ uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" version = "1.11.0" [[deps.JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.12/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.12/Manifest.toml index f455c36..ef1f75b 100644 --- a/juliadynamicanalysisprocess/environments/v1.12/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.12/Manifest.toml @@ -46,7 +46,7 @@ uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" version = "1.11.0" [[deps.JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.2/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.2/Manifest.toml index f78cb7f..b558ce4 100644 --- a/juliadynamicanalysisprocess/environments/v1.2/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.2/Manifest.toml @@ -19,7 +19,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", 
"InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.3/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.3/Manifest.toml index 100e571..dcdc13c 100644 --- a/juliadynamicanalysisprocess/environments/v1.3/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.3/Manifest.toml @@ -19,7 +19,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.4/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.4/Manifest.toml index bbeb099..17e1482 100644 --- a/juliadynamicanalysisprocess/environments/v1.4/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.4/Manifest.toml @@ -19,7 +19,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", 
"SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.5/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.5/Manifest.toml index bbeb099..17e1482 100644 --- a/juliadynamicanalysisprocess/environments/v1.5/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.5/Manifest.toml @@ -19,7 +19,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.6/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.6/Manifest.toml index b60d2c3..7a9ccc6 100644 --- a/juliadynamicanalysisprocess/environments/v1.6/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.6/Manifest.toml @@ -29,7 +29,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.7/Manifest.toml 
b/juliadynamicanalysisprocess/environments/v1.7/Manifest.toml index dd17beb..31ce127 100644 --- a/juliadynamicanalysisprocess/environments/v1.7/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.7/Manifest.toml @@ -32,7 +32,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[deps.JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.8/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.8/Manifest.toml index 98ec150..828cbed 100644 --- a/juliadynamicanalysisprocess/environments/v1.8/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.8/Manifest.toml @@ -35,7 +35,7 @@ deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[deps.JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" diff --git a/juliadynamicanalysisprocess/environments/v1.9/Manifest.toml b/juliadynamicanalysisprocess/environments/v1.9/Manifest.toml index 2f6b061..181e20b 100644 --- a/juliadynamicanalysisprocess/environments/v1.9/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/v1.9/Manifest.toml @@ -35,7 +35,7 @@ 
deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[deps.JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" From 4a083061af2a915652b402c56ec9383098caa398 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 15:49:07 -0800 Subject: [PATCH 20/24] Update env --- juliadynamicanalysisprocess/environments/fallback/Manifest.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/juliadynamicanalysisprocess/environments/fallback/Manifest.toml b/juliadynamicanalysisprocess/environments/fallback/Manifest.toml index 9abc5e8..09d1885 100644 --- a/juliadynamicanalysisprocess/environments/fallback/Manifest.toml +++ b/juliadynamicanalysisprocess/environments/fallback/Manifest.toml @@ -46,7 +46,7 @@ uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" version = "1.11.0" [[deps.JuliaDynamicAnalysisProcess]] -deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "Sockets", "Test", "UUIDs", "Unicode"] +deps = ["Base64", "Dates", "Distributed", "FileWatching", "InteractiveUtils", "LibGit2", "Logging", "Mmap", "Pkg", "REPL", "Random", "SHA", "Sockets", "Test", "UUIDs", "Unicode"] path = "../../JuliaDynamicAnalysisProcess" uuid = "d3729fb9-06c0-4e86-aa34-36de5a1c1049" version = "1.0.0" From c4383d1ce1e728b2a193874d4fc8a18dedbc009a Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 17:33:45 -0800 Subject: [PATCH 21/24] Progress --- .../src/JuliaDynamicAnalysisProcess.jl | 4 ++- .../src/symbolserver.jl | 8 ++--- 
...julia_dynamic_analysis_process_protocol.jl | 2 +- shared/symbolserver/utils.jl | 6 ++-- src/JuliaWorkspaces.jl | 1 + src/SymbolServer/SymbolServer.jl | 6 ++-- src/dynamic_feature.jl | 27 +++++--------- src/inputs.jl | 2 ++ src/layer_static_lint.jl | 8 ++++- src/public.jl | 6 ++++ src/types.jl | 35 +++++++++++++++++++ 11 files changed, 73 insertions(+), 32 deletions(-) diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl index 900e974..fd88049 100644 --- a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/JuliaDynamicAnalysisProcess.jl @@ -1,5 +1,7 @@ module JuliaDynamicAnalysisProcess +import Sockets, Pkg + include("pkg_imports.jl") include("../../../shared/julia_dynamic_analysis_process_protocol.jl") include("symbolserver.jl") @@ -8,7 +10,7 @@ struct JuliaDynamicAnalysisProcessState end function get_store_request(params::JuliaDynamicAnalysisProtocol.GetStoreParams, state::JuliaDynamicAnalysisProcessState, token) - Pkg.activate(uri2filepath(params.projectUri)) + Pkg.activate(params.projectPath) SymbolServer.get_store(params.storePath, nothing) end diff --git a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl index c15b7fb..e8a7bbc 100644 --- a/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl +++ b/juliadynamicanalysisprocess/JuliaDynamicAnalysisProcess/src/symbolserver.jl @@ -4,10 +4,6 @@ module SymbolServer start_time = time_ns() - -module LoadingBay -end - using Pkg, SHA using Base: UUID @@ -26,6 +22,8 @@ else end function get_store(store_path::String, progress_callback) + loading_bay = Module(:LoadingBay) + ctx = try Pkg.Types.Context() catch err @@ -80,7 +78,7 @@ function 
get_store(store_path::String, progress_callback) # Load all packages together # This is important, or methods added to functions in other packages that are loaded earlier would not be in the cache for (i, uuid) in enumerate(packages_to_load) - load_package(ctx, uuid, progress_callback, LoadingBay, round(Int, 100*(i - 1)/length(packages_to_load))) + load_package(ctx, uuid, progress_callback, loading_bay, round(Int, 100*(i - 1)/length(packages_to_load))) end # Create image of whole package env. This creates the module structure only. diff --git a/shared/julia_dynamic_analysis_process_protocol.jl b/shared/julia_dynamic_analysis_process_protocol.jl index fed40dd..52decbd 100644 --- a/shared/julia_dynamic_analysis_process_protocol.jl +++ b/shared/julia_dynamic_analysis_process_protocol.jl @@ -6,7 +6,7 @@ import ..JSONRPC.JSON using ..JSONRPC: @dict_readable, RequestType, NotificationType, Outbound @dict_readable struct GetStoreParams <: JSONRPC.Outbound - projectUri::String + projectPath::String storePath::String end diff --git a/shared/symbolserver/utils.jl b/shared/symbolserver/utils.jl index 853f8f3..f111052 100644 --- a/shared/symbolserver/utils.jl +++ b/shared/symbolserver/utils.jl @@ -648,13 +648,15 @@ function load_package(c::Pkg.Types.Context, uuid, progress_callback, loadingbay, else m = try progress_callback !== nothing && progress_callback(:STARTLOAD, pe_name, uuid, :noversion, percentage) - loadingbay.eval(:(import $(Symbol(pe_name)))) + Core.eval(loadingbay, (:(import $(Symbol(pe_name))))) progress_callback !== nothing && progress_callback(:STOPLOAD, pe_name) m = Base.invokelatest(() -> getfield(loadingbay, Symbol(pe_name))) - catch + catch err return end end + + return m end function write_cache(uuid, pkg::Package, outpath) diff --git a/src/JuliaWorkspaces.jl b/src/JuliaWorkspaces.jl index 5319b01..233464d 100644 --- a/src/JuliaWorkspaces.jl +++ b/src/JuliaWorkspaces.jl @@ -26,6 +26,7 @@ using .URIs2: filepath2uri, uri2filepath using .URIs2: URI, @uri_str 
include("exception_types.jl") +include("../shared/julia_dynamic_analysis_process_protocol.jl") include("dynamic_feature.jl") include("types.jl") include("sourcetext.jl") diff --git a/src/SymbolServer/SymbolServer.jl b/src/SymbolServer/SymbolServer.jl index 286bf72..181b91f 100644 --- a/src/SymbolServer/SymbolServer.jl +++ b/src/SymbolServer/SymbolServer.jl @@ -318,7 +318,7 @@ function load_project_packages_into_store!(store_path, depot_path, environment_p load_package_from_cache_into_store!(store_path, depot_path, uuid isa UUID ? uuid : UUID(uuid), environment_path, manifest, store, progress_callback, round(Int, 100 * (i - 1) / num_uuids)) end took = round(time() - t0, sigdigits = 2) - progress_callback("Loaded all packages into cache in $(took)s", 100) + progress_callback === nothing || progress_callback("Loaded all packages into cache in $(took)s", 100) end """ @@ -338,7 +338,7 @@ function load_package_from_cache_into_store!(store_path, depot_path, uuid::UUID, cache_path = joinpath(store_path, get_cache_path(manifest, uuid)...) 
if isfile(cache_path) t0 = time() - progress_callback("Loading $pe_name from cache...", percentage) + progress_callback === nothing || progress_callback("Loading $pe_name from cache...", percentage) try package_data = open(cache_path) do io CacheStore.read(io) @@ -358,7 +358,7 @@ function load_package_from_cache_into_store!(store_path, depot_path, uuid::UUID, if took > 0.01 msg *= " (took $(took)s)" end - progress_callback(msg, percentage) + progress_callback === nothing || progress_callback(msg, percentage) t0 = time() for dep in deps(pe) load_package_from_cache_into_store!(store_path, depot_path, packageuuid(dep), environment_path, manifest, store, progress_callback, percentage) diff --git a/src/dynamic_feature.jl b/src/dynamic_feature.jl index 0620086..238a282 100644 --- a/src/dynamic_feature.jl +++ b/src/dynamic_feature.jl @@ -1,11 +1,11 @@ mutable struct DynamicJuliaProcess - project_uri::String + project_path::String proc::Union{Nothing, Base.Process} endpoint::Union{Nothing, JSONRPC.JSONRPCEndpoint} - function DynamicJuliaProcess(project_uri::String) + function DynamicJuliaProcess(project_path::String) return new( - project_uri, + project_path, nothing, nothing ) @@ -13,18 +13,17 @@ mutable struct DynamicJuliaProcess end function get_store(djp::DynamicJuliaProcess, store_path::String, depot_path) - @info "Calling get_store" store_path, depot_path JSONRPC.send( djp.endpoint, JuliaDynamicAnalysisProtocol.get_store_request_type, JuliaDynamicAnalysisProtocol.GetStoreParams( - projectUri = djp.project_uri, - storePath = store_path + djp.project_path, + store_path ) ) new_store = SymbolServer.recursive_copy(SymbolServer.stdlibs) - SymbolServer.load_project_packages_into_store!(store_path, depot_path, djp.project_uri, new_store, nothing) + SymbolServer.load_project_packages_into_store!(store_path, depot_path, djp.project_path, new_store, nothing) return new_store end @@ -63,7 +62,6 @@ function start(djp::DynamicJuliaProcess) error_handler_file = error_handler_file 
=== nothing ? [] : [error_handler_file] crash_reporting_pipename = crash_reporting_pipename === nothing ? [] : [crash_reporting_pipename] - @debug "Launch proc" djp.proc = open( pipeline( Cmd(`julia --startup-file=no --history-file=no --depwarn=no $julia_dynamic_analysis_process_script $pipe_name $(error_handler_file...) $(crash_reporting_pipename...)`, detach=false), @@ -249,14 +247,13 @@ function start(df::DynamicFeature) djp = DynamicJuliaProcess(i) df.procs[i] = djp - @info "Adding proc for" i start(djp) end for i in msg.environments - env = get_store(df.procs[i], joinpath(homeidr(), "djpstore"), joinpath(homedir(), ".julia")) + env = get_store(df.procs[i], joinpath(homedir(), "djpstore"), joinpath(homedir(), ".julia")) - @info "WE GOT ENV FOR $i" + put!(df.out_channel, (command=:environment_ready, path=i, environment=env)) end else error("Unknown message: $msg") @@ -269,11 +266,3 @@ function start(df::DynamicFeature) flush(stderr) end end - -function update_dynamic(jw::JuliaWorkspace) - projects = uri2filepath.(derived_project_folders(jw.runtime)) - - if jw.dynamic_feature !== nothing - put!(jw.dynamic_feature.in_channel, (command = :set_environments, environments = projects)) - end -end diff --git a/src/inputs.jl b/src/inputs.jl index e53f0ff..4492207 100644 --- a/src/inputs.jl +++ b/src/inputs.jl @@ -2,3 +2,5 @@ Salsa.@declare_input input_files(rt)::Set{URI} Salsa.@declare_input input_text_file(rt, uri)::TextFile Salsa.@declare_input input_notebook_file(rt, uri)::NotebookFile Salsa.@declare_input input_fallback_test_project(rt)::Union{URI,Nothing} +Salsa.@declare_input input_project_environments(rt)::Set{URI} +Salsa.@declare_input input_project_environment(rt, uri)::StaticLint.ExternalEnv diff --git a/src/layer_static_lint.jl b/src/layer_static_lint.jl index f16b518..1e3dcec 100644 --- a/src/layer_static_lint.jl +++ b/src/layer_static_lint.jl @@ -3,7 +3,13 @@ function StaticLint.hasfile(rt, path) end Salsa.@derived function derived_external_env(rt, uri) - 
return StaticLint.ExternalEnv(Dict{Symbol,SymbolServer.ModuleStore}(:Base => SymbolServer.stdlibs[:Base], :Core => SymbolServer.stdlibs[:Core]), SymbolServer.collect_extended_methods(SymbolServer.stdlibs), Symbol[]) + envs = input_project_environments(rt) + + if uri in envs + return derived_input_project_environment(rt, uri) + else + return StaticLint.ExternalEnv(Dict{Symbol,SymbolServer.ModuleStore}(:Base => SymbolServer.stdlibs[:Base], :Core => SymbolServer.stdlibs[:Core]), SymbolServer.collect_extended_methods(SymbolServer.stdlibs), Symbol[]) + end end Salsa.@derived function derived_static_lint_meta(rt) diff --git a/src/public.jl b/src/public.jl index 3f450f0..4aaeca1 100644 --- a/src/public.jl +++ b/src/public.jl @@ -32,6 +32,8 @@ export JuliaWorkspace, Add a file to the workspace. If the file already exists, it will throw an error. """ function add_file!(jw::JuliaWorkspace, file::TextFile) + process_from_dynamic(jw) + files = input_files(jw.runtime) file.uri in files && throw(JWDuplicateFile("Duplicate file $(file.uri)")) @@ -49,6 +51,8 @@ end Update a file in the workspace. If the file does not exist, it will throw an error. """ function update_file!(jw::JuliaWorkspace, file::TextFile) + process_from_dynamic(jw) + has_file(jw, file.uri) || throw(JWUnknownFile("Cannot update unknown file $(file.uri).")) set_input_text_file!(jw.runtime, file.uri, file) @@ -231,6 +235,8 @@ Get all diagnostics from the workspace. - A vector of `Diagnostic` structs. """ function get_diagnostics(jw::JuliaWorkspace) + process_from_dynamic(jw) + return derived_all_diagnostics(jw.runtime) end diff --git a/src/types.jl b/src/types.jl index 59b1882..169bdc9 100644 --- a/src/types.jl +++ b/src/types.jl @@ -280,6 +280,7 @@ struct JuliaWorkspace rt = Salsa.Runtime() set_input_files!(rt, Set{URI}()) + set_input_project_environments!(rt, Set{URI}()) set_input_fallback_test_project!(rt, nothing) dynamic_feature = dynamic ? 
DynamicFeature() : nothing @@ -288,3 +289,37 @@ struct JuliaWorkspace new(rt, dynamic_feature) end end + +function update_dynamic(jw::JuliaWorkspace) + projects = uri2filepath.(derived_project_folders(jw.runtime)) + + if jw.dynamic_feature !== nothing + put!(jw.dynamic_feature.in_channel, (command = :set_environments, environments = projects)) + end +end + +function process_from_dynamic(jw::JuliaWorkspace) + if jw.dynamic_feature !== nothing + while isready(jw.dynamic_feature.out_channel) + msg = take!(jw.dynamic_feature.out_channel) + + if msg.command == :environment_ready + env = msg.environment + + old_environments = input_project_environments(jw.runtime) + new_environments = Set{URI}([old_environments...; filepath2uri(msg.path)]) + set_input_project_environments!(jw.runtime, new_environments) + + ext_env = StaticLint.ExternalEnv( + env, + SymbolServer.collect_extended_methods(env), + collect(keys(env)) + ) + + set_input_project_environment!(jw.runtime, filepath2uri(msg.path), ext_env) + else + error("Unknown message: $msg") + end + end + end +end From 55bf87f64de929ca2f08dc07a35cea59bebf62c5 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Tue, 2 Dec 2025 17:44:10 -0800 Subject: [PATCH 22/24] Remove no longer needed code --- src/SymbolServer/indexbasestdlib.jl | 56 ----------- src/SymbolServer/indexpackage.jl | 65 ------------- src/SymbolServer/server.jl | 138 ---------------------------- 3 files changed, 259 deletions(-) delete mode 100644 src/SymbolServer/indexbasestdlib.jl delete mode 100644 src/SymbolServer/indexpackage.jl delete mode 100644 src/SymbolServer/server.jl diff --git a/src/SymbolServer/indexbasestdlib.jl b/src/SymbolServer/indexbasestdlib.jl deleted file mode 100644 index c598c05..0000000 --- a/src/SymbolServer/indexbasestdlib.jl +++ /dev/null @@ -1,56 +0,0 @@ -module SymbolServer - -using Pkg, SHA -using Base: UUID - -@info "Indexing Julia $VERSION..." 
- -# This path will always be mounted in the docker container in which we are running -store_path = "/symcache" - -cache_package_folder_path = joinpath(store_path, "v1", "stdlib") - -mkpath(cache_package_folder_path) - -module LoadingBay end - -include("faketypes.jl") -include("symbols.jl") -include("utils.jl") -include("serialize.jl") -using .CacheStore - -# TODO Make this load all the stdlibs and save them - -# m = try -# LoadingBay.eval(:(import $current_package_name)) -# getfield(LoadingBay, current_package_name) -# catch e -# @info "Could not load package, exiting." -# exit(10) -# end - -# # Get the symbols -# env = getenvtree([current_package_name]) -# symbols(env, m) - -# # Strip out paths -# modify_dirs(env[current_package_name], f -> modify_dir(f, pkg_src_dir(Base.loaded_modules[Base.PkgId(current_package_uuid, string(current_package_name))]), "PLACEHOLDER")) - -# # There's an issue here - @enum used within CSTParser seems to add a method that is introduced from Enums.jl... - -# Pkg.PlatformEngines.probe_platform_engines!() - -# mktempdir() do path -# # Write them to a file -# open(joinpath(path, filename_with_extension), "w") do io -# CacheStore.write(io, Package(string(current_package_name), env[current_package_name], current_package_uuid, nothing)) -# end - -# # cp(joinpath(path, filename_with_extension), cache_path) -# Pkg.PlatformEngines.package(path, cache_path_compressed) -# end - -@info "Finished indexing." - -end diff --git a/src/SymbolServer/indexpackage.jl b/src/SymbolServer/indexpackage.jl deleted file mode 100644 index 421a0bf..0000000 --- a/src/SymbolServer/indexpackage.jl +++ /dev/null @@ -1,65 +0,0 @@ -module SymbolServer - -using Pkg, SHA -using Base: UUID - -current_package_name = Symbol(ARGS[1]) -current_package_version = VersionNumber(ARGS[2]) -current_package_uuid = UUID(ARGS[3]) -current_package_treehash = ARGS[4] - -@info "Indexing package $current_package_name $current_package_version..." 
- -# This path will always be mounted in the docker container in which we are running -store_path = "/symcache" - -current_package_versionwithoutplus = replace(string(current_package_version), '+'=>'_') -filename_with_extension = "v$(current_package_versionwithoutplus)_$current_package_treehash.jstore" - -module LoadingBay end - -try - Pkg.add(name=string(current_package_name), version=current_package_version) -catch err - @info "Could not install package, exiting" - exit(20) -end - -# TODO Make the code below ONLY write a cache file for the package we just added here. -include("faketypes.jl") -include("symbols.jl") -include("utils.jl") -include("serialize.jl") -using .CacheStore - -# Load package -m = try - LoadingBay.eval(:(import $current_package_name)) - getfield(LoadingBay, current_package_name) -catch e - @info "Could not load package, exiting." - exit(10) -end - -# Get the symbols -env = getenvtree([current_package_name]) -symbols(env, m, get_return_type=true) - - # Strip out paths -modify_dirs(env[current_package_name], f -> modify_dir(f, pkg_src_dir(Base.loaded_modules[Base.PkgId(current_package_uuid, string(current_package_name))]), "PLACEHOLDER")) - -# There's an issue here - @enum used within CSTParser seems to add a method that is introduced from Enums.jl... - -# Write them to a file -open(joinpath(store_path, filename_with_extension), "w") do io - CacheStore.write(io, Package(string(current_package_name), env[current_package_name], current_package_uuid, nothing)) -end - -@info "Finished indexing." - -# We are exiting with a custom error code to indicate success. This allows -# the parent process to distinguish between a successful run and one -# where the package exited the process. 
-exit(37) - -end diff --git a/src/SymbolServer/server.jl b/src/SymbolServer/server.jl deleted file mode 100644 index d892b1f..0000000 --- a/src/SymbolServer/server.jl +++ /dev/null @@ -1,138 +0,0 @@ -module SymbolServer - -!in("@stdlib", LOAD_PATH) && push!(LOAD_PATH, "@stdlib") # Make sure we can load stdlibs - -import Sockets -pipename = length(ARGS) > 1 ? ARGS[2] : nothing -conn = pipename !== nothing ? Sockets.connect(pipename) : nothing - -start_time = time_ns() - -# Try to lower the priority of this process so that it doesn't block the -# user system. -@static if Sys.iswindows() - # Get process handle - p_handle = ccall(:GetCurrentProcess, stdcall, Ptr{Cvoid}, ()) - - # Set BELOW_NORMAL_PRIORITY_CLASS - ret = ccall(:SetPriorityClass, stdcall, Cint, (Ptr{Cvoid}, Culong), p_handle, 0x00004000) - ret != 1 && @warn "Something went wrong when setting BELOW_NORMAL_PRIORITY_CLASS." -else - ret = ccall(:nice, Cint, (Cint,), 1) - # We don't check the return value because it doesn't really matter -end - -module LoadingBay -end - -using Pkg, SHA -using Base: UUID - -include("faketypes.jl") -include("symbols.jl") -include("utils.jl") -include("serialize.jl") -using .CacheStore - -store_path = length(ARGS) > 0 ? ARGS[1] : abspath(joinpath(@__DIR__, "..", "store")) - -ctx = try - Pkg.Types.Context() -catch err - @info "Package environment can't be read." - exit() -end -# Add some methods to check whether a package is part of the standard library and so -# won't need recaching. -if isdefined(Pkg.Types, :is_stdlib) - is_stdlib(uuid::UUID) = Pkg.Types.is_stdlib(uuid) -else - is_stdlib(uuid::UUID) = uuid in keys(ctx.stdlibs) -end - -server = Server(store_path, ctx, Dict{UUID,Package}()) - -written_caches = String[] # List of caches that have already been written -toplevel_pkgs = deps(project(ctx)) # First get a list of all package UUIds that we want to cache -packages_to_load = [] - -# Obtain the directory containing the active Manifest.toml. 
Any 'develop'ed dependencies -# will contain a path that is relative to this directory. -manifest_dir = dirname(ctx.env.manifest_file) - -# Next make sure the cache is up-to-date for all of these. -for (pk_name, uuid) in toplevel_pkgs - uuid isa UUID || (uuid = UUID(uuid)) - if !isinmanifest(ctx, uuid) - @info "$pk_name not in manifest, skipping." - continue - end - pe = frommanifest(manifest(ctx), uuid) - cache_path = joinpath(server.storedir, SymbolServer.get_cache_path(manifest(ctx), uuid)...) - - if isfile(cache_path) - if is_package_deved(manifest(ctx), uuid) - try - cached_version = open(cache_path) do io - CacheStore.read(io) - end - if sha_pkg(manifest_dir, frommanifest(manifest(ctx), uuid)) != cached_version.sha - @info "Outdated sha, will recache package $pk_name ($uuid)" - push!(packages_to_load, uuid) - else - @info "Package $pk_name ($uuid) is cached." - end - catch err - @info "Couldn't load $pk_name ($uuid) from file, will recache." - end - else - @info "Package $pk_name ($uuid) is cached." - end - else - @info "Will cache package $pk_name ($uuid)" - push!(packages_to_load, uuid) - end -end - -# Load all packages together -# This is important, or methods added to functions in other packages that are loaded earlier would not be in the cache -for (i, uuid) in enumerate(packages_to_load) - load_package(ctx, uuid, conn, LoadingBay, round(Int, 100*(i - 1)/length(packages_to_load))) -end - -# Create image of whole package env. This creates the module structure only. -env_symbols = getenvtree() - -# Populate the above with symbols, skipping modules that don't need caching. 
-# symbols (env_symbols) -visited = Base.IdSet{Module}([Base, Core]) - -for (pid, m) in Base.loaded_modules - if pid.uuid !== nothing && is_stdlib(pid.uuid) && - isinmanifest(ctx, pid.uuid) && - isfile(joinpath(server.storedir, SymbolServer.get_cache_path(manifest(ctx), pid.uuid)...)) - push!(visited, m) - delete!(env_symbols, Symbol(pid.name)) - end -end - -symbols(env_symbols, nothing, getallns(), visited) - -# Wrap the `ModuleStore`s as `Package`s. -for (pkg_name, cache) in env_symbols - !isinmanifest(ctx, String(pkg_name)) && continue - uuid = packageuuid(ctx, String(pkg_name)) - pe = frommanifest(ctx, uuid) - server.depot[uuid] = Package(String(pkg_name), cache, uuid, sha_pkg(manifest_dir, pe)) -end - -write_depot(server, server.context, written_caches) - -@info "Symbol server indexing took $((time_ns() - start_time) / 1e9) seconds." - -if conn !== nothing - println(conn, "DONE") - close(conn) -end - -end From 4bac9c4a237e9586d12a009f717191667814b943 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Wed, 3 Dec 2025 16:37:21 -0800 Subject: [PATCH 23/24] Progress on dynamic feature --- src/dynamic_feature.jl | 16 ++++++++++------ src/fileio.jl | 13 ++++++++++++- src/layer_projects.jl | 10 +++++++++- src/public.jl | 38 ++++++++++++++++++++++++++++++++++++++ src/types.jl | 5 ++++- 5 files changed, 73 insertions(+), 9 deletions(-) diff --git a/src/dynamic_feature.jl b/src/dynamic_feature.jl index 238a282..b7723a5 100644 --- a/src/dynamic_feature.jl +++ b/src/dynamic_feature.jl @@ -1,11 +1,13 @@ mutable struct DynamicJuliaProcess project_path::String + julia_version::Union{Nothing,VersionNumber} proc::Union{Nothing, Base.Process} endpoint::Union{Nothing, JSONRPC.JSONRPCEndpoint} - function DynamicJuliaProcess(project_path::String) + function DynamicJuliaProcess(project_path::String, julia_version::Union{Nothing,VersionNumber}) return new( project_path, + julia_version, nothing, nothing ) @@ -62,9 +64,11 @@ function start(djp::DynamicJuliaProcess) error_handler_file 
= error_handler_file === nothing ? [] : [error_handler_file] crash_reporting_pipename = crash_reporting_pipename === nothing ? [] : [crash_reporting_pipename] + julia_version = djp.julia_version === nothing ? [] : [string(djp.julia_version)] + djp.proc = open( pipeline( - Cmd(`julia --startup-file=no --history-file=no --depwarn=no $julia_dynamic_analysis_process_script $pipe_name $(error_handler_file...) $(crash_reporting_pipename...)`, detach=false), + Cmd(`julia $(julia_version...)--startup-file=no --history-file=no --depwarn=no $julia_dynamic_analysis_process_script $pipe_name $(error_handler_file...) $(crash_reporting_pipename...)`, detach=false), # stdout = pipe_out, # stderr = pipe_out ) @@ -237,20 +241,20 @@ function start(df::DynamicFeature) if msg.command == :set_environments # Delete Julia procs we no longer need - foreach(setdiff(keys(df.procs), msg.environments)) do i + foreach(setdiff(keys(df.procs), keys(msg.environments))) do i kill(procs[i]) delete!(df.procs, i) end # Add new required procs - foreach(setdiff(msg.environments, keys(df.procs))) do i - djp = DynamicJuliaProcess(i) + foreach(setdiff(keys(msg.environments), keys(df.procs))) do i + djp = DynamicJuliaProcess(i, msg.environments[i].julia_version) df.procs[i] = djp start(djp) end - for i in msg.environments + for i in keys(msg.environments) env = get_store(df.procs[i], joinpath(homedir(), "djpstore"), joinpath(homedir(), ".julia")) put!(df.out_channel, (command=:environment_ready, path=i, environment=env)) diff --git a/src/fileio.jl b/src/fileio.jl index 552a274..0066d64 100644 --- a/src/fileio.jl +++ b/src/fileio.jl @@ -110,20 +110,30 @@ function read_path_into_textdocuments(uri::URI; ignore_io_errors=false) end function add_file_from_disc!(jw::JuliaWorkspace, path) + process_from_dynamic(jw) + uri = filepath2uri(path) text_file = read_text_file_from_uri(uri) add_file!(jw, text_file) + + update_dynamic(jw) end function update_file_from_disc!(jw::JuliaWorkspace, path) + 
process_from_dynamic(jw) + uri = filepath2uri(path) text_file = read_text_file_from_uri(uri) update_file!(jw, text_file) + + update_dynamic(jw) end function add_folder_from_disc!(jw::JuliaWorkspace, path; ignore_io_errors=false) + process_from_dynamic(jw) + path_uri = filepath2uri(path) files = read_path_into_textdocuments(path_uri, ignore_io_errors=ignore_io_errors) @@ -131,6 +141,8 @@ function add_folder_from_disc!(jw::JuliaWorkspace, path; ignore_io_errors=false) for i in files add_file!(jw, i) end + + update_dynamic(jw) end function workspace_from_folders(workspace_folders::Vector{String}; dynamic=false) @@ -141,6 +153,5 @@ function workspace_from_folders(workspace_folders::Vector{String}; dynamic=false end update_dynamic(jw) - return jw end diff --git a/src/layer_projects.jl b/src/layer_projects.jl index ff302eb..3f18378 100644 --- a/src/layer_projects.jl +++ b/src/layer_projects.jl @@ -79,6 +79,14 @@ Salsa.@derived function derived_project(rt, uri) error("") end + julia_version = if manifest_version == "1.0" + nothing + elseif manifest_version=="2.0" && haskey(manifest_content, "julia_version") + tryparse(VersionNumber, manifest_content["julia_version"]) + else + error("") + end + for (k_entry, v_entry) in pairs(manifest_deps) v_entry isa Vector || continue length(v_entry)==1 || continue @@ -126,7 +134,7 @@ Salsa.@derived function derived_project(rt, uri) project_text_content = input_text_file(rt, project_file) project_content_hash = hash(project_text_content.content.content, hash(manifest_text_content.content.content)) - JuliaProject(project_file, manifest_file, project_content_hash, deved_packages, regular_packages, stdlib_packages) + JuliaProject(project_file, manifest_file, julia_version, project_content_hash, deved_packages, regular_packages, stdlib_packages) end Salsa.@derived function derived_package_folders(rt) diff --git a/src/public.jl b/src/public.jl index 4aaeca1..c29810d 100644 --- a/src/public.jl +++ b/src/public.jl @@ -43,6 +43,8 @@ function 
add_file!(jw::JuliaWorkspace, file::TextFile) set_input_files!(jw.runtime, new_files) set_input_text_file!(jw.runtime, file.uri, file) + + update_dynamic(jw) end """ @@ -56,6 +58,8 @@ function update_file!(jw::JuliaWorkspace, file::TextFile) has_file(jw, file.uri) || throw(JWUnknownFile("Cannot update unknown file $(file.uri).")) set_input_text_file!(jw.runtime, file.uri, file) + + update_dynamic(jw) end """ @@ -68,6 +72,8 @@ Get all text files from the workspace. - A set of URIs. """ function get_text_files(jw::JuliaWorkspace) + process_from_dynamic(jw) + return derived_text_files(jw.runtime) end @@ -81,6 +87,8 @@ Get all Julia files from the workspace. - A set of URIs. """ function get_julia_files(jw::JuliaWorkspace) + process_from_dynamic(jw) + return derived_julia_files(jw.runtime) end @@ -93,6 +101,8 @@ Get all files from the workspace. - A set of URIs. """ function get_files(jw::JuliaWorkspace) + process_from_dynamic(jw) + return input_files(jw.runtime) end @@ -102,6 +112,8 @@ end Check if a file exists in the workspace. """ function has_file(jw, uri) + process_from_dynamic(jw) + return derived_has_file(jw.runtime, uri) end @@ -115,6 +127,8 @@ Get a text file from the workspace. If the file does not exist, it will throw an - A [`TextFile`](@ref) struct. """ function get_text_file(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + files = input_files(jw.runtime) uri in files || throw(JWUnknownFile("Unknown file $uri")) @@ -128,6 +142,8 @@ end Remove a file from the workspace. If the file does not exist, it will throw an error. 
""" function remove_file!(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + files = input_files(jw.runtime) uri in files || throw(JWUnknownFile("Trying to remove non-existing file $uri")) @@ -137,6 +153,8 @@ function remove_file!(jw::JuliaWorkspace, uri::URI) set_input_files!(jw.runtime, new_files) delete_input_text_file!(jw.runtime, uri) + + update_dynamic(jw) end """ @@ -145,6 +163,8 @@ end Remove all children of a folder from the workspace. """ function remove_all_children!(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + files = get_files(jw) uri_as_string = string(uri) @@ -156,6 +176,8 @@ function remove_all_children!(jw::JuliaWorkspace, uri::URI) remove_file!(jw, file) end end + + update_dynamic(jw) end # Projects @@ -170,6 +192,8 @@ Get all packages from the workspace. - A set of URIs. """ function get_packages(jw::JuliaWorkspace) + process_from_dynamic(jw) + return derived_package_folders(jw.runtime) end @@ -183,6 +207,8 @@ Get all projects from the workspace. - A set of URIs. """ function get_projects(jw::JuliaWorkspace) + process_from_dynamic(jw) + return derived_project_folders(jw.runtime) end @@ -199,6 +225,8 @@ Get the syntax tree of a Julia file from the workspace. and `diagnostics` is a vector of `Diagnostic` structs. """ function get_julia_syntax_tree(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + return derived_julia_syntax_tree(jw.runtime, uri) end @@ -208,6 +236,8 @@ end Get the syntax tree of a TOML file from the workspace. """ function get_toml_syntax_tree(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + return derived_toml_syntax_tree(jw.runtime, uri) end @@ -223,6 +253,8 @@ Get the diagnostics of a file from the workspace. - A vector of `Diagnostic` structs. 
""" function get_diagnostic(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + return derived_diagnostics(jw.runtime, uri) end @@ -252,6 +284,8 @@ Returns - an instance of the struct [`TestDetails`](@ref) """ function get_test_items(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + derived_testitems(jw.runtime, uri) end @@ -265,6 +299,8 @@ Returns - an instance of the struct [`TestDetails`](@ref) """ function get_test_items(jw::JuliaWorkspace) + process_from_dynamic(jw) + derived_all_testitems(jw.runtime) end @@ -278,5 +314,7 @@ Returns - an instance of the struct [`JuliaTestEnv`](@ref) """ function get_test_env(jw::JuliaWorkspace, uri::URI) + process_from_dynamic(jw) + derived_testenv(jw.runtime, uri) end diff --git a/src/types.jl b/src/types.jl index 169bdc9..a8d71c8 100644 --- a/src/types.jl +++ b/src/types.jl @@ -153,6 +153,7 @@ Details of a Julia project. - `project_file_uri`::URI - `manifest_file_uri`::URI +- `julia_version`::Union{Nothing,VersionNumber} - content_hash::UInt - deved_packages::Dict{String,JuliaProjectEntryDevedPackage} - regular_packages::Dict{String,JuliaProjectEntryRegularPackage} @@ -161,6 +162,7 @@ Details of a Julia project. 
@auto_hash_equals struct JuliaProject project_file_uri::URI manifest_file_uri::URI + julia_version::Union{Nothing,VersionNumber} content_hash::UInt deved_packages::Dict{String,JuliaProjectEntryDevedPackage} regular_packages::Dict{String,JuliaProjectEntryRegularPackage} @@ -294,7 +296,7 @@ function update_dynamic(jw::JuliaWorkspace) projects = uri2filepath.(derived_project_folders(jw.runtime)) if jw.dynamic_feature !== nothing - put!(jw.dynamic_feature.in_channel, (command = :set_environments, environments = projects)) + put!(jw.dynamic_feature.in_channel, (command = :set_environments, environments = Dict{String,JuliaProject}(i => derived_project(jw.runtime, i) for i in projects))) end end @@ -304,6 +306,7 @@ function process_from_dynamic(jw::JuliaWorkspace) msg = take!(jw.dynamic_feature.out_channel) if msg.command == :environment_ready + @info "Processing new env" msg.path msg.environment env = msg.environment old_environments = input_project_environments(jw.runtime) From a821e2ab7b843dd0199773fa46e5f8a131aa3c39 Mon Sep 17 00:00:00 2001 From: David Anthoff Date: Wed, 3 Dec 2025 18:41:42 -0800 Subject: [PATCH 24/24] Fix some bugs --- shared/symbolserver/utils.jl | 2 +- src/dynamic_feature.jl | 12 ++++++++---- src/types.jl | 4 ++-- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/shared/symbolserver/utils.jl b/shared/symbolserver/utils.jl index f111052..a58e5ad 100644 --- a/shared/symbolserver/utils.jl +++ b/shared/symbolserver/utils.jl @@ -643,7 +643,7 @@ function load_package(c::Pkg.Types.Context, uuid, progress_callback, loadingbay, pid = Base.PkgId(uuid isa String ?
Base.UUID(uuid) : uuid, pe_name) if pid in keys(Base.loaded_modules) progress_callback !== nothing && progress_callback(:PROCESSPKG, pe_name, uuid, :noversion, percentage) - loadingbay.eval(:($(Symbol(pe_name)) = $(Base.loaded_modules[pid]))) + Core.eval(loadingbay, :($(Symbol(pe_name)) = $(Base.loaded_modules[pid]))) m = Base.invokelatest(() -> getfield(loadingbay, Symbol(pe_name))) else m = try diff --git a/src/dynamic_feature.jl b/src/dynamic_feature.jl index b7723a5..5ee5901 100644 --- a/src/dynamic_feature.jl +++ b/src/dynamic_feature.jl @@ -64,11 +64,17 @@ function start(djp::DynamicJuliaProcess) error_handler_file = error_handler_file === nothing ? [] : [error_handler_file] crash_reporting_pipename = crash_reporting_pipename === nothing ? [] : [crash_reporting_pipename] - julia_version = djp.julia_version === nothing ? [] : [string(djp.julia_version)] + julia_version = djp.julia_version === nothing ? [] : ["+$(djp.julia_version)"] + + env_to_use = copy(ENV) + + if haskey(env_to_use, "JULIA_DEPOT_PATH") + delete!(env_to_use, "JULIA_DEPOT_PATH") + end djp.proc = open( pipeline( - Cmd(`julia $(julia_version...)--startup-file=no --history-file=no --depwarn=no $julia_dynamic_analysis_process_script $pipe_name $(error_handler_file...) $(crash_reporting_pipename...)`, detach=false), + Cmd(`julia $(julia_version...) --startup-file=no --history-file=no --depwarn=no $julia_dynamic_analysis_process_script $pipe_name $(error_handler_file...) 
$(crash_reporting_pipename...)`, detach=false, env=env_to_use), # stdout = pipe_out, # stderr = pipe_out ) @@ -252,9 +258,7 @@ function start(df::DynamicFeature) df.procs[i] = djp start(djp) - end - for i in keys(msg.environments) env = get_store(df.procs[i], joinpath(homedir(), "djpstore"), joinpath(homedir(), ".julia")) put!(df.out_channel, (command=:environment_ready, path=i, environment=env)) diff --git a/src/types.jl b/src/types.jl index a8d71c8..e9f55cc 100644 --- a/src/types.jl +++ b/src/types.jl @@ -293,10 +293,10 @@ struct JuliaWorkspace end function update_dynamic(jw::JuliaWorkspace) - projects = uri2filepath.(derived_project_folders(jw.runtime)) + project_uris = derived_project_folders(jw.runtime) if jw.dynamic_feature !== nothing - put!(jw.dynamic_feature.in_channel, (command = :set_environments, environments = Dict{String,JuliaProject}(i => derived_project(jw.runtime, i) for i in projects))) + put!(jw.dynamic_feature.in_channel, (command = :set_environments, environments = Dict{String,JuliaProject}(uri2filepath(i) => derived_project(jw.runtime, i) for i in project_uris))) end end