Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix global vars #1

Merged
merged 2 commits into from
Jan 23, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 1 addition & 5 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,5 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added

### Fixed
- Fixed wrong initialization of `CONV_HISTORY` and other globals that led to `UndefVarError`. Moved several globals to the `const Ref{}` pattern to ensure type stability, but this means they must now always be dereferenced with `[]` (eg, `MAIN_INDEX[]` instead of `MAIN_INDEX`).

## [Unreleased]

### Added

### Fixed
13 changes: 7 additions & 6 deletions src/AIHelpMe.jl
Original file line number Diff line number Diff line change
Expand Up @@ -24,14 +24,15 @@ include("generation.jl")
export @aihelp_str, @aihelp!_str
include("macros.jl")

## Globals
const CONV_HISTORY = Vector{Vector{PT.AbstractMessage}}()
const CONV_HISTORY_LOCK = ReentrantLock()
const MAX_HISTORY_LENGTH = 1
const LAST_CONTEXT = Ref{Union{Nothing, RAG.RAGContext}}(nothing)
const MAIN_INDEX = Ref{Union{Nothing, RAG.AbstractChunkIndex}}(nothing)
function __init__()
## Globals
CONV_HISTORY::Vector{Vector{<:Any}} = Vector{Vector{<:Any}}()
CONV_HISTORY_LOCK::ReentrantLock = ReentrantLock()
MAX_HISTORY_LENGTH::Int = 1
LAST_CONTEXT::Union{Nothing, RAG.RAGContext} = nothing
## Load index
MAIN_INDEX::Union{Nothing, RAG.AbstractChunkIndex} = load_index!()
MAIN_INDEX[] = load_index!()
end

end
6 changes: 3 additions & 3 deletions src/generation.jl
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ function aihelp(index::RAG.AbstractChunkIndex,
filtered_candidates,
reranked_candidates)
lock(CONV_HISTORY_LOCK) do
PT.LAST_CONTEXT = rag_context
LAST_CONTEXT[] = rag_context
end

if return_context # for evaluation
Expand All @@ -151,6 +151,6 @@ end
function aihelp(question::AbstractString;
kwargs...)
global MAIN_INDEX
@assert !isnothing(MAIN_INDEX) "MAIN_INDEX is not loaded. Use `load_index!` to load an index."
aihelp(MAIN_INDEX, question; kwargs...)
@assert !isnothing(MAIN_INDEX[]) "MAIN_INDEX is not loaded. Use `load_index!` to load an index."
aihelp(MAIN_INDEX[], question; kwargs...)
end
4 changes: 2 additions & 2 deletions src/macros.jl
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ macro aihelp_str(user_question, flags...)
model = isempty(flags) ? PT.MODEL_CHAT : only(flags)
prompt = Meta.parse("\"$(escape_string(user_question))\"")
quote
conv = aihelp($(esc(MAIN_INDEX)), $(esc(prompt));
conv = aihelp($(esc(MAIN_INDEX[])), $(esc(prompt));
model = $(esc(model)),
return_all = true)
PT.push_conversation!($(esc(CONV_HISTORY)), conv, $(esc(MAX_HISTORY_LENGTH)))
Expand Down Expand Up @@ -84,7 +84,7 @@ aihelp!"Can you create it from named tuple?"gpt4t
Ensure that the conversation history is not too long to maintain relevancy and coherence in the AI's responses. The history length is managed by `MAX_HISTORY_LENGTH`.
"""
macro aihelp!_str(user_question, flags...)
global CONV_HISTORY, LAST_CONTEXT, MAIN_INDEX
global CONV_HISTORY, MAIN_INDEX
model = isempty(flags) ? PT.MODEL_CHAT : only(flags)
prompt = Meta.parse("\"$(escape_string(user_question))\"")
quote
Expand Down
14 changes: 7 additions & 7 deletions src/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ It can be useful to see the sources/references used by the AI model to generate

If you're using `aihelp()` make sure to set `return_context = true` to return the context.
"""
last_context() = PT.LAST_CONTEXT
last_context() = LAST_CONTEXT[]

struct ContextPreview
question::AbstractString
Expand Down Expand Up @@ -111,7 +111,7 @@ AIH.load_index!(index)
function load_index!(index::RAG.AbstractChunkIndex;
verbose::Bool = true, kwargs...)
global MAIN_INDEX
MAIN_INDEX = index
MAIN_INDEX[] = index
verbose && @info "Loaded index into MAIN_INDEX"
return index
end
Expand All @@ -127,16 +127,16 @@ function load_index!(file_path::Union{Nothing, AbstractString} = nothing;
global MAIN_INDEX
if !isnothing(file_path)
@assert endswith(file_path, ".jls") "Provided file path must end with `.jls` (serialized Julia object)."
file_str = "from file $(file_path) "
file_str = " from a file $(file_path) "
else
artifact_path = artifact"juliaextra"
file_path = joinpath(artifact_path, "docs-index.jls")
file_str = " "
file_str = " from an artifact "
end
index = deserialize(file_path)
@assert index isa RAG.AbstractChunkIndex "Provided file path must point to a serialized RAG index (Deserialized type: $(typeof(index)))."
verbose && @info "Loaded index $(file_str)into MAIN_INDEX"
MAIN_INDEX = index
verbose && @info "Loaded index$(file_str)into MAIN_INDEX"
MAIN_INDEX[] = index

return index
end
Expand Down Expand Up @@ -167,7 +167,7 @@ AHM.update_index() |> AHM.load_index!
index = AHM.update_index(index)
```
"""
function update_index(index::RAG.AbstractChunkIndex = MAIN_INDEX,
function update_index(index::RAG.AbstractChunkIndex = MAIN_INDEX[],
modules::Vector{Module} = Base.Docs.modules;
verbose::Integer = 1,
separators = ["\n\n", ". ", "\n"], max_length::Int = 256,
Expand Down
Loading