From fbdc88a62f521370259d4a4d1837ac8a4fa55c69 Mon Sep 17 00:00:00 2001
From: J S <49557684+svilupp@users.noreply.github.com>
Date: Sun, 18 Aug 2024 13:05:21 +0200
Subject: [PATCH 1/2] update logs

---
 src/pipeline_defaults.jl | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/pipeline_defaults.jl b/src/pipeline_defaults.jl
index 6c2e826..0d6a55f 100644
--- a/src/pipeline_defaults.jl
+++ b/src/pipeline_defaults.jl
@@ -156,7 +156,7 @@ function update_pipeline!(
         @warn "Invalid configuration for knowledge packs! For `nomic-embed-text`, `embedding_dimension` must be 0. See the available artifacts."
     end
     if model_embedding == "text-embedding-3-large" &&
-       (embedding_dimension ∉ [1024, 0] || !isnothing(embedding_dimension))
+       !(embedding_dimension in [1024, 0] || isnothing(embedding_dimension))
         @warn "Invalid configuration for knowledge packs! For `text-embedding-3-large`, `embedding_dimension` must be 0 or 1024. See the available artifacts."
     end
 
@@ -184,7 +184,6 @@ function update_pipeline!(
     ## Update GLOBAL variables
     MODEL_CHAT = model_chat
     MODEL_EMBEDDING = model_embedding
-    @info embedding_dimension
     EMBEDDING_DIMENSION = embedding_dimension
 
     ## Set the options

From 6f5cc52cbbd0d24c099a8999abd3829f5b5e6b8f Mon Sep 17 00:00:00 2001
From: J S <49557684+svilupp@users.noreply.github.com>
Date: Sun, 18 Aug 2024 13:09:48 +0200
Subject: [PATCH 2/2] add documentation

---
 README.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 9e011b7..00e1ef4 100644
--- a/README.md
+++ b/README.md
@@ -202,7 +202,7 @@ We apply a Retrieval Augment Generation (RAG) pattern, ie,
 
 This ensures that the answers are not only based on general AI knowledge but are also specifically tailored to Julia's ecosystem and best practices.
 
-The "knowledge packs" are sourced from documentation sites and then processed with DocsScraper.jl.
+The "knowledge packs" are sourced from documentation sites and then processed with [DocsScraper.jl](https://github.com/JuliaGenAI/DocsScraper.jl).
 
 > [!NOTE]
 > If you would like to set up an automated process to create a new knowledge pack for some package/organization, let us know!
@@ -235,6 +235,9 @@ A: Tavily's API is used to search the best matching snippets from the documentat
 **Q: Can we use Ollama (locally-hosted) models?**
 A: Yes, see the Advanced section in the docs.
 
+**Q: How can I build knowledge packs for my package(s)?**
+A: Check out package [DocsScraper.jl](https://github.com/JuliaGenAI/DocsScraper.jl). It's what we use to build the knowledge packs loaded in this package!
+
 ## Future Directions
 
 AIHelpMe is continuously evolving. Future updates may include:
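
For context on PATCH 1/2: the old condition warned for every value of `embedding_dimension`, including the valid ones. Below is a minimal standalone Julia sketch of the corrected check; `valid_large_embedding_dim` is a hypothetical helper used only to illustrate when the `text-embedding-3-large` warning should fire, not code from the package itself.

```julia
# Hypothetical helper illustrating the corrected logic from the patch:
# warn only when the requested dimension is neither `nothing` nor one of
# the supported values (0 or 1024) for `text-embedding-3-large`.
valid_large_embedding_dim(embedding_dimension) =
    embedding_dimension in [1024, 0] || isnothing(embedding_dimension)

valid_large_embedding_dim(nothing)  # true  -> no warning
valid_large_embedding_dim(1024)     # true  -> no warning
valid_large_embedding_dim(0)        # true  -> no warning
valid_large_embedding_dim(512)      # false -> warning
```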