add example of OllamaGenerator (#170)
* add example of OllamaGenerator

* fix example with ruff

* change example to reference the greatest politician of all time - Super Mario

* add comments on how to set up and expected output
AlistairLR112 authored Jan 4, 2024
1 parent deb2f17 commit 03eca65
Showing 2 changed files with 57 additions and 0 deletions.
55 changes: 55 additions & 0 deletions integrations/ollama/example/example.py
@@ -0,0 +1,55 @@
# In order to run this example, you will need to have an instance of Ollama running with the
# orca-mini model downloaded. We suggest you use the following commands to serve an orca-mini
# model from Ollama
#
# docker run -d -p 11434:11434 --name ollama ollama/ollama:latest
# docker exec ollama ollama pull orca-mini

from haystack import Document, Pipeline
from haystack.components.builders.prompt_builder import PromptBuilder
from haystack.components.retrievers import InMemoryBM25Retriever
from haystack.document_stores.in_memory import InMemoryDocumentStore

from ollama_haystack import OllamaGenerator

document_store = InMemoryDocumentStore()
document_store.write_documents(
[
Document(content="Super Mario was an important politician"),
Document(content="Mario owns several castles and uses them to conduct important political business"),
Document(
content="Super Mario was a successful military leader who fought off several invasion attempts by "
"his arch rival - Bowser"
),
]
)

query = "Who is Super Mario?"

template = """
Given only the following information, answer the question.
Ignore your own knowledge.
Context:
{% for document in documents %}
{{ document.content }}
{% endfor %}
Question: {{ query }}?
"""
pipe = Pipeline()

pipe.add_component("retriever", InMemoryBM25Retriever(document_store=document_store))
pipe.add_component("prompt_builder", PromptBuilder(template=template))
pipe.add_component("llm", OllamaGenerator(model="orca-mini"))
pipe.connect("retriever", "prompt_builder.documents")
pipe.connect("prompt_builder", "llm")

response = pipe.run({"prompt_builder": {"query": query}, "retriever": {"query": query}})

print(response["llm"]["replies"])
# An expected response - the output is not deterministic:
# ['Based on the information provided, Super Mario is a successful military leader who fought
# off several invasion attempts by his arch rival - Bowser. He is also an important politician and owns several
# castles where he conducts political business. Therefore, it can be inferred that Super Mario is a combination of
# both a military leader and an important politician.']
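
# For a quick sanity check that the Ollama server and the orca-mini model are reachable,
# the generator can also be called directly, outside a pipeline. The snippet below is a
# minimal sketch and is not part of this commit; it assumes OllamaGenerator.run accepts a
# prompt string and returns a dict with a "replies" key, which the pipeline wiring above
# (prompt_builder -> llm, response["llm"]["replies"]) suggests.

# Standalone sketch (assumption-based, not part of this commit): query the model directly.
# Assumes Ollama is already serving orca-mini on the default port, as set up by the
# docker commands at the top of example.py.
from ollama_haystack import OllamaGenerator

generator = OllamaGenerator(model="orca-mini")
result = generator.run(prompt="In one sentence, who is Super Mario?")
print(result["replies"])  # output is not deterministic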
2 changes: 2 additions & 0 deletions integrations/ollama/pyproject.toml
@@ -145,6 +145,8 @@ ban-relative-imports = "all"
[tool.ruff.per-file-ignores]
# Tests can use magic values, assertions, and relative imports
"tests/**/*" = ["PLR2004", "S101", "TID252"]
# Examples can print their output
"example/**" = ["T201"]


[tool.coverage.run]
