From 5d578bd7149e411deac95fbc63bf7617230be810 Mon Sep 17 00:00:00 2001
From: A Noor <100anonyo@gmail.com>
Date: Wed, 22 May 2024 07:38:20 -0400
Subject: [PATCH] update README.md

---
 README.md | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index e443e89..c8dcf7a 100644
--- a/README.md
+++ b/README.md
@@ -355,11 +355,13 @@ with cria.Model() as ai:
 #### Running A Managed Subprocess (Reccomended)
 
 ```python
+
+# If this is the first time you run the program, Ollama will start automatically.
+# On subsequent runs, Ollama will already be running.
+
 ai = cria.Cria(standalone=True, close_on_exit=False)
 prompt = "Who is the CEO of OpenAI?"
 
-# Ollama will already be running.
-
 with cria.Model("llama2") as llama2:
     response = llama2.generate("Who is the CEO of OpenAI?", stream=False)
     print(response)
@@ -369,7 +371,7 @@ with cria.Model("llama3") as llama3:
     print(response)
 quit()
 
-# Olama will keep running, and be used the next time this program starts.
+# Despite exiting, Ollama will keep running and will be used the next time this program starts.
 ```
 
 ### Formatting