refactor: Prompt Llama3 via PromptingTools
This commit is contained in:
parent
27086c408a
commit
123e0537ee
1 changed file with 6 additions and 3 deletions
9
main.jl
9
main.jl
|
@ -28,8 +28,6 @@ end #for
|
|||
# We will be offloading the analysis to Ollama running Llama3.1 locally
|
||||
questions = [:Q8, :Q16, :Q29, :Q30]
|
||||
|
||||
run(`ollama serve`)
|
||||
|
||||
for q in questions
|
||||
analysis_prompt = """
|
||||
The following is a list of answers to a survey with one response per paragraph:
|
||||
|
@ -52,7 +50,12 @@ for q in questions
|
|||
|
||||
@info "Prompting Llama3.1 with \n```\n$analysis_prompt\n```\n"
|
||||
|
||||
analysis_response = read(run(`ollama run llama3.1 $analysis_prompt`), String)
|
||||
analysis_response = aigenerate(
|
||||
PromptingTools.OllamaSchema(),
|
||||
analysis_prompt;
|
||||
model="llama3.1",
|
||||
api_kwargs=(; options=(; num_gpu=99))
|
||||
).content
|
||||
|
||||
@info "Llama3.1 responded with \n```\n$analysis_response\n```\n"
|
||||
end #for
|
||||
|
|
Loading…
Reference in a new issue