refactor: Add prompt-specific logic to function

This commit is contained in:
Thomas A. Christensen II 2024-09-04 16:02:16 -05:00
parent 123e0537ee
commit 8fc627bc26
Signed by: millironx
GPG key ID: B7044A3432851F64

22
main.jl
View file

@ -28,6 +28,18 @@ end #for
# We will be offloading the analysis to Ollama running Llama3.1 locally
questions = [:Q8, :Q16, :Q29, :Q30]
"""
    logged_prompt(prompt)

Send `prompt` to the locally-running Llama3.1 model via Ollama, log both the
prompt and the model's reply (fenced as Markdown code blocks), and return the
reply text.
"""
function logged_prompt(prompt)
    # Newline after the opening fence keeps the logged Markdown well-formed
    @info "Prompting Llama3.1 with \n```\n$prompt\n```\n"
    response = aigenerate(
        PromptingTools.OllamaSchema(),
        prompt;
        model="llama3.1",
        # num_gpu=99 asks Ollama to offload as many layers as possible to the GPU
        api_kwargs=(; options=(; num_gpu=99))
    ).content
    @info "Llama3.1 responded with \n```\n$response\n```\n"
    return response
end #function
for q in questions
analysis_prompt = """
The following is a list of answers to a survey with one response per paragraph:
@ -48,16 +60,8 @@ for q in questions
Summarize the common themes between the survey responses.
"""
@info "Prompting Llama3.1 with \n```\n$analysis_prompt\n```\n"
analysis_response = logged_prompt(analysis_prompt)
analysis_response = aigenerate(
PromptingTools.OllamaSchema(),
analysis_prompt;
model="llama3.1",
api_kwargs=(; options=(; num_gpu=99))
).content
@info "Llama3.1 responsed with \n```\n$analysis_response\n```\n"
end #for