refactor: Add prompt-specific logic to function
This commit is contained in:
parent
123e0537ee
commit
8fc627bc26
1 changed file with 13 additions and 9 deletions
22
main.jl
22
main.jl
|
@ -28,6 +28,18 @@ end #for
|
||||||
# Analysis is offloaded to a locally running Ollama instance serving Llama3.1.
# These are the free-text survey questions selected for summarization.
questions = [:Q8, :Q16, :Q29, :Q30]
|
|
||||||
|
"""
    logged_prompt(prompt)

Send `prompt` to the locally running Llama3.1 model via Ollama and return the
response text, logging both the outgoing prompt and the incoming response at
`@info` level (each wrapped in a Markdown code fence for readability).
"""
function logged_prompt(prompt)
    # Fixed: add the missing newline after the opening fence so the logged
    # prompt renders as a proper Markdown code block (matches the response log).
    @info "Prompting Llama3.1 with \n```\n$prompt\n```\n"
    response = aigenerate(
        PromptingTools.OllamaSchema(),
        prompt;
        model="llama3.1",
        # Ask Ollama to offload as many model layers as possible to the GPU.
        api_kwargs=(; options=(; num_gpu=99))
    ).content
    # Fixed typo in the log message: "responsed" -> "responded".
    @info "Llama3.1 responded with \n```\n$response\n```\n"
    return response
end #function
||||||
for q in questions
|
for q in questions
|
||||||
analysis_prompt = """
|
analysis_prompt = """
|
||||||
The following is a list of answers to a survey with one response per paragraph:
|
The following is a list of answers to a survey with one response per paragraph:
|
||||||
|
@ -48,16 +60,8 @@ for q in questions
|
||||||
Summarize the common themes between the survey responses.
|
Summarize the common themes between the survey responses.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@info "Prompting Llama3.1 with \n```\n$analysis_prompt\n```\n"
|
analysis_response = logged_prompt(analysis_prompt)
|
||||||
|
|
||||||
analysis_response = aigenerate(
|
|
||||||
PromptingTools.OllamaSchema(),
|
|
||||||
analysis_prompt;
|
|
||||||
model="llama3.1",
|
|
||||||
api_kwargs=(; options=(; num_gpu=99))
|
|
||||||
).content
|
|
||||||
|
|
||||||
@info "Llama3.1 responsed with \n```\n$analysis_response\n```\n"
|
|
||||||
end #for
|
end #for
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Reference in a new issue