wip: Make Ollama offloading step (broken)

This commit is contained in:
Thomas A. Christensen II 2024-09-04 15:45:48 -05:00
parent cc3d2908ed
commit 2f9d4ed08b
Signed by: millironx
GPG key ID: B7044A3432851F64

35
main.jl
View file

@ -23,8 +23,41 @@ end #for
# infer types
# deleteat!(survey_data, [1,2])
# Compile comments from all questions and analyze
# We will be offloading the analysis to Ollama running Llama3.1 locally
# Open-ended questions whose free-text responses we want summarized
questions = [:Q8, :Q16, :Q29, :Q30]

# Start the Ollama server in the background. By default `run` blocks until
# the child process exits, which would hang the script here before any
# prompt is ever sent — `wait=false` launches it asynchronously instead.
ollama_server = run(`ollama serve`; wait=false)

for q in questions
    # Build a markdown prompt with one survey response per paragraph;
    # the first response is bolded. Julia triple-quoted strings strip the
    # common leading indentation, so the prompt text itself is unindented.
    analysis_prompt = """
        The following is a list of answers to a survey with one response per paragraph:
        # Antimicrobial usage survey open-ended question: $q
        $(
            join(
                [
                    i == 1 ? "**$a**\n" : "$a\n" for
                    (i, a) in enumerate(skipmissing(survey_data[!, q]))
                ],
                '\n',
            )
        )
        ---
        Summarize the common themes between the survey responses.
        """
    @info "Prompting Llama3.1 with \n```\n$analysis_prompt\n```\n"
    # Capture the model's stdout: `read(cmd, String)` runs the command and
    # returns its output. The original `read(run(cmd), String)` is broken —
    # `run` inherits stdout and returns an already-finished Process, so there
    # is nothing left to read from it.
    analysis_response = read(`ollama run llama3.1 $analysis_prompt`, String)
    @info "Llama3.1 responded with \n```\n$analysis_response\n```\n"
end #for
# Compile comments from all requested questions
for q in questions for q in questions
open("$q.md", "w") do f open("$q.md", "w") do f
write(f, "# Antimicrobial usage survey open-ended question: $q\n\n") write(f, "# Antimicrobial usage survey open-ended question: $q\n\n")