# LLama Chat

| Name | URL |
| --- | --- |
| Ollama | https://ollama.com |
| Ollama Python | https://github.com/ollama/ollama-python |
import ollama
import streamlit as st
## Print banner
@st.cache_data
def print_banner():
print("""
/$$ /$$ /$$ /$$
| $$| $$ | $$ | $$
| $$| $$ /$$$$$$ /$$$$$$/$$$$ /$$$$$$ /$$$$$$$| $$$$$$$ /$$$$$$ /$$$$$$
| $$| $$ |____ $$| $$_ $$_ $$ |____ $$ /$$$$$$ /$$_____/| $$__ $$ |____ $$|_ $$_/
| $$| $$ /$$$$$$$| $$ \\ $$ \\ $$ /$$$$$$$|______/| $$ | $$ \\ $$ /$$$$$$$ | $$
| $$| $$ /$$__ $$| $$ | $$ | $$ /$$__ $$ | $$ | $$ | $$ /$$__ $$ | $$ /$$
| $$| $$| $$$$$$$| $$ | $$ | $$| $$$$$$$ | $$$$$$$| $$ | $$| $$$$$$$ | $$$$/
|__/|__/ \\_______/|__/ |__/ |__/ \\_______/ \\_______/|__/ |__/ \\_______/ \\___/
""")
return 1
print_banner()
## Call LLama
question = st.text_input("Question")
if st.button(':thinking_face: Call LLama'):
response = ollama.chat(model='llama3.2', messages=[
{
'role': 'user',
'content': question,
},
])
st.write(response.message.content)