This commit is contained in:
37
ollama_remote_query/ollama_prompt.py
Executable file
37
ollama_remote_query/ollama_prompt.py
Executable file
@@ -0,0 +1,37 @@
|
||||
#! /usr/bin/env python
"""Interactive CLI: read a question from stdin, send it to a local Ollama
server, stream the answer to stdout, and archive it to a timestamped
Markdown file under ``output/``."""

import datetime
import os

from ollama import Client

# Directory where streamed answers are archived.
output_folder = "output"
if not os.path.exists(output_folder):
    os.makedirs(output_folder)

# Unix timestamp used to make the archive filename unique per run.
now = int(datetime.datetime.now().timestamp())

# PROMPT
prompt = input("Was möchtest du Fragen: ")

client = Client(
    host='http://localhost:11434',
    headers={'x-some-header': 'some-value'}
)

response = client.chat(
    model='gemma3n:e2b',
    messages=[
        {
            'role': 'user',
            'content': prompt,
        }],
    stream=True)

# BUG FIX: the original re-opened the archive file in append mode for every
# streamed chunk. Open it once around the loop instead, and mirror each
# chunk to stdout and to the file.
output_path = os.path.join(output_folder, f"ollama_output_{now}.md")
with open(output_path, "a") as dm:
    for chunk in response:
        content = chunk['message']["content"]
        print(content, end='', flush=True)
        print(content, end='', flush=True, file=dm)
6
ollama_remote_query/output/ollama_output_1755524506.md
Normal file
6
ollama_remote_query/output/ollama_output_1755524506.md
Normal file
@@ -0,0 +1,6 @@
|
||||
Okay, I'm exiting. Have a good one!
|
||||
|
||||
If you need anything in the future, feel free to ask. 😊
|
||||
|
||||
|
||||
|
||||
38
ollama_remote_query/translate_func.py
Normal file
38
ollama_remote_query/translate_func.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# %% packages
|
||||
# from langchain_openai import ChatOpenAI
|
||||
import langchain_ollama
|
||||
from langchain_core.prompts import ChatPromptTemplate
|
||||
# from dotenv import load_dotenv
|
||||
from langchain_core.output_parsers import StrOutputParser
|
||||
# load_dotenv('.env')
|
||||
|
||||
|
||||
def translate_func(select_lang="Germany", target_lang="English", query_trans="prompt"):
    """Translate *query_trans* from *select_lang* into *target_lang* using a
    local Ollama model, print the translation, and return it.

    Parameters
    ----------
    select_lang : str
        Source language of the text. Default is "Germany" —
        NOTE(review): presumably "German" was intended; kept unchanged
        for backward compatibility.
    target_lang : str
        Language to translate into.
    query_trans : str
        Text to translate. The sentinel value ``"prompt"`` (the default)
        asks the user interactively on stdin instead.

    Returns
    -------
    str
        The model's translation (also printed to stdout).
    """
    if query_trans == "prompt":
        query_trans = input("Was soll Übersetzt werden ? ")

    # %% set up prompt template
    # BUG FIX: the original f-string had a newline inside the replacement
    # field, which is only valid syntax on Python 3.12+ (PEP 701). Kept on
    # one line so the module imports on older interpreters too; the
    # resulting string is identical.
    prompt_template = ChatPromptTemplate.from_messages([
        ("system",
         f"You are an AI assistant that translates {select_lang} into another language."),
        ("user", "Translate this sentence: '{input}' into {target_language}"),
    ])

    # %% model
    model = langchain_ollama.llms.OllamaLLM(
        base_url='http://localhost:11434',
        model="gemma3n:e2b",
        temperature=0,  # deterministic output
    )

    # %% chain
    chain = prompt_template | model | StrOutputParser()

    # %% invoke chain
    res = chain.invoke({"input": query_trans,
                        "target_language": target_lang})
    print(res)
    # Return the translation so programmatic callers can use it
    # (backward-compatible: the original returned None and callers that
    # ignore the return value are unaffected).
    return res
if __name__ == "__main__":
    # BUG FIX: the original called
    #   translate_func(select_lang=target_lang, target_lang=select_lang)
    # passing the two languages swapped, which contradicts the variable
    # names defined here (it translated English text into German despite
    # naming "Germany" as the source). Pass them straight through.
    select_lang = "Germany"
    target_lang = "English"
    translate_func(select_lang=select_lang, target_lang=target_lang)
Reference in New Issue
Block a user