This commit is contained in:
37
ollama_remote_query/ollama_prompt.py
Executable file
37
ollama_remote_query/ollama_prompt.py
Executable file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env python3
"""Interactive CLI: send one prompt to a local Ollama server and stream
the reply to stdout while appending it to a timestamped Markdown file
under ``output/``."""

from ollama import Client
import datetime
import os

# Directory that collects one Markdown transcript per run.
output_folder = "output"
# exist_ok avoids the racy "check then create" pattern of the original.
os.makedirs(output_folder, exist_ok=True)

# Unix timestamp giving each run a unique transcript file name.
now = int(datetime.datetime.now().timestamp())

# PROMPT (user-facing text kept in German, as in the original UI).
prompt = input("Was möchtest du Fragen: ")

client = Client(
    host='http://localhost:11434',
    headers={'x-some-header': 'some-value'}
)

# Request a streamed chat completion so output appears incrementally.
response = client.chat(
    model='gemma3n:e2b',
    messages=[
        {
            'role': 'user',
            'content': prompt,
        }],
    stream=True)

# Open the transcript ONCE around the loop (the original re-opened the
# file in append mode for every streamed chunk) and mirror each chunk
# to both the terminal and the file.
output_path = os.path.join(output_folder, "ollama_output_" + str(now) + ".md")
with open(output_path, "a") as dm:
    for chunk in response:
        content = chunk['message']["content"]
        print(content, end='', flush=True)
        print(content, end='', flush=True, file=dm)
|
||||
Reference in New Issue
Block a user