python_skripte/ollama_remote_query/ollama_prompt.py

#!/usr/bin/env python
from ollama import Client
import datetime
import os

# Make sure the output folder exists
output_folder = "output"
os.makedirs(output_folder, exist_ok=True)

# Unix timestamp used to name the output file
now = int(datetime.datetime.now().timestamp())

# Prompt
prompt = input("What would you like to ask: ")
client = Client(
    host='http://localhost:11434',
    headers={'x-some-header': 'some-value'}
)
response = client.chat(
    model='gemma3n:e2b',
    messages=[
        {
            'role': 'user',
            'content': prompt,
        }],
    stream=True)
# Print each chunk to the console and append it to a timestamped Markdown file
output_file = os.path.join(output_folder, f"ollama_output_{now}.md")
with open(output_file, "a") as dm:
    for chunk in response:
        print(chunk['message']['content'], end='', flush=True)
        print(chunk['message']['content'], end='', flush=True, file=dm)
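
The directory name ollama_remote_query suggests the script is meant to query a remote Ollama server, yet the committed host is localhost. Below is a minimal sketch of how the host could be made configurable; the OLLAMA_HOST variable name and its fallback value are assumptions for illustration, not part of the script above.

#!/usr/bin/env python
# Hypothetical variant: read the Ollama server address from an environment
# variable so the same script can query a remote host instead of localhost.
import os
from ollama import Client

# OLLAMA_HOST is an assumed name; it falls back to the local default used above.
host = os.environ.get("OLLAMA_HOST", "http://localhost:11434")
client = Client(host=host)

response = client.chat(
    model='gemma3n:e2b',
    messages=[{'role': 'user', 'content': 'Say hello'}],
    stream=True,
)
for chunk in response:
    print(chunk['message']['content'], end='', flush=True)
print()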