ollama_remote_query/translate_func.py (new normal file, 38 lines)
@@ -0,0 +1,38 @@
# %% packages
# from langchain_openai import ChatOpenAI
import langchain_ollama
from langchain_core.prompts import ChatPromptTemplate
# from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser
# load_dotenv('.env')


def translate_func(select_lang="German", target_lang="English", query_trans="prompt"):
    """Translate query_trans from select_lang into target_lang using a local Ollama model."""
    if query_trans == "prompt":
        query_trans = input("What should be translated? ")
    # %% set up prompt template
    prompt_template = ChatPromptTemplate.from_messages([
        ("system", f"You are an AI assistant that translates {select_lang} into another language."),
        ("user", "Translate this sentence: '{input}' into {target_language}"),
    ])

    # %% model
    model = langchain_ollama.llms.OllamaLLM(base_url='http://localhost:11434',
                                            model="gemma3n:e2b", temperature=0)

    # %% chain
    chain = prompt_template | model | StrOutputParser()

    # %% invoke chain
    res = chain.invoke({"input": query_trans,
                        "target_language": target_lang})
    print(res)

# %%


if __name__ == "__main__":
    select_lang = "German"
    target_lang = "English"
    translate_func(select_lang=select_lang, target_lang=target_lang)
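Usage note: a minimal sketch of calling translate_func non-interactively, assuming ollama_remote_query is importable as a package, the Ollama server is reachable at http://localhost:11434, and the gemma3n:e2b model has already been pulled. The example sentence is only illustrative.

# Hypothetical caller: passing query_trans directly skips the interactive input() prompt.
from ollama_remote_query.translate_func import translate_func

translate_func(select_lang="German", target_lang="English",
               query_trans="Guten Morgen, wie geht es dir?")  # "Good morning, how are you?"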