Compare commits

...

19 Commits

Author SHA1 Message Date
6678aed520 18082025
All checks were successful
test / build-docs (push) Successful in 1m26s
2025-08-18 16:08:22 +02:00
a692ac8b05 current
All checks were successful
test / build-docs (push) Successful in 19s
2025-07-10 21:03:06 +02:00
11a0aa2d89 try
All checks were successful
test / build-docs (push) Successful in 11s
2025-02-04 10:07:52 +01:00
39fd46028f test
All checks were successful
test / build-docs (push) Successful in 1s
2025-02-03 20:12:20 +01:00
0107b7add7 try
All checks were successful
test / build-docs (push) Successful in 1s
2025-02-03 15:24:23 +01:00
5ee723e476 Dtry
All checks were successful
test / build-docs (push) Successful in 1s
2025-02-03 14:00:34 +01:00
85927ef3d9 try
All checks were successful
test / build-docs (push) Successful in 1s
2025-02-03 13:59:28 +01:00
80baf62cc0 test action
All checks were successful
test / build-docs (push) Successful in 1s
2025-02-03 13:58:11 +01:00
f1fa3c6b36 try
Some checks are pending
test / build-docs (push) Waiting to run
2025-02-03 13:56:52 +01:00
a9c6e6661f try 2025-02-03 13:53:55 +01:00
0c6e438b39 test 2025-02-03 13:52:28 +01:00
151e9c8ed3 try 2025-02-03 13:50:25 +01:00
2b56f1b6e1 try
Some checks failed
test / build-docs (push) Failing after 9s
2025-02-03 13:48:35 +01:00
3b90e814fc hh 2025-02-03 13:45:29 +01:00
d9c4c64ea7 try 2025-02-03 13:44:05 +01:00
eb04b48e72 try 2025-02-03 13:39:38 +01:00
da594e8e8c try 2025-02-03 13:35:49 +01:00
34a375c50d all 2024-11-25 17:23:00 +01:00
69295768d5 try 2024-10-09 18:01:24 +02:00
40 changed files with 5454 additions and 115 deletions

View File

@@ -0,0 +1,22 @@
name: test
on:
- pull_request
- push
jobs:
build-docs:
runs-on: shell
steps:
- name: Run ansible Version
shell: bash
run: ansible --version
- name: Show all Vars
shell: bash
run: env
- name: Run 2
shell: bash
run: echo "Hello World"
- name: Test User
shell: bash
run: echo "Ich bin User $USER"

31
SSH_Agent/main.py Normal file → Executable file
View File

@@ -1,21 +1,24 @@
#! /usr/bin/env python3.12
import subprocess, os
#! /usr/bin/env python3.13
import os
import subprocess
def import_ssh_keys_to_agent(privat_key = str):
def import_ssh_keys_to_agent(privat_key=str):
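# Add a private key to the running ssh-agent via 'ssh-add -q'; returns True when ssh-add exits cleanly, False on a timeout.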
try:
run_add_key = subprocess.run(
[ "/usr/bin/ssh-add","-q", privat_key],
["/usr/bin/ssh-add", "-q", privat_key],
shell=False,
text=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
timeout=10,)
timeout=10,
)
except subprocess.TimeoutExpired:
print("\n","Timeout, No Import :", privat_key)
print("\n", "Timeout, No Import :", privat_key)
return False
# print(run_add_key)
# print(run_add_key)
if run_add_key.returncode == 0:
return True
else:
@@ -26,30 +29,30 @@ def check_key_exist(ssh_pub=str):
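# Check whether the public key stored in ssh_pub is already loaded in the agent by comparing it against the output of 'ssh-add -L'.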
if not os.path.exists(ssh_pub):
return False
run_check_key = subprocess.run(
[ "/usr/bin/ssh-add","-L"],
["/usr/bin/ssh-add", "-L"],
shell=False,
text=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,)
stderr=subprocess.PIPE,
)
list_agent_pubs = str(run_check_key.stdout,encoding="utf-8").splitlines()
list_agent_pubs = str(run_check_key.stdout, encoding="utf-8").splitlines()
read_input_pub = open(ssh_pub)
READ_FILE_PUB = read_input_pub.read()
count=0
count = 0
for pub in list_agent_pubs:
count = count +1
count = count + 1
if READ_FILE_PUB == pub + "\n":
return True
return False
if __name__ == "__main__":
ssh_keys = [
"/home/jonnybravo/.ssh/ansible-test",
"/home/jonnybravo/.ssh/blu",
"/home/jonnybravo/.ssh/gitea",
"/home/jonnybravo/.ssh/gitlll",
"/home/jonnybravo/.ssh/gitlab_ed25519",
]
for add_key in ssh_keys:

19
check_git.py Normal file
View File

@@ -0,0 +1,19 @@
import subprocess, os
def git_test(git_file=str, git_root=str):
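# Return (stdout, stderr) of 'git log -1 --pretty=format:%h:%ct <git_file>', i.e. the short hash and commit timestamp of the file's last change.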
return subprocess.Popen(
"git -C {git_root} log -1 --pretty=format:%h:%ct {git_file}".format(
git_file=git_file,
git_root=git_root),
shell=True,
universal_newlines=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
).communicate()
if __name__ == "__main__":
print(git_test(git_file="/home/jonnybravo/Projekte/Python_Skripte/jenkins/use_jenkins_api.py", git_root="/home/jonnybravo/Projekte/Python_Skripte")[0])
#git log -1 --pretty=format:%h:%c /home/jonnybravo/Projekte/Python_Skripte/jenkins/use_jenkins_api.py

32
create_ps/create_ps.py Executable file
View File

@@ -0,0 +1,32 @@
#! /usr/bin/env python
import string
import secrets
def create_password(long=int(10), sonder="!#$%&()*+-/:;<=>?@[]_{|}"):
print("Sonderzeichen:", sonder)
alle = string.ascii_letters + string.digits + sonder
while True:
tx = ""
anz = [0, 0, 0, 0]
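# anz counts lowercase, uppercase, digit and special characters; the outer loop regenerates the password until every class occurs at least once.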
for i in range(long):
zeichen = secrets.choice(alle)
tx += zeichen
if zeichen in string.ascii_lowercase:
anz[0] += 1
elif zeichen in string.ascii_uppercase:
anz[1] += 1
elif zeichen in string.digits:
anz[2] += 1
else:
anz[3] += 1
# print("Anzahl:", anz)
if 0 not in anz:
break
return tx
if __name__ == "__main__":
print(create_password(long=20))

View File

@@ -0,0 +1 @@

23
imdb_parsen/main.py Normal file
View File

@@ -0,0 +1,23 @@
# print(the_matrix.get('tech'))
from imdb import Cinemagoer
# create an instance of the Cinemagoer class
ia = Cinemagoer()
# get a movie
movie = ia.get_movie('0133093')
# print the names of the directors of the movie
print('Directors:')
for director in movie['directors']:
print(director['name'])
# print the genres of the movie
print('Genres:')
for genre in movie['genres']:
print(genre)
# search for a person name
people = ia.search_person('Mel Gibson')
for person in people:
print(person.personID, person['name'])

48
imdb_parsen/test.py Normal file
View File

@@ -0,0 +1,48 @@
from bs4 import BeautifulSoup
import requests
import re
import pandas as pd
# Downloading imdb top 250 movie's data
url = 'http://www.imdb.com/chart/top'
response = requests.get(url)
soup = BeautifulSoup(response.text, "html.parser")
movies = soup.select('td.titleColumn')
crew = [a.attrs.get('title') for a in soup.select('td.titleColumn a')]
ratings = [b.attrs.get('data-value')
for b in soup.select('td.posterColumn span[name=ir]')]
# create a empty list for storing
# movie information
list = []
# Iterating over movies to extract
# each movie's details
for index in range(0, len(movies)):
# Separating movie into: 'place',
# 'title', 'year'
movie_string = movies[index].get_text()
movie = (' '.join(movie_string.split()).replace('.', ''))
movie_title = movie[len(str(index))+1:-7]
# year = re.search('\((.*?)\)', movie_string).group(1)
place = movie[:len(str(index))-(len(movie))]
data = {"place": place,
"movie_title": movie_title,
"rating": ratings[index],
# "year": year,
"star_cast": crew[index],
}
list.append(data)
# printing movie details with its rating.
for movie in list:
print(movie['place'], '-', movie['movie_title'],
'Starring:', movie['star_cast'], movie['rating'])
## .......##
df = pd.DataFrame(list)
df.to_csv('imdb_top_250_movies.csv', index=False)

View File

@@ -3,6 +3,9 @@
import os, sys, subprocess
class nfs_server_conf:
"""
This class only installs an NFS server via systemd; it requires that NFSv4 is already installed.
"""
def __init__(self, nfs_srv_folders = ["/nfsroot/publicnfs","/nfsroot/datennfs"], nfs_config_file = "/etc/exports", allow_network = "192.168.50.0/25") -> None:
if not os.geteuid()==0:
raise PermissionError("Sie sind kein Root")

View File

@@ -0,0 +1,75 @@
import os
import systemd.daemon
from typing import List
class NFSServerConf:
def __init__(self, nfs_srv_folders: List[str] = ["/nfsroot/publicnfs", "/nfsroot/datennfs"],
nfs_config_file: str = "/etc/exports", allow_network: str = "192.168.50.0/25"):
self._validate_input(nfs_srv_folders)
self._validate_input(nfs_config_file)
self.nfs_srv_folders = nfs_srv_folders
self.nfs_config_file = nfs_config_file
self.allow_network = allow_network
def _validate_input(self, input_value: str) -> None:
if not isinstance(input_value, str):
raise ValueError(f"Input muss eine Zeichenkette sein. ({type(input_value).__name__} wurde verwendet.)")
if not os.path.exists(input_value):
raise ValueError(f"Datei {input_value} existiert nicht.")
def mount_serverfolder(self) -> None:
# Search the given directory and its subdirectories for files and build a list of the file paths
server_folder_list = []
for folder in self.nfs_srv_folders:
files_in_folder = [os.path.join(folder, file) for file in os.listdir(folder)]
server_folder_list.extend(files_in_folder)
# Create a .mount unit file in /etc/systemd/system/ for every directory in nfs_srv_folders
for folder in self.nfs_srv_folders:
mount_unit_file = f"/etc/systemd/system/{os.path.basename(folder)}.mount"
with open(mount_unit_file, "w") as file:
file.write("[Unit]\n")
file.write(f"Description=Mount {folder}\n")
file.write("[Mount]\n")
file.write(f"Where={folder}\n")
file.write("What=/mnt/nfs\n")
# Start and enable the service for every unit file
systemd.daemon.notify(systemd.daemon.DaemonReload)
def nfs_server_conf(self) -> None:
# Search the NFS server configuration (by default /etc/exports) for directories
with open(self.nfs_config_file, "r") as file:
config_content = file.readlines()
# Count how often each directory path occurs in the configuration
folder_count = {}
for line in config_content:
if ":" in line and not line.startswith("#"):
folder_path = line.split(":")[0].strip()
if folder_path not in folder_count:
folder_count[folder_path] = 1
else:
folder_count[folder_path] += 1
# For every directory path that is not already present in the configuration, add a new entry to the configuration
for folder in self.nfs_srv_folders:
if folder not in [line.strip() for line in config_content]:
with open(self.nfs_config_file, "a") as file:
file.write(f"{folder}({self.allow_network})\n")
def start_nfs_server(self) -> None:
# Start and enable the service for the NFS server (by default nfsv4-server.service)
systemd.daemon.notify(systemd.daemon.DaemonReload)
def main_install():
nfsserverconf = NFSServerConf()
nfsserverconf.mount_serverfolder()
nfsserverconf.nfs_server_conf()
nfsserverconf.start_nfs_server()
if __name__ == "__main__":
main_install()

View File

@@ -0,0 +1,7 @@
The following points:
- create an SQLite DB
- the DB should have the following tables: Genre, MyList
- the list should be displayed via Flask
More steps to follow!!!

View File

@@ -0,0 +1,69 @@
import DBcm
def create_movie_database(db_name="movie_db.db"):
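# Create the SQLite tables movie_list, genre, regie and medium if they do not exist yet.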
create_my_list = """
create table if not exists movie_list (
id integer not null primary key autoincrement,
titel varchar(64) not null,
genre_id integer not null,
regie_id integer not null
)
"""
create_genre = """
create table if not exists genre (
id integer not null primary key autoincrement,
name varchar(64) not null
)
"""
create_regie = """
create table if not exists regie (
id integer not null primary key autoincrement,
surname varchar(64) not null,
lastname varchar(64) not null
)
"""
create_medium = """
create table if not exists medium (
id integer not null primary key autoincrement,
medium varchar(64) not null
)
"""
with DBcm.UseDatabase(db_name) as db:
db.execute(create_my_list)
db.execute(create_genre)
db.execute(create_regie)
db.execute(create_medium)
def all_genres(db_name="movie_db.db"):
ALL_GENRE = "SELECT * from genre"
with DBcm.UseDatabase(db_name) as db:
db.execute(ALL_GENRE)
all_genre = [i[1] for i in db.fetchall()]
return all_genre
def search_genre_id(db_name="movie_db.db", genre_name=str):
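# Return the id of the given genre name, or 0 if no matching row exists.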
GENRE_QUERY = """
select id from genre
where name = ?
"""
try:
with DBcm.UseDatabase(db_name) as db:
db.execute(GENRE_QUERY, (genre_name,))
genre_id = db.fetchone()[0]
return int(genre_id)
except:
return int(0)
if __name__ == "__main__":
create_movie_database()
print(all_genres())
print(search_genre_id(genre_name="war"))

View File

@@ -0,0 +1,534 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "5263a987-da36-46d7-a2e7-d0658cda09c1",
"metadata": {},
"outputs": [],
"source": [
"import DBcm"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "3f6b0763-4106-4bfa-9aef-7afbd180c6d4",
"metadata": {},
"outputs": [],
"source": [
"db_name = \"movie_db.db\"\n",
"\n",
"create_my_list = \"\"\"\n",
" create table if not exists movie_list (\n",
" id integer not null primary key autoincrement,\n",
" titel varchar(64) not null,\n",
" genre_id integer not null,\n",
" regie_id integer not null\n",
" \n",
" )\n",
"\n",
"\"\"\"\n",
"\n",
"create_genre = \"\"\"\n",
" create table if not exists genre (\n",
" id integer not null primary key autoincrement,\n",
" name varchar(64) not null\n",
" )\n",
"\"\"\"\n",
"\n",
"create_regie = \"\"\"\n",
" create table if not exists regie (\n",
" id integer not null primary key autoincrement,\n",
" surname varchar(64) not null,\n",
" lastname varchr(64) not null\n",
" )\n",
"\"\"\"\n",
"\n",
"\n",
"with DBcm.UseDatabase(db_name) as db: \n",
" db.execute(create_my_list)\n",
" db.execute(create_genre)\n",
" db.execute(create_regie)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "376ef812-97b5-45ba-8ef1-eb8d7829494a",
"metadata": {},
"outputs": [],
"source": [
"# ADDed Genre values\n",
"\n",
"ADD_GENRE_VALUE = \"\"\"\n",
" INSERT INTO genre(name)\n",
" SELECT ?\n",
" WHERE NOT EXISTS (SELECT 1 FROM genre WHERE name = ?);\n",
" \"\"\"\n",
"\n",
"with open(\"genre_list\", \"r\") as fs: \n",
" for genre_value in fs.readlines():\n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(ADD_GENRE_VALUE, (genre_value.strip(), genre_value.strip()))\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 27,
"id": "63b16a41-88bf-4832-a26c-09180832f597",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['action', 'adventure', 'animation', 'biography', 'comedy', 'crime', 'cult movie', 'disney', 'documentary', 'drama', 'erotic', 'family', 'fantasy', 'film-noir', 'gangster', 'gay and lesbian', 'history', 'horror', 'military', 'music', 'musical', 'mystery', 'nature', 'neo-noir', 'period', 'pixar', 'road movie', 'romance', 'sci-fi', 'short', 'spy', 'super hero', 'thriller', 'visually stunning', 'war', 'western']\n"
]
}
],
"source": [
"def all_genres():\n",
" ALL_GENRE = \"\"\"\n",
" SELECT * from genre \n",
" \"\"\" \n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(ALL_GENRE)\n",
" all_genre = [i[1] for i in db.fetchall()]\n",
" \n",
" return all_genre\n",
"\n",
"print(all_genres())\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 36,
"id": "89b20b5f-34aa-4490-a4c0-c186c9fa30bd",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1\n",
"36\n"
]
}
],
"source": [
"def search_genre_id(genre_name):\n",
" GENRE_QUERY = \"\"\"\n",
" select id from genre\n",
" where name = ?\n",
" \"\"\"\n",
" try:\n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(GENRE_QUERY,(genre_name,))\n",
" genre_id = db.fetchone()[0]\n",
" return int(genre_id)\n",
" except:\n",
" return int(0)\n",
"\n",
"\n",
"def search_medium_id(genre_name):\n",
" GENRE_QUERY = \"\"\"\n",
" select id from genre\n",
" where medium = ?\n",
" \"\"\"\n",
" try:\n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(GENRE_QUERY,(genre_name,))\n",
" genre_id = db.fetchone()[0]\n",
" return int(genre_id)\n",
" except:\n",
" return int(0)\n",
"\n",
"print(search_genre_id(genre_name=\"action\"))\n",
"print(search_genre_id(genre_name=\"western\"))"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "c70d91a5-7855-465d-a4a6-daebc065ee37",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0-John-Smith\n",
"1-James-Johnson\n",
"2-William-Williams\n",
"3-Michael-Brown\n",
"4-David-Davis\n",
"5-Richard-Miller\n",
"6-Joseph-Wilson\n",
"7-Charles-Moore\n",
"8-Thomas-Taylor\n",
"9-Daniel-Anderson\n",
"10-Paul-Thomas\n",
"11-Mark-Jackson\n",
"12-Donna-White\n",
"13-Michelle-Harris\n",
"14-Laura-Martin\n",
"15-Sara-Thompson\n",
"16-Ana-Garcia\n",
"17-Carlos-Rodriguez\n",
"18-Maria-Martinez\n",
"19-Jose-Hernandez\n",
"20-Luis-Lopez\n",
"21-Rosa-Gonzalez\n",
"22-Pedro-Perez\n",
"23-Miguel-Sanchez\n",
"24-Juan-Ramirez\n",
"25-Ana-Flores\n",
"26-Isabella-Cruz\n",
"27-Victor-Rivera\n",
"28-Kevin-Lee\n",
"29-Brian-Walker\n",
"30-Emily-Hall\n",
"31-Ryan-Allen\n",
"32-Aaron-Young\n",
"33-Jeffrey-King\n",
"34-Joshua-Wright\n",
"35-Brandon-Scott\n",
"36-Frank-Turner\n",
"37-Gregory-Carter\n",
"38-Samuel-Phillips\n",
"39-Chris-Evans\n",
"40-Anthony-Collins\n",
"41-Eric-Stewart\n",
"42-Frank-Snyder\n",
"43-Thomas-Baker\n",
"44-Jeremy-Nelson\n",
"45-Steven-Roberts\n",
"46-Edward-Campbell\n",
"47-Ryan-Miller\n",
"48-Jacob-Davis\n",
"49-David-Garcia\n",
"50-Sophia-Rodriguez\n",
"51-Emma-Martinez\n",
"52-Noah-Hernandez\n",
"53-Ava-Lopez\n",
"54-Ethan-Gonzalez\n",
"55-Mia-Perez\n",
"56-William-Sanchez\n",
"57-James-Ramirez\n",
"58-Olivia-Flores\n",
"59-Lucas-Cruz\n",
"60-Isabella-Rivera\n",
"61-David-Lee\n",
"62-Sophie-Walker\n",
"63-Matthew-Hall\n",
"64-Emma-Allen\n",
"65-Ryan-Young\n",
"66-Ava-King\n",
"67-Ethan-Wright\n",
"68-Mia-Scott\n",
"69-William-Turner\n",
"70-James-Carter\n",
"71-Olivia-Phillips\n",
"72-Lucas-Evans\n",
"73-Sophie-Collins\n",
"74-Noah-Stewart\n",
"75-Ava-Snyder\n",
"76-Ethan-Baker\n",
"77-Mia-Nelson\n",
"78-Noah-Roberts\n",
"79-Emma-Campbell\n",
"80-William-Miller\n",
"81-James-Davis\n",
"82-Olivia-Garcia\n",
"83-Lucas-Rodriguez\n",
"84-Sophie-Martinez\n",
"85-Noah-Hernandez\n",
"86-Ava-Lopez\n",
"87-Ethan-Gonzalez\n",
"88-Mia-Perez\n",
"89-William-Sanchez\n",
"90-James-Ramirez\n",
"91-Olivia-Flores\n",
"92-Lucas-Cruz\n",
"93-Isabella-Rivera\n",
"94-David-Lee\n",
"95-Sophie-Walker\n",
"96-Matthew-Hall\n",
"97-Emma-Allen\n",
"98-Ryan-Young\n",
"99-Ava-King\n",
"100-Ethan-Wright\n",
"101-Mia-Scott\n",
"102-William-Turner\n",
"103-James-Carter\n",
"104-Olivia-Phillips\n",
"105-Lucas-Evans\n",
"106-Sophie-Collins\n",
"107-Noah-Stewart\n",
"108-Ava-Snyder\n",
"109-Ethan-Baker\n",
"110-Mia-Nelson\n",
"111-Noah-Roberts\n",
"112-Emma-Campbell\n",
"113-William-Miller\n",
"114-James-Davis\n",
"115-Olivia-Garcia\n",
"116-Lucas-Rodriguez\n",
"117-Sophie-Martinez\n",
"118-Noah-Hernandez\n",
"119-Ava-Lopez\n",
"120-Ethan-Gonzalez\n",
"121-Mia-Perez\n",
"122-William-Sanchez\n",
"123-James-Ramirez\n",
"124-Olivia-Flores\n",
"125-Lucas-Cruz\n",
"126-Isabella-Rivera\n",
"127-David-Lee\n",
"128-Sophie-Walker\n",
"129-Matthew-Hall\n",
"130-Emma-Allen\n",
"131-Ryan-Young\n",
"132-Ava-King\n",
"133-Ethan-Wright\n",
"134-Mia-Scott\n",
"135-William-Turner\n",
"136-James-Carter\n",
"137-Olivia-Phillips\n",
"138-Lucas-Evans\n",
"139-Sophie-Collins\n",
"140-Noah-Stewart\n",
"141-Ava-Snyder\n",
"142-Ethan-Baker\n",
"143-Mia-Nelson\n",
"144-Noah-Roberts\n",
"145-Emma-Campbell\n",
"146-William-Miller\n",
"147-James-Davis\n",
"148-Olivia-Garcia\n",
"149-Lucas-Rodriguez\n",
"150-Sophie-Martinez\n",
"151-Noah-Hernandez\n",
"152-Ava-Lopez\n",
"153-Ethan-Gonzalez\n",
"154-Mia-Perez\n",
"155-William-Sanchez\n",
"156-James-Ramirez\n",
"157-Olivia-Flores\n",
"158-Lucas-Cruz\n",
"159-Isabella-Rivera\n",
"160-David-Lee\n",
"161-Sophie-Walker\n",
"162-Matthew-Hall\n",
"163-Emma-Allen\n",
"164-Ryan-Young\n",
"165-Ava-King\n",
"166-Ethan-Wright\n",
"167-Mia-Scott\n",
"168-William-Turner\n",
"169-James-Carter\n",
"170-Olivia-Phillips\n",
"171-Lucas-Evans\n",
"172-Sophie-Collins\n",
"173-Noah-Stewart\n",
"174-Ava-Snyder\n",
"175-Ethan-Baker\n",
"176-Mia-Nelson\n",
"177-Noah-Roberts\n",
"178-Emma-Campbell\n",
"179-William-Miller\n",
"180-James-Davis\n",
"181-Olivia-Garcia\n",
"182-Lucas-Rodriguez\n",
"183-Sophie-Martinez\n",
"184-Noah-Hernandez\n",
"185-Ava-Lopez\n",
"186-Ethan-Gonzalez\n",
"187-Mia-Perez\n",
"188-William-Sanchez\n",
"189-James-Ramirez\n",
"190-Olivia-Flores\n",
"191-Lucas-Cruz\n",
"192-Isabella-Rivera\n",
"193-David-Lee\n",
"194-Sophie-Walker\n",
"195-Matthew-Hall\n",
"196-Emma-Allen\n",
"197-Ryan-Young\n",
"198-Ava-King\n",
"199-Ethan-Wright\n",
"200-Mia-Scott\n",
"201-William-Turner\n",
"202-James-Carter\n",
"203-Olivia-Phillips\n",
"204-Lucas-Evans\n",
"205-Sophie-Collins\n",
"206-Noah-Stewart\n",
"207-Ava-Snyder\n",
"208-Ethan-Baker\n",
"209-Mia-Nelson\n",
"210-Noah-Roberts\n",
"211-Emma-Campbell\n",
"212-William-Miller\n",
"213-James-Davis\n",
"214-Olivia-Garcia\n",
"215-Lucas-Rodriguez\n",
"216-Sophie-Martinez\n",
"217-Noah-Hernandez\n",
"218-Ava-Lopez\n",
"219-Ethan-Gonzalez\n",
"220-Mia-Perez\n",
"221-William-Sanchez\n",
"222-James-Ramirez\n",
"223-Olivia-Flores\n",
"224-Lucas-Cruz\n",
"225-Isabella-Rivera\n",
"226-David-Lee\n",
"227-Sophie-Walker\n",
"228-Matthew-Hall\n",
"229-Emma-Allen\n",
"230-Ryan-Young\n",
"231-Ava-King\n",
"232-Ethan-Wright\n",
"233-Mia-Scott\n",
"234-William-Turner\n",
"235-James-Carter\n",
"236-Olivia-Phillips\n",
"237-Lucas-Evans\n",
"238-Sophie-Collins\n",
"239-Noah-Stewart\n",
"240-Ava-Snyder\n",
"241-Ethan-Baker\n",
"242-Mia-Nelson\n",
"243-Noah-Roberts\n",
"244-Emma-Campbell\n",
"245-William-Miller\n",
"246-James-Davis\n",
"247-Olivia-Garcia\n",
"248-Lucas-Rodriguez\n",
"249-Sophie-Martinez\n",
"250-Noah-Hernandez\n",
"251-Ava-Lopez\n",
"252-Ethan-Gonzalez\n",
"253-Mia-Perez\n",
"254-William-Sanchez\n",
"255-James-Ramirez\n",
"256-Olivia-Flores\n",
"257-Lucas-Cruz\n",
"258-Isabella-Rivera\n",
"259-David-Lee\n",
"260-Sophie-Walker\n",
"261-Matthew-Hall\n",
"262-Emma-Allen\n",
"263-Ryan-Young\n",
"264-Ava-King\n",
"265-Ethan-Wright\n",
"266-Mia-Scott\n",
"267-William-Turner\n",
"268-James-Carter\n",
"269-Olivia-Phillips\n",
"270-Lucas-Evans\n",
"271-Sophie-Collins\n",
"272-Noah-Stewart\n",
"273-Ava-Snyder\n",
"274-Ethan-Baker\n",
"275-Mia-Nelson\n",
"276-Noah-Roberts\n",
"277-Emma-Campbell\n",
"278-William-Miller\n",
"279-James-Davis\n",
"280-Olivia-Garcia\n",
"281-Lucas-Rodriguez\n",
"282-Sophie-Martinez\n",
"283-Noah-Hernandez\n",
"284-Ava-Lopez\n",
"285-Ethan-Gonzalez\n",
"286-Mia-Perez\n",
"287-William-Sanchez\n",
"288-James-Ramirez\n",
"289-Olivia-Flores\n",
"290-Lucas-Cruz\n",
"291-Isabella-Rivera\n",
"292-David-Lee\n",
"293-Sophie-Walker\n",
"294-Matthew-Hall\n",
"295-Emma-Allen\n",
"296-Ryan-Young\n",
"297-Ava-King\n",
"298-Ethan-Wright\n",
"299-Mia-Scott\n",
"300-William-Turner\n",
"301-James-Carter\n",
"302-Olivia-Phillips\n",
"303-Lucas-Evans\n",
"304-Sophie-Collins\n",
"305-Noah-Stewart\n",
"306-Ava-Snyder\n",
"307-Ethan-Baker\n",
"308-Mia-Nelson\n",
"309-Noah-Roberts\n",
"310-Emma-Campbell\n",
"311-William-Miller\n",
"312-James-Davis\n",
"313-Olivia-Garcia\n",
"314-Lucas-Rodriguez\n",
"315-Sophie-Martinez\n",
"316-Noah-Hernandez\n",
"317-Ava-Lopez\n",
"318-Ethan-Gonzalez\n",
"319-Mia-Perez\n",
"320-William-Sanchez\n",
"321-James-Ramirez\n",
"322-Olivia-Flores\n",
"323-Lucas-Cruz\n",
"324-Isabella-Rivera\n",
"325-David-Lee\n",
"326-Sophie-Walker\n"
]
}
],
"source": [
"import pandas as pd\n",
"usecols = [\"Name\", \"Vorname\"]\n",
"air = pd.read_csv(\"regie_name.csv\", usecols=usecols)\n",
"\n",
"for count, (i, b) in enumerate(zip(air[\"Name\"], air[\"Vorname\"])):\n",
" print(count, b, i, sep=\"-\")\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ee4b5dee-6b41-49eb-8d75-9db50d20fdef",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

7
movie-db/README.md Normal file
View File

@@ -0,0 +1,7 @@
The following points:
- create an SQLite DB
- the DB should have the following tables: Genre, MyList
- the list should be displayed via Flask
More steps to follow!!!
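A minimal sketch of the plan in this README, assuming only Python's built-in sqlite3 module and Flask; the table names (genre, my_list) and the single route are illustrative and differ from the DBcm-based implementation in movie-db/moviedb_func.py below:

import sqlite3
import flask

app = flask.Flask(__name__)
DB_NAME = "movie_db.db"


def init_db():
    # Two tables, as planned above: genre and my_list (the "MyList" table).
    with sqlite3.connect(DB_NAME) as conn:
        conn.execute(
            "create table if not exists genre ("
            "id integer primary key autoincrement, name varchar(64) not null)")
        conn.execute(
            "create table if not exists my_list ("
            "id integer primary key autoincrement, titel varchar(64) not null, "
            "genre_id integer not null)")


@app.get("/")
def show_list():
    # Display every stored title together with its genre.
    with sqlite3.connect(DB_NAME) as conn:
        rows = conn.execute(
            "select my_list.titel, genre.name from my_list "
            "join genre on my_list.genre_id = genre.id").fetchall()
    return "<br>".join(f"{titel} ({genre})" for titel, genre in rows) or "empty list"


if __name__ == "__main__":
    init_db()
    app.run(port=5082, host="0.0.0.0", debug=True)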

Binary file not shown.

36
movie-db/genre_list Normal file
View File

@@ -0,0 +1,36 @@
action
adventure
animation
biography
comedy
crime
cult movie
disney
documentary
drama
erotic
family
fantasy
film-noir
gangster
gay and lesbian
history
horror
military
music
musical
mystery
nature
neo-noir
period
pixar
road movie
romance
sci-fi
short
spy
super hero
thriller
visually stunning
war
western

59
movie-db/main.py Normal file
View File

@@ -0,0 +1,59 @@
import flask
import moviedb_func
app = flask.Flask(__name__)
app.secret_key = "Start1234!"
@app.get("/add_movie")
def add_movie():
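# GET /add_movie: render the input form together with the current movie list.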
return flask.render_template(
"add_movie.html",
sitename="Meine Movieliste !!!",
url="output",
select_movie="add_movie",
select_genre="add_genre",
select_medium="add_medium",
select_regie="add_regie",
data=moviedb_func.all_select(),
data_medium=moviedb_func.all_select(what_select="medium"),
data_regie=moviedb_func.all_select(what_select="regie"),
data_all=moviedb_func.show_movie_list()
)
@app.post("/output")
def csv_output():
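# POST /output: resolve the selected genre, medium and regie to their ids, store the movie, and show a confirmation page.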
select_add_movie = flask.request.form["add_movie"]
select_genre = flask.request.form["add_genre"]
select_genre_id = moviedb_func.search_id(
search_name=flask.request.form["add_genre"])
select_medium = flask.request.form["add_medium"]
select_medium_id = moviedb_func.search_id(
search_name=flask.request.form["add_medium"], select_from="medium", select_where="medium")
select_regie = flask.request.form["add_regie"]
select_regie_id = moviedb_func.search_id(
search_name=flask.request.form["add_regie"], select_from="regie", select_where="regie")
moviedb_func.add_movie_to_list(movie_name=select_add_movie, regie_id=select_regie_id,
medium_id=select_medium_id, genre_id=select_genre_id)
return flask.render_template(
"output.html",
sitename="Meine Movieliste !!!",
add_movie=select_add_movie,
add_genre=select_genre,
add_medium=select_medium,
add_regie=select_regie,
add_genre_id=select_genre_id,
add_medium_id=select_medium_id,
add_regie_id=select_regie_id
# data=modify_csv.read_csv_file(),
# sum_all=modify_csv.sum_all(),
)
if __name__ == "__main__":
app.run(port=5082, host="0.0.0.0", debug=True)

BIN
movie-db/movie_db.db Normal file

Binary file not shown.

175
movie-db/moviedb_func.py Normal file
View File

@@ -0,0 +1,175 @@
import DBcm
from mariadb import ProgrammingError
import pandas as pd
import sqlite3
def create_movie_database(db_name="movie_db.db"):
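# Create the movie_list, genre, regie and medium tables and seed them from genre_list, regie_name.csv and a fixed list of media types.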
create_my_list = """
create table if not exists movie_list (
id integer not null primary key autoincrement,
titel varchar(64) not null,
genre_id integer not null,
regie_id integer not null,
medium_id integer not null
)
"""
create_genre = """
create table if not exists genre (
id integer not null primary key autoincrement,
name varchar(64) not null
)
"""
create_regie = """
create table if not exists regie (
id integer not null primary key autoincrement,
surname varchar(64) not null,
lastname varchar(64) not null
)
"""
create_medium = """
create table if not exists medium (
id integer not null primary key autoincrement,
medium varchar(64) not null
)
"""
ADD_GENRE_VALUE = """
INSERT INTO genre(name)
SELECT ?
WHERE NOT EXISTS (SELECT 1 FROM genre WHERE name = ?);
"""
ADD_MEDIUM_VALUE = """
INSERT INTO medium(medium)
SELECT ?
WHERE NOT EXISTS (SELECT 1 FROM medium WHERE medium = ?);
"""
ADD_REGIE_VALUE = """
INSERT INTO regie (surname, lastname )
SELECT ?, ?
WHERE NOT EXISTS
(SELECT surname, lastname
FROM regie
WHERE surname = ? AND lastname = ?);
"""
with DBcm.UseDatabase(db_name) as db:
db.execute(create_my_list)
db.execute(create_genre)
db.execute(create_regie)
db.execute(create_medium)
with open("genre_list", "r") as fs:
for genre_value in fs.readlines():
with DBcm.UseDatabase(db_name) as db:
db.execute(ADD_GENRE_VALUE,
(genre_value.strip(), genre_value.strip()))
usecols = ["Name", "Vorname"]
air = pd.read_csv("regie_name.csv", usecols=usecols)
for count, (reg_name, reg_vorname) in enumerate(zip(air["Name"], air["Vorname"])):
# print(count, reg_vorname, reg_name)
with DBcm.UseDatabase(db_name) as db:
db.execute(ADD_REGIE_VALUE, (reg_vorname,
reg_name, reg_vorname, reg_name))
LISTE_MEDIUM = ["BlueRay", "DVD", "Datei",
"BlueRay Steelbook", "DVD Steelbook"]
with DBcm.UseDatabase(db_name) as db:
for MEDIUM in LISTE_MEDIUM:
db.execute(ADD_MEDIUM_VALUE, (MEDIUM, MEDIUM))
def all_select(db_name="movie_db.db", what_select="genre"):
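# Return every entry of the chosen table: plain names for genre/medium, "surname lastname" strings for regie.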
ALL_SELECT = "SELECT * from " + what_select
if what_select == "genre" or what_select == "medium":
with DBcm.UseDatabase(db_name) as db:
db.execute(ALL_SELECT)
all_value = [i[1] for i in db.fetchall()]
return all_value
elif what_select == 'regie':
all_value = []
with DBcm.UseDatabase(db_name) as db:
db.execute(ALL_SELECT)
for i in db.fetchall():
all_value.append(i[1] + " " + i[2])
return all_value
else:
return "Wrong Value !!!"
def search_id(db_name="movie_db.db", search_name=str, select_from="genre", select_where="name"):
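# Look up the id for a name in the given table; regie names are split into surname and lastname, other lookups fall back to 0 when nothing matches.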
if select_from == "regie":
split_search = search_name.split(" ")
GENRE_QUERY = f"""select id from {select_from}
where surname = ? and lastname = ?"""
with DBcm.UseDatabase(db_name) as db:
db.execute(GENRE_QUERY, (split_search[0], split_search[1],))
regie_id = db.fetchone()[0]
return int(regie_id)
else:
try:
GENRE_QUERY = f"""select id from {select_from}
where {select_where} = ?"""
with DBcm.UseDatabase(db_name) as db:
db.execute(GENRE_QUERY, (search_name,))
genre_id = db.fetchone()[0]
return int(genre_id)
except:
return int(0)
def add_movie_to_list(db_name="movie_db.db", movie_name=str, genre_id=int, regie_id=int, medium_id=int):
SQL_PARAM = f"""
INSERT INTO movie_list (titel, genre_id, regie_id, medium_id )
SELECT ?, ?, ?, ?
WHERE NOT EXISTS
(SELECT titel FROM movie_list WHERE titel = ?);
"""
try:
with DBcm.UseDatabase(db_name) as db:
db.execute(SQL_PARAM, (movie_name.lower(), genre_id,
regie_id, medium_id, movie_name.lower(),))
except ProgrammingError:
raise ProgrammingError("Konnte nicht in die DB schreiben")
return True
def show_movie_list(db_name="movie_db.db"):
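# Join movie_list with genre, regie and medium and return one dict per stored movie.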
SQL_PARAM = f"""SELECT
movie_list.id,
titel,
genre.name AS genre,
regie.surname as regie_surname,
regie.lastname as regie_lastname,
medium.medium
FROM movie_list
INNER JOIN genre ON movie_list.genre_id=genre.id
INNER JOIN regie ON movie_list.regie_id=regie.id
INNER JOIN medium ON movie_list.medium_id=medium.id
;
"""
db = sqlite3.connect(db_name)
SELCET_VALUE = pd.read_sql(SQL_PARAM, db)
return_list_dict = []
for id, titel, genre, regie_surname, regie_lastname, medium in zip(SELCET_VALUE["id"], SELCET_VALUE["titel"], SELCET_VALUE["genre"], SELCET_VALUE["regie_surname"], SELCET_VALUE["regie_lastname"], SELCET_VALUE["medium"]):
return_list_dict.append(
{"id": id, "titel": titel, "genre": genre, "regie": regie_surname + " " + regie_lastname, "medium": medium})
return return_list_dict
if __name__ == "__main__":
create_movie_database()
# print(all_select())
# id_genre = search_id(
# search_name="DVD", select_from="medium", select_where="medium")
add_movie_to_list(movie_name="Schlumpfland",
genre_id=1, regie_id=1, medium_id=1)
for test in show_movie_list():
print(test.get("id"), test.get("medium"))

329
movie-db/regie_name.csv Normal file
View File

@@ -0,0 +1,329 @@
Name,Vorname
Rodriguez,Robert
Smith,John
Johnson,James
Williams,William
Brown,Michael
Davis,David
Miller,Richard
Wilson,Joseph
Moore,Charles
Taylor,Thomas
Anderson,Daniel
Thomas,Paul
Jackson,Mark
White,Donna
Harris,Michelle
Martin,Laura
Thompson,Sara
Garcia,Ana
Rodriguez,Carlos
Martinez,Maria
Hernandez,Jose
Lopez,Luis
Gonzalez,Rosa
Perez,Pedro
Sanchez,Miguel
Ramirez,Juan
Flores,Ana
Cruz,Isabella
Rivera,Victor
Lee,Kevin
Walker,Brian
Hall,Emily
Allen,Ryan
Young,Aaron
King,Jeffrey
Wright,Joshua
Scott,Brandon
Turner,Frank
Carter,Gregory
Phillips,Samuel
Evans,Chris
Collins,Anthony
Stewart,Eric
Snyder,Frank
Baker,Thomas
Nelson,Jeremy
Roberts,Steven
Campbell,Edward
Miller,Ryan
Davis,Jacob
Garcia,David
Rodriguez,Sophia
Martinez,Emma
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie
Hall,Matthew
Allen,Emma
Young,Ryan
King,Ava
Wright,Ethan
Scott,Mia
Turner,William
Carter,James
Phillips,Olivia
Evans,Lucas
Collins,Sophie
Stewart,Noah
Snyder,Ava
Baker,Ethan
Nelson,Mia
Roberts,Noah
Campbell,Emma
Miller,William
Davis,James
Garcia,Olivia
Rodriguez,Lucas
Martinez,Sophie
Hernandez,Noah
Lopez,Ava
Gonzalez,Ethan
Perez,Mia
Sanchez,William
Ramirez,James
Flores,Olivia
Cruz,Lucas
Rivera,Isabella
Lee,David
Walker,Sophie

View File

@@ -0,0 +1,2 @@
flask
dbcm

View File

@@ -0,0 +1,50 @@
{% extends "base.html" %}
{% block body %}
<form action="{{ url }}" method="POST">
<input type="text"
placeholder="Movie"
name="{{ select_movie }}"
id="{{ select_movie }}">
&nbsp;
<select name="{{ select_genre }}" id="{{ select_genre }}">
{% for name in data %}
<option value="{{ name }}">{{ name }}</option>
{% endfor %}
</select>
&nbsp;
<select name="{{ select_medium }}" id="{{ select_medium }}">
{% for name in data_medium %}
<option value="{{ name }}">{{ name }}</option>
{% endfor %}
</select>
&nbsp;
<select name="{{ select_regie }}" id="{{ select_regie }}">
{% for name in data_regie %}
<option value="{{ name }}">{{ name }}</option>
{% endfor %}
</select>
&nbsp;
<button type="submit"> Eingabe </button>
</form>
<br>
<table border="1" width="50%">
<tr>
<th>Id</th>
<th>Name</th>
<th>Genre</th>
<th>Regie</th>
<th>Medium</th>
</tr>
{% for list_all in data_all %}
<tr>
<td>{{list_all.get("id")}}</td>
<td>{{list_all.get("titel")}}</td>
<td>{{list_all.get("genre")}}</td>
<td>{{list_all.get("regie")}}</td>
<td>{{list_all.get("medium")}}</td>
</tr>
{% endfor %}
</table>
{% endblock %}

View File

@@ -0,0 +1,12 @@
<!DOCTYPE html>
<html>
<head>
<title> {{ sitename }} </title>
</head>
<body>
<h2> {{ sitename }} </h2>
<p> Es werden alle Filmtitel in einer Liste angezeigt.
{% block body %}
{% endblock %}
</body>
</html>

View File

@@ -0,0 +1,10 @@
{% extends "base.html" %}
{% block body %}
<p>
Film = {{ add_movie }} <br>
Genre = {{ add_genre }} <span>&nbsp; genre_id = {{ add_genre_id }} <br>
Medium = {{ add_medium }} <span>&nbsp; medium_id = {{ add_medium_id }} <br>
Regie = {{ add_regie }} <span>&nbsp; reg_id = {{ add_regie_id }} <br>
</p>
{% endblock %}

650
movie-db/test_jup.ipynb Normal file
View File

@@ -0,0 +1,650 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "5263a987-da36-46d7-a2e7-d0658cda09c1",
"metadata": {},
"outputs": [],
"source": [
"import DBcm"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "3f6b0763-4106-4bfa-9aef-7afbd180c6d4",
"metadata": {},
"outputs": [],
"source": [
"db_name = \"movie_db.db\"\n",
"\n",
"create_my_list = \"\"\"\n",
" create table if not exists movie_list (\n",
" id integer not null primary key autoincrement,\n",
" titel varchar(64) not null,\n",
" genre_id integer not null,\n",
" regie_id integer not null\n",
" \n",
" )\n",
"\n",
"\"\"\"\n",
"\n",
"create_genre = \"\"\"\n",
" create table if not exists genre (\n",
" id integer not null primary key autoincrement,\n",
" name varchar(64) not null\n",
" )\n",
"\"\"\"\n",
"\n",
"create_regie = \"\"\"\n",
" create table if not exists regie (\n",
" id integer not null primary key autoincrement,\n",
" surname varchar(64) not null,\n",
" lastname varchr(64) not null\n",
" )\n",
"\"\"\"\n",
"\n",
"\n",
"with DBcm.UseDatabase(db_name) as db: \n",
" db.execute(create_my_list)\n",
" db.execute(create_genre)\n",
" db.execute(create_regie)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "376ef812-97b5-45ba-8ef1-eb8d7829494a",
"metadata": {},
"outputs": [],
"source": [
"# ADDed Genre values\n",
"\n",
"ADD_GENRE_VALUE = \"\"\"\n",
" INSERT INTO genre(name)\n",
" SELECT ?\n",
" WHERE NOT EXISTS (SELECT 1 FROM genre WHERE name = ?);\n",
" \"\"\"\n",
"\n",
"with open(\"genre_list\", \"r\") as fs: \n",
" for genre_value in fs.readlines():\n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(ADD_GENRE_VALUE, (genre_value.strip(), genre_value.strip()))\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 27,
"id": "63b16a41-88bf-4832-a26c-09180832f597",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"['action', 'adventure', 'animation', 'biography', 'comedy', 'crime', 'cult movie', 'disney', 'documentary', 'drama', 'erotic', 'family', 'fantasy', 'film-noir', 'gangster', 'gay and lesbian', 'history', 'horror', 'military', 'music', 'musical', 'mystery', 'nature', 'neo-noir', 'period', 'pixar', 'road movie', 'romance', 'sci-fi', 'short', 'spy', 'super hero', 'thriller', 'visually stunning', 'war', 'western']\n"
]
}
],
"source": [
"def all_genres():\n",
" ALL_GENRE = \"\"\"\n",
" SELECT * from genre \n",
" \"\"\" \n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(ALL_GENRE)\n",
" all_genre = [i[1] for i in db.fetchall()]\n",
" \n",
" return all_genre\n",
"\n",
"print(all_genres())\n",
" "
]
},
{
"cell_type": "code",
"execution_count": 36,
"id": "89b20b5f-34aa-4490-a4c0-c186c9fa30bd",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"1\n",
"36\n"
]
}
],
"source": [
"def search_genre_id(genre_name):\n",
" GENRE_QUERY = \"\"\"\n",
" select id from genre\n",
" where name = ?\n",
" \"\"\"\n",
" try:\n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(GENRE_QUERY,(genre_name,))\n",
" genre_id = db.fetchone()[0]\n",
" return int(genre_id)\n",
" except:\n",
" return int(0)\n",
"\n",
"\n",
"def search_medium_id(genre_name):\n",
" GENRE_QUERY = \"\"\"\n",
" select id from genre\n",
" where medium = ?\n",
" \"\"\"\n",
" try:\n",
" with DBcm.UseDatabase(db_name) as db:\n",
" db.execute(GENRE_QUERY,(genre_name,))\n",
" genre_id = db.fetchone()[0]\n",
" return int(genre_id)\n",
" except:\n",
" return int(0)\n",
"\n",
"print(search_genre_id(genre_name=\"action\"))\n",
"print(search_genre_id(genre_name=\"western\"))"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "c70d91a5-7855-465d-a4a6-daebc065ee37",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0-John-Smith\n",
"1-James-Johnson\n",
"2-William-Williams\n",
"3-Michael-Brown\n",
"4-David-Davis\n",
"5-Richard-Miller\n",
"6-Joseph-Wilson\n",
"7-Charles-Moore\n",
"8-Thomas-Taylor\n",
"9-Daniel-Anderson\n",
"10-Paul-Thomas\n",
"11-Mark-Jackson\n",
"12-Donna-White\n",
"13-Michelle-Harris\n",
"14-Laura-Martin\n",
"15-Sara-Thompson\n",
"16-Ana-Garcia\n",
"17-Carlos-Rodriguez\n",
"18-Maria-Martinez\n",
"19-Jose-Hernandez\n",
"20-Luis-Lopez\n",
"21-Rosa-Gonzalez\n",
"22-Pedro-Perez\n",
"23-Miguel-Sanchez\n",
"24-Juan-Ramirez\n",
"25-Ana-Flores\n",
"26-Isabella-Cruz\n",
"27-Victor-Rivera\n",
"28-Kevin-Lee\n",
"29-Brian-Walker\n",
"30-Emily-Hall\n",
"31-Ryan-Allen\n",
"32-Aaron-Young\n",
"33-Jeffrey-King\n",
"34-Joshua-Wright\n",
"35-Brandon-Scott\n",
"36-Frank-Turner\n",
"37-Gregory-Carter\n",
"38-Samuel-Phillips\n",
"39-Chris-Evans\n",
"40-Anthony-Collins\n",
"41-Eric-Stewart\n",
"42-Frank-Snyder\n",
"43-Thomas-Baker\n",
"44-Jeremy-Nelson\n",
"45-Steven-Roberts\n",
"46-Edward-Campbell\n",
"47-Ryan-Miller\n",
"48-Jacob-Davis\n",
"49-David-Garcia\n",
"50-Sophia-Rodriguez\n",
"51-Emma-Martinez\n",
"52-Noah-Hernandez\n",
"53-Ava-Lopez\n",
"54-Ethan-Gonzalez\n",
"55-Mia-Perez\n",
"56-William-Sanchez\n",
"57-James-Ramirez\n",
"58-Olivia-Flores\n",
"59-Lucas-Cruz\n",
"60-Isabella-Rivera\n",
"61-David-Lee\n",
"62-Sophie-Walker\n",
"63-Matthew-Hall\n",
"64-Emma-Allen\n",
"65-Ryan-Young\n",
"66-Ava-King\n",
"67-Ethan-Wright\n",
"68-Mia-Scott\n",
"69-William-Turner\n",
"70-James-Carter\n",
"71-Olivia-Phillips\n",
"72-Lucas-Evans\n",
"73-Sophie-Collins\n",
"74-Noah-Stewart\n",
"75-Ava-Snyder\n",
"76-Ethan-Baker\n",
"77-Mia-Nelson\n",
"78-Noah-Roberts\n",
"79-Emma-Campbell\n",
"80-William-Miller\n",
"81-James-Davis\n",
"82-Olivia-Garcia\n",
"83-Lucas-Rodriguez\n",
"84-Sophie-Martinez\n",
"85-Noah-Hernandez\n",
"86-Ava-Lopez\n",
"87-Ethan-Gonzalez\n",
"88-Mia-Perez\n",
"89-William-Sanchez\n",
"90-James-Ramirez\n",
"91-Olivia-Flores\n",
"92-Lucas-Cruz\n",
"93-Isabella-Rivera\n",
"94-David-Lee\n",
"95-Sophie-Walker\n",
"96-Matthew-Hall\n",
"97-Emma-Allen\n",
"98-Ryan-Young\n",
"99-Ava-King\n",
"100-Ethan-Wright\n",
"101-Mia-Scott\n",
"102-William-Turner\n",
"103-James-Carter\n",
"104-Olivia-Phillips\n",
"105-Lucas-Evans\n",
"106-Sophie-Collins\n",
"107-Noah-Stewart\n",
"108-Ava-Snyder\n",
"109-Ethan-Baker\n",
"110-Mia-Nelson\n",
"111-Noah-Roberts\n",
"112-Emma-Campbell\n",
"113-William-Miller\n",
"114-James-Davis\n",
"115-Olivia-Garcia\n",
"116-Lucas-Rodriguez\n",
"117-Sophie-Martinez\n",
"118-Noah-Hernandez\n",
"119-Ava-Lopez\n",
"120-Ethan-Gonzalez\n",
"121-Mia-Perez\n",
"122-William-Sanchez\n",
"123-James-Ramirez\n",
"124-Olivia-Flores\n",
"125-Lucas-Cruz\n",
"126-Isabella-Rivera\n",
"127-David-Lee\n",
"128-Sophie-Walker\n",
"129-Matthew-Hall\n",
"130-Emma-Allen\n",
"131-Ryan-Young\n",
"132-Ava-King\n",
"133-Ethan-Wright\n",
"134-Mia-Scott\n",
"135-William-Turner\n",
"136-James-Carter\n",
"137-Olivia-Phillips\n",
"138-Lucas-Evans\n",
"139-Sophie-Collins\n",
"140-Noah-Stewart\n",
"141-Ava-Snyder\n",
"142-Ethan-Baker\n",
"143-Mia-Nelson\n",
"144-Noah-Roberts\n",
"145-Emma-Campbell\n",
"146-William-Miller\n",
"147-James-Davis\n",
"148-Olivia-Garcia\n",
"149-Lucas-Rodriguez\n",
"150-Sophie-Martinez\n",
"151-Noah-Hernandez\n",
"152-Ava-Lopez\n",
"153-Ethan-Gonzalez\n",
"154-Mia-Perez\n",
"155-William-Sanchez\n",
"156-James-Ramirez\n",
"157-Olivia-Flores\n",
"158-Lucas-Cruz\n",
"159-Isabella-Rivera\n",
"160-David-Lee\n",
"161-Sophie-Walker\n",
"162-Matthew-Hall\n",
"163-Emma-Allen\n",
"164-Ryan-Young\n",
"165-Ava-King\n",
"166-Ethan-Wright\n",
"167-Mia-Scott\n",
"168-William-Turner\n",
"169-James-Carter\n",
"170-Olivia-Phillips\n",
"171-Lucas-Evans\n",
"172-Sophie-Collins\n",
"173-Noah-Stewart\n",
"174-Ava-Snyder\n",
"175-Ethan-Baker\n",
"176-Mia-Nelson\n",
"177-Noah-Roberts\n",
"178-Emma-Campbell\n",
"179-William-Miller\n",
"180-James-Davis\n",
"181-Olivia-Garcia\n",
"182-Lucas-Rodriguez\n",
"183-Sophie-Martinez\n",
"184-Noah-Hernandez\n",
"185-Ava-Lopez\n",
"186-Ethan-Gonzalez\n",
"187-Mia-Perez\n",
"188-William-Sanchez\n",
"189-James-Ramirez\n",
"190-Olivia-Flores\n",
"191-Lucas-Cruz\n",
"192-Isabella-Rivera\n",
"193-David-Lee\n",
"194-Sophie-Walker\n",
"195-Matthew-Hall\n",
"196-Emma-Allen\n",
"197-Ryan-Young\n",
"198-Ava-King\n",
"199-Ethan-Wright\n",
"200-Mia-Scott\n",
"201-William-Turner\n",
"202-James-Carter\n",
"203-Olivia-Phillips\n",
"204-Lucas-Evans\n",
"205-Sophie-Collins\n",
"206-Noah-Stewart\n",
"207-Ava-Snyder\n",
"208-Ethan-Baker\n",
"209-Mia-Nelson\n",
"210-Noah-Roberts\n",
"211-Emma-Campbell\n",
"212-William-Miller\n",
"213-James-Davis\n",
"214-Olivia-Garcia\n",
"215-Lucas-Rodriguez\n",
"216-Sophie-Martinez\n",
"217-Noah-Hernandez\n",
"218-Ava-Lopez\n",
"219-Ethan-Gonzalez\n",
"220-Mia-Perez\n",
"221-William-Sanchez\n",
"222-James-Ramirez\n",
"223-Olivia-Flores\n",
"224-Lucas-Cruz\n",
"225-Isabella-Rivera\n",
"226-David-Lee\n",
"227-Sophie-Walker\n",
"228-Matthew-Hall\n",
"229-Emma-Allen\n",
"230-Ryan-Young\n",
"231-Ava-King\n",
"232-Ethan-Wright\n",
"233-Mia-Scott\n",
"234-William-Turner\n",
"235-James-Carter\n",
"236-Olivia-Phillips\n",
"237-Lucas-Evans\n",
"238-Sophie-Collins\n",
"239-Noah-Stewart\n",
"240-Ava-Snyder\n",
"241-Ethan-Baker\n",
"242-Mia-Nelson\n",
"243-Noah-Roberts\n",
"244-Emma-Campbell\n",
"245-William-Miller\n",
"246-James-Davis\n",
"247-Olivia-Garcia\n",
"248-Lucas-Rodriguez\n",
"249-Sophie-Martinez\n",
"250-Noah-Hernandez\n",
"251-Ava-Lopez\n",
"252-Ethan-Gonzalez\n",
"253-Mia-Perez\n",
"254-William-Sanchez\n",
"255-James-Ramirez\n",
"256-Olivia-Flores\n",
"257-Lucas-Cruz\n",
"258-Isabella-Rivera\n",
"259-David-Lee\n",
"260-Sophie-Walker\n",
"261-Matthew-Hall\n",
"262-Emma-Allen\n",
"263-Ryan-Young\n",
"264-Ava-King\n",
"265-Ethan-Wright\n",
"266-Mia-Scott\n",
"267-William-Turner\n",
"268-James-Carter\n",
"269-Olivia-Phillips\n",
"270-Lucas-Evans\n",
"271-Sophie-Collins\n",
"272-Noah-Stewart\n",
"273-Ava-Snyder\n",
"274-Ethan-Baker\n",
"275-Mia-Nelson\n",
"276-Noah-Roberts\n",
"277-Emma-Campbell\n",
"278-William-Miller\n",
"279-James-Davis\n",
"280-Olivia-Garcia\n",
"281-Lucas-Rodriguez\n",
"282-Sophie-Martinez\n",
"283-Noah-Hernandez\n",
"284-Ava-Lopez\n",
"285-Ethan-Gonzalez\n",
"286-Mia-Perez\n",
"287-William-Sanchez\n",
"288-James-Ramirez\n",
"289-Olivia-Flores\n",
"290-Lucas-Cruz\n",
"291-Isabella-Rivera\n",
"292-David-Lee\n",
"293-Sophie-Walker\n",
"294-Matthew-Hall\n",
"295-Emma-Allen\n",
"296-Ryan-Young\n",
"297-Ava-King\n",
"298-Ethan-Wright\n",
"299-Mia-Scott\n",
"300-William-Turner\n",
"301-James-Carter\n",
"302-Olivia-Phillips\n",
"303-Lucas-Evans\n",
"304-Sophie-Collins\n",
"305-Noah-Stewart\n",
"306-Ava-Snyder\n",
"307-Ethan-Baker\n",
"308-Mia-Nelson\n",
"309-Noah-Roberts\n",
"310-Emma-Campbell\n",
"311-William-Miller\n",
"312-James-Davis\n",
"313-Olivia-Garcia\n",
"314-Lucas-Rodriguez\n",
"315-Sophie-Martinez\n",
"316-Noah-Hernandez\n",
"317-Ava-Lopez\n",
"318-Ethan-Gonzalez\n",
"319-Mia-Perez\n",
"320-William-Sanchez\n",
"321-James-Ramirez\n",
"322-Olivia-Flores\n",
"323-Lucas-Cruz\n",
"324-Isabella-Rivera\n",
"325-David-Lee\n",
"326-Sophie-Walker\n"
]
}
],
"source": [
"import pandas as pd\n",
"usecols = [\"Name\", \"Vorname\"]\n",
"air = pd.read_csv(\"regie_name.csv\", usecols=usecols)\n",
"\n",
"for count, (i, b) in enumerate(zip(air[\"Name\"], air[\"Vorname\"])):\n",
" print(count, b, i, sep=\"-\")\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "ee4b5dee-6b41-49eb-8d75-9db50d20fdef",
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>Name</th>\n",
" <th>Vorname</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>Smith</td>\n",
" <td>John</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>Johnson</td>\n",
" <td>James</td>\n",
" </tr>\n",
" <tr>\n",
" <th>2</th>\n",
" <td>Williams</td>\n",
" <td>William</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>Brown</td>\n",
" <td>Michael</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>Davis</td>\n",
" <td>David</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" Name Vorname\n",
"0 Smith John\n",
"1 Johnson James\n",
"2 Williams William\n",
"3 Brown Michael\n",
"4 Davis David"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"import pandas as pd\n",
"air = pd.read_csv(\"regie_name.csv\", nrows=5)\n",
"air"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "8017742d-3b8e-4847-b5a3-4f28e0c9057a",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Name\n",
"Flores 10.0\n",
"Davis 10.0\n",
"Gonzalez 10.0\n",
"Garcia 10.0\n",
"Cruz 10.0\n",
" ... \n",
"Taylor 1.0\n",
"White 1.0\n",
"Thompson 1.0\n",
"Wilson 1.0\n",
"Williams 1.0\n",
"Length: 47, dtype: float64\n"
]
}
],
"source": [
"chunker = pd.read_csv(\"regie_name.csv\", chunksize=1000)\n",
"tot = pd.Series([], dtype='int64')\n",
"for piece in chunker:\n",
" tot = tot.add(piece[\"Name\"].value_counts(), fill_value=0)\n",
"tot = tot.sort_values(ascending=False)\n",
"print(tot)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d7531f34-19a6-4606-88f2-1c4fdd3b0b7d",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -0,0 +1,37 @@
#! /usr/bin/env python
from ollama import Client
import datetime
import os
output_folder = "output"
if not os.path.exists(output_folder):
os.makedirs(output_folder)
# Create_Timestamp
now = int(datetime.datetime.now().timestamp())
# res = int(now.timestamp())
# PROMT
prompt = input("Was möchtest du Fragen: ")
client = Client(
host='http://localhost:11434',
headers={'x-some-header': 'some-value'}
)
response = client.chat(
model='gemma3n:e2b',
messages=[
{
'role': 'user',
'content': prompt,
}],
stream=True)
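# Stream the model's answer to the terminal and append it to a timestamped Markdown file in the output folder.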
for chunk in response:
print(chunk['message']["content"], end='', flush=True)
with open(str(output_folder + "/ollama_output_" + str(now) + ".md"), "a") as dm:
print(chunk['message']["content"], end='', flush=True, file=dm)
# with open("test.md", "w") as dm:

View File

@@ -0,0 +1,6 @@
Okay, I'm exiting. Have a good one!
If you need anything in the future, feel free to ask. 😊

View File

@@ -0,0 +1,38 @@
# %% packages
# from langchain_openai import ChatOpenAI
import langchain_ollama
from langchain_core.prompts import ChatPromptTemplate
# from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser

# load_dotenv('.env')


def translate_func(select_lang="German", target_lang="English", query_trans="prompt"):
    if query_trans == "prompt":
        query_trans = input("What should be translated? ")

    # %% set up prompt template
    prompt_template = ChatPromptTemplate.from_messages([
        ("system", f"You are an AI assistant that translates {select_lang} into another language."),
        ("user", "Translate this sentence: '{input}' into {target_language}"),
    ])

    # %% model
    model = langchain_ollama.llms.OllamaLLM(base_url='http://localhost:11434',
                                            model="gemma3n:e2b", temperature=0)

    # %% chain
    chain = prompt_template | model | StrOutputParser()

    # %% invoke chain
    res = chain.invoke({"input": query_trans,
                        "target_language": target_lang})
    print(res)


# %%
if __name__ == "__main__":
    translate_func(select_lang="German", target_lang="English")

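The translator above prompts on stdin when query_trans is left at "prompt". For a non-interactive call, pass the text directly; the sentence below is only an example:

    translate_func(select_lang="German", target_lang="English",
                   query_trans="Guten Morgen, wie geht es dir?")
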
View File

@@ -1,7 +1,6 @@
[defaults]
#inventory = /home/user06/hosts
inventory = ./test.py
inventory = ./ansible_inv_main.py
ansible_managed = Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S on {host}
private_key_file = /home/jonnybravo/.ssh/ansible-test
gathering = smart

View File

@@ -0,0 +1,105 @@
#! /usr/bin/env python3
import csv
import errno
import json
import os
import socket
import sys

import scapy.all as scapy


def scan(ip):
    arp_request = scapy.ARP(pdst=ip)
    broadcast = scapy.Ether(dst="ff:ff:ff:ff:ff:ff")
    arp_request_broadcast = broadcast / arp_request
    answered_list = scapy.srp(arp_request_broadcast,
                              timeout=1, verbose=False)[0]
    results = []
    for element in answered_list:
        # Fall back to the IP if there is no reverse-DNS entry
        try:
            hostname = socket.gethostbyaddr(element[1].psrc)[0]
        except (socket.herror, socket.gaierror):
            hostname = element[1].psrc
        result = {
            "ip": element[1].psrc,
            "mac": element[1].hwsrc,
            "hostname": hostname,
        }
        results.append(result)
    return results


def port_open(address: str, dest_port=22) -> bool:
    # result = sock.connect_ex(('ras-dan-01.local',22))
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(5)
        result = sock.connect_ex((address, dest_port))
        if result == 0:
            return True
        else:
            return False


def scan_csv(csvfile: str, colm="hostname"):
    if os.path.exists(csvfile):
        hostlist = []
        with open(csvfile) as csv_file:
            csv_reader = csv.DictReader(csv_file, delimiter=",")
            for row in csv_reader:
                hostlist.append(row[colm])
    else:
        raise FileNotFoundError(
            errno.ENOENT, os.strerror(errno.ENOENT), csvfile)
    return tuple(hostlist)


def create_ip_list(target_ip=["192.168.50.1/24"]):
    only_ip_list = []
    for ip_range in target_ip:
        for i in scan(ip_range):
            if i["hostname"] != ".":
                scan_name = i["hostname"]
            else:
                scan_name = i["ip"]
            # print(scan_name)
            # print(port_open(address=scan_name))
            if port_open(address=scan_name):
                only_ip_list.append(scan_name)
    return tuple(only_ip_list)


if __name__ == "__main__":
    # if len(sys.argv) == 1:
    ip_list = create_ip_list()
    man_list = scan_csv(
        csvfile=os.path.dirname(os.path.realpath(
            __file__)) + "/hostname_manuel.csv"
    )
    output = {
        "_meta": {
            "hostvars": {
                "scan_csv": {
                    "http_port": 123,
                }
            }
        },
        "network_scan": {
            "hosts": ip_list,
            "vars": {
                "ansible_user": "jonnybravo",
                "ansible_python_interpreter": "/usr/bin/python3",
                "ansible_ssh_private_key_file": "/home/jonnybravo/.ssh/ansible-test",
            },
        },
        "csv_scan": {
            "hosts": man_list,
            "vars": {
                "ansible_user": "jonnybravo",
                "ansible_python_interpreter": "/usr/bin/python3",
                "ansible_ssh_private_key_file": "/home/jonnybravo/.ssh/ansible-test",
            },
        },
    }
    print(json.dumps(output, indent=4, sort_keys=True))
    # elif len(sys.argv) == 2:
    #     print(sys.argv)

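The script above is intended as an Ansible dynamic inventory (the ansible.cfg change earlier in this comparison points inventory at ./ansible_inv_main.py). Ansible simply executes the script and parses the JSON it prints; a stripped-down sketch of that expected layout, with made-up hosts, looks like this:

    import json

    inventory = {
        "_meta": {"hostvars": {}},
        "network_scan": {
            "hosts": ["192.168.50.10"],              # placeholder host
            "vars": {"ansible_user": "jonnybravo"},
        },
        "csv_scan": {
            "hosts": ["ras-dan-01.local"],
            "vars": {},
        },
    }
    print(json.dumps(inventory, indent=4, sort_keys=True))

Running ansible-inventory -i ansible_inv_main.py --list is a quick way to confirm that Ansible can parse the script's output.
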
View File

@@ -0,0 +1,669 @@
ansible_all_ipv4_addresses:
- 172.23.0.187
ansible_all_ipv6_addresses:
- fe80::7815:ceff:fe98:b5ed
ansible_apparmor:
status: disabled
ansible_architecture: x86_64
ansible_bios_date: 02/02/2024
ansible_bios_vendor: LENOVO
ansible_bios_version: N32ET91W (1.67 )
ansible_board_asset_tag: Not Available
ansible_board_name: 20XW0055GE
ansible_board_serial: NA
ansible_board_vendor: LENOVO
ansible_board_version: SDK0J40697 WIN
ansible_chassis_asset_tag: No Asset Information
ansible_chassis_serial: NA
ansible_chassis_vendor: LENOVO
ansible_chassis_version: None
ansible_cmdline:
BOOT_IMAGE: /boot/vmlinuz-6.12-x86_64
cryptdevice: UUID=9cf04ebe-7c4d-4987-afa5-0585f14cd1d7:luks-9cf04ebe-7c4d-4987-afa5-0585f14cd1d7
nmi_watchdog: '0'
nowatchdog: true
quiet: true
reboot: acpi
resume: /dev/mapper/luks-e3c32a55-f6b4-4d3d-a17a-9901f57c152f
root: /dev/mapper/luks-9cf04ebe-7c4d-4987-afa5-0585f14cd1d7
rw: true
splash: true
udev.log_priority: '3'
ansible_date_time:
date: '2025-01-28'
day: '28'
epoch: '1738052955'
epoch_int: '1738052955'
hour: 09
iso8601: '2025-01-28T08:29:15Z'
iso8601_basic: 20250128T092915446279
iso8601_basic_short: 20250128T092915
iso8601_micro: '2025-01-28T08:29:15.446279Z'
minute: '29'
month: '01'
second: '15'
time: 09:29:15
tz: CET
tz_dst: CEST
tz_offset: '+0100'
weekday: Tuesday
weekday_number: '2'
weeknumber: '04'
year: '2025'
ansible_default_ipv4:
address: 172.23.0.187
alias: host0
broadcast: 172.23.0.255
gateway: 172.23.0.1
interface: host0
macaddress: 7a:15:ce:98:b5:ed
mtu: 1500
netmask: 255.255.255.0
network: 172.23.0.0
prefix: '24'
type: ether
ansible_default_ipv6: {}
ansible_device_links:
ids: {}
labels: {}
masters:
nvme0n1p2:
- dm-0
nvme0n1p3:
- dm-1
uuids: {}
ansible_devices:
dm-0:
holders: []
host: ''
links:
ids: []
labels: []
masters: []
uuids: []
model: null
partitions: {}
removable: '0'
rotational: '0'
sas_address: null
sas_device_handle: null
scheduler_mode: ''
sectors: 1928097365
sectorsize: '512'
size: 919.39 GB
support_discard: '0'
vendor: null
virtual: 1
dm-1:
holders: []
host: ''
links:
ids: []
labels: []
masters: []
uuids: []
model: null
partitions: {}
removable: '0'
rotational: '0'
sas_address: null
sas_device_handle: null
scheduler_mode: ''
sectors: 71673682
sectorsize: '512'
size: 34.18 GB
support_discard: '0'
vendor: null
virtual: 1
nvme0n1:
holders: []
host: ''
links:
ids: []
labels: []
masters: []
uuids: []
model: Micron MTFDKBA1T0TFH
partitions:
nvme0n1p1:
holders: []
links:
ids: []
labels: []
masters: []
uuids: []
sectors: 614400
sectorsize: 512
size: 300.00 MB
start: '4096'
uuid: null
nvme0n1p2:
holders:
- luks-9cf04ebe-7c4d-4987-afa5-0585f14cd1d7
links:
ids: []
labels: []
masters:
- dm-0
uuids: []
sectors: 1928101461
sectorsize: 512
size: 919.39 GB
start: '618496'
uuid: null
nvme0n1p3:
holders:
- luks-e3c32a55-f6b4-4d3d-a17a-9901f57c152f
links:
ids: []
labels: []
masters:
- dm-1
uuids: []
sectors: 71677778
sectorsize: 512
size: 34.18 GB
start: '1928719957'
uuid: null
removable: '0'
rotational: '0'
sas_address: null
sas_device_handle: null
scheduler_mode: none
sectors: 2000409264
sectorsize: '512'
serial: 222538DCB6A6
size: 953.87 GB
support_discard: '512'
vendor: null
virtual: 1
sda:
holders: []
host: ''
links:
ids: []
labels: []
masters: []
uuids: []
model: 1081CS0
partitions: {}
removable: '1'
rotational: '0'
sas_address: null
sas_device_handle: null
scheduler_mode: mq-deadline
sectors: 0
sectorsize: '512'
size: 0.00 Bytes
support_discard: '0'
vendor: NORELSYS
virtual: 1
sdb:
holders: []
host: ''
links:
ids: []
labels: []
masters: []
uuids: []
model: 1081CS1
partitions: {}
removable: '1'
rotational: '0'
sas_address: null
sas_device_handle: null
scheduler_mode: mq-deadline
sectors: 0
sectorsize: '512'
size: 0.00 Bytes
support_discard: '0'
vendor: NORELSYS
virtual: 1
sdc:
holders: []
host: ''
links:
ids: []
labels: []
masters: []
uuids: []
model: CT1000X8SSD9
partitions:
sdc1:
holders: []
links:
ids: []
labels: []
masters: []
uuids: []
sectors: 1953521664
sectorsize: 512
size: 931.51 GB
start: '2048'
uuid: null
removable: '0'
rotational: '1'
sas_address: null
sas_device_handle: null
scheduler_mode: mq-deadline
sectors: 1953525168
sectorsize: '512'
size: 931.51 GB
support_discard: '0'
vendor: Micron
virtual: 1
ansible_distribution: Ubuntu
ansible_distribution_file_parsed: true
ansible_distribution_file_path: /etc/os-release
ansible_distribution_file_variety: Debian
ansible_distribution_major_version: '22'
ansible_distribution_release: jammy
ansible_distribution_version: '22.04'
ansible_dns:
nameservers:
- 10.50.0.3
options:
edns0: true
trust-ad: true
search:
- speedport.ip
ansible_domain: ''
ansible_effective_group_id: 1000
ansible_effective_user_id: 1000
ansible_env:
HOME: /home/jonnybravo
LC_CTYPE: C.UTF-8
LOGNAME: jonnybravo
MOTD_SHOWN: pam
PATH: /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin
PWD: /home/jonnybravo
SHELL: /bin/bash
SHLVL: '0'
SSH_CLIENT: 172.23.0.1 59494 22
SSH_CONNECTION: 172.23.0.1 59494 172.23.0.187 22
SSH_TTY: /dev/pts/1
TERM: tmux-256color
USER: jonnybravo
XDG_RUNTIME_DIR: /run/user/1000
XDG_SESSION_CLASS: user
XDG_SESSION_ID: '6'
XDG_SESSION_TYPE: tty
_: /bin/sh
ansible_fibre_channel_wwn: []
ansible_fips: false
ansible_flags:
- fpu
- vme
- de
- pse
- tsc
- msr
- pae
- mce
- cx8
- apic
- sep
- mtrr
- pge
- mca
- cmov
- pat
- pse36
- clflush
- dts
- acpi
- mmx
- fxsr
- sse
- sse2
- ss
- ht
- tm
- pbe
- syscall
- nx
- pdpe1gb
- rdtscp
- lm
- constant_tsc
- art
- arch_perfmon
- pebs
- bts
- rep_good
- nopl
- xtopology
- nonstop_tsc
- cpuid
- aperfmperf
- tsc_known_freq
- pni
- pclmulqdq
- dtes64
- monitor
- ds_cpl
- vmx
- est
- tm2
- ssse3
- sdbg
- fma
- cx16
- xtpr
- pdcm
- pcid
- sse4_1
- sse4_2
- x2apic
- movbe
- popcnt
- tsc_deadline_timer
- aes
- xsave
- avx
- f16c
- rdrand
- lahf_lm
- abm
- 3dnowprefetch
- cpuid_fault
- epb
- cat_l2
- cdp_l2
- ssbd
- ibrs
- ibpb
- stibp
- ibrs_enhanced
- tpr_shadow
- flexpriority
- ept
- vpid
- ept_ad
- fsgsbase
- tsc_adjust
- bmi1
- avx2
- smep
- bmi2
- erms
- invpcid
- rdt_a
- avx512f
- avx512dq
- rdseed
- adx
- smap
- avx512ifma
- clflushopt
- clwb
- intel_pt
- avx512cd
- sha_ni
- avx512bw
- avx512vl
- xsaveopt
- xsavec
- xgetbv1
- xsaves
- split_lock_detect
- user_shstk
- dtherm
- ida
- arat
- pln
- pts
- hwp
- hwp_notify
- hwp_act_window
- hwp_epp
- hwp_pkg_req
- vnmi
- avx512vbmi
- umip
- pku
- ospke
- avx512_vbmi2
- gfni
- vaes
- vpclmulqdq
- avx512_vnni
- avx512_bitalg
- avx512_vpopcntdq
- rdpid
- movdiri
- movdir64b
- fsrm
- avx512_vp2intersect
- md_clear
- ibt
- flush_l1d
- arch_capabilities
ansible_form_factor: Notebook
ansible_fqdn: localhost
ansible_host0:
active: true
device: host0
ipv4:
address: 172.23.0.187
broadcast: 172.23.0.255
netmask: 255.255.255.0
network: 172.23.0.0
prefix: '24'
ipv6:
- address: fe80::7815:ceff:fe98:b5ed
prefix: '64'
scope: link
macaddress: 7a:15:ce:98:b5:ed
mtu: 1500
promisc: false
speed: 10000
type: ether
ansible_hostname: dan-jam-01
ansible_hostnqn: ''
ansible_interfaces:
- lo
- host0
ansible_is_chroot: true
ansible_iscsi_iqn: ''
ansible_kernel: 6.12.4-1-MANJARO
ansible_kernel_version: '#1 SMP PREEMPT_DYNAMIC Mon, 09 Dec 2024 11:58:37 +0000'
ansible_lo:
active: true
device: lo
ipv4:
address: 127.0.0.1
broadcast: ''
netmask: 255.0.0.0
network: 127.0.0.0
prefix: '8'
ipv6:
- address: ::1
prefix: '128'
scope: host
mtu: 65536
promisc: false
type: loopback
ansible_loadavg:
15m: 0.37646484375
1m: 1.17431640625
5m: 0.80224609375
ansible_local: {}
ansible_locally_reachable_ips:
ipv4:
- 127.0.0.0/8
- 127.0.0.1
- 172.23.0.187
ipv6:
- ::1
- fe80::7815:ceff:fe98:b5ed
ansible_lsb:
codename: jammy
description: Ubuntu 22.04.5 LTS
id: Ubuntu
major_release: '22'
release: '22.04'
ansible_lvm: N/A
ansible_machine: x86_64
ansible_machine_id: 74fa9d4996124ff58eea3b7b9f53f38f
ansible_memfree_mb: 15518
ansible_memory_mb:
nocache:
free: 23611
used: 8203
real:
free: 15518
total: 31814
used: 16296
swap:
cached: 8
free: 34976
total: 34996
used: 20
ansible_memtotal_mb: 31814
ansible_mounts:
- block_available: 162114263
block_size: 4096
block_total: 236947382
block_used: 74833119
device: /dev/mapper/luks-9cf04ebe-7c4d-4987-afa5-0585f14cd1d7
dump: 0
fstype: ext4
inode_available: 58698830
inode_total: 60260352
inode_used: 1561522
mount: /
options: rw,noatime,idmapped,bind
passno: 0
size_available: 664020021248
size_total: 970536476672
uuid: N/A
- block_available: 162114263
block_size: 4096
block_total: 236947382
block_used: 74833119
device: /dev/mapper/luks-9cf04ebe-7c4d-4987-afa5-0585f14cd1d7
dump: 0
fstype: ext4
inode_available: 58698830
inode_total: 60260352
inode_used: 1561522
mount: /run/host/os-release
options: ro,nosuid,nodev,noexec,noatime,bind
passno: 0
size_available: 664020021248
size_total: 970536476672
uuid: N/A
ansible_nodename: dan-jam-01
ansible_os_family: Debian
ansible_pkg_mgr: apt
ansible_proc_cmdline:
BOOT_IMAGE: /boot/vmlinuz-6.12-x86_64
cryptdevice: UUID=9cf04ebe-7c4d-4987-afa5-0585f14cd1d7:luks-9cf04ebe-7c4d-4987-afa5-0585f14cd1d7
nmi_watchdog: '0'
nowatchdog: true
quiet: true
reboot: acpi
resume: /dev/mapper/luks-e3c32a55-f6b4-4d3d-a17a-9901f57c152f
root:
- UUID=38f88aa5-03f0-4608-ac46-6b9cad7cb1c4
- /dev/mapper/luks-9cf04ebe-7c4d-4987-afa5-0585f14cd1d7
rw: true
splash: true
udev.log_priority: '3'
ansible_processor:
- '0'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
- '1'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
- '2'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
- '3'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
- '4'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
- '5'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
- '6'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
- '7'
- GenuineIntel
- 11th Gen Intel(R) Core(TM) i7-1165G7 @ 2.80GHz
ansible_processor_cores: 4
ansible_processor_count: 1
ansible_processor_nproc: 8
ansible_processor_threads_per_core: 2
ansible_processor_vcpus: 8
ansible_product_name: 20XW0055GE
ansible_product_serial: NA
ansible_product_uuid: NA
ansible_product_version: ThinkPad X1 Carbon Gen 9
ansible_python:
executable: /usr/bin/python3
has_sslcontext: true
type: cpython
version:
major: 3
micro: 12
minor: 10
releaselevel: final
serial: 0
version_info:
- 3
- 10
- 12
- final
- 0
ansible_python_version: 3.10.12
ansible_real_group_id: 1000
ansible_real_user_id: 1000
ansible_selinux:
status: disabled
ansible_selinux_python_present: true
ansible_service_mgr: systemd
ansible_ssh_host_key_ecdsa_public: AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBBlQf89/mIPEp3lsLUSkqVWQaU1yvskxPFbvnedoV0f+vi5vQKUoUfcX//3jqIKgjdH6fnqiQRfROO+seF+mjjo=
ansible_ssh_host_key_ecdsa_public_keytype: ecdsa-sha2-nistp256
ansible_ssh_host_key_ed25519_public: AAAAC3NzaC1lZDI1NTE5AAAAIIZpUjgZziEasuYZLOLxU/ex4HiuyOf6oofL1DqM55xx
ansible_ssh_host_key_ed25519_public_keytype: ssh-ed25519
ansible_ssh_host_key_rsa_public: AAAAB3NzaC1yc2EAAAADAQABAAABgQCxAnj9PVaIaAMaRExN0t0cW+7mu2QOum1BowLjoWFhgNcYeoev38a0FqUy49WE9+wIYG+URr1vIjO2F5Udd6QRtm8//wbcdBA4sbc5JKmd+ILMpy2FI6rmAjAfzYQkxLxGB+AwblAKTAJgoFn6/stXELiY8QYfVf2+SKTVk96gvVlMsIuFQy36PSjS2nSwdovTML4N/KrwJdUMDp7ysJ8bOB3NyXY/hBy+dgzGZ8ezYx5pkg7PRR+Y5rRh++dsfrkm+8e4gHi6uY+mwitpxodNMzVBRI6KzijUhbj4bTU7ASCwfmQSoNBZrY+4Xb9DiI421GP9/TWxAOz7vLWNcnkKtVeZpzXrxH5mmAbKJqu6vFhOkkz4IGz/mvvAAOgX/J6ILgT+1VHAIooQVfIDoOE/9GfDEPmWpjCmP1IQRokJwIy3isUZwMoN4V0rYfZ2YDW8YHwgirRXa6eEBXKZzkHAnqCt+WSss6jVdGW+1PjiakG1N1NL+dUI0NcPHgiuZIM=
ansible_ssh_host_key_rsa_public_keytype: ssh-rsa
ansible_swapfree_mb: 34976
ansible_swaptotal_mb: 34996
ansible_system: Linux
ansible_system_capabilities:
- ''
ansible_system_capabilities_enforced: 'True'
ansible_system_vendor: LENOVO
ansible_systemd:
features: +PAM +AUDIT +SELINUX +APPARMOR +IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL
+ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK
+PCRE2 -PWQUALITY -P11KIT -QRENCODE +BZIP2 +LZ4 +XZ +ZLIB +ZSTD -XKBCOMMON +UTMP
+SYSVINIT default-hierarchy=unified
version: 249
ansible_uptime_seconds: 73395
ansible_user_dir: /home/jonnybravo
ansible_user_gecos: Jonny Bravo,,,
ansible_user_gid: 1000
ansible_user_id: jonnybravo
ansible_user_shell: /bin/bash
ansible_user_uid: 1000
ansible_userspace_architecture: x86_64
ansible_userspace_bits: '64'
ansible_virtualization_role: guest
ansible_virtualization_tech_guest:
- container
- systemd-nspawn
ansible_virtualization_tech_host:
- kvm
- virtualbox
ansible_virtualization_type: systemd-nspawn
gather_subset:
- all
module_setup: true

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,3 @@
hostname, info
dan-jam-01, keine,
ras-dan-01.local, keine,

View File

@@ -1,73 +0,0 @@
#! /usr/bin/env python3.12
import scapy.all as scapy
import json
import socket


def scan(ip):
    arp_request = scapy.ARP(pdst=ip)
    broadcast = scapy.Ether(dst="ff:ff:ff:ff:ff:ff")
    arp_request_broadcast = broadcast / arp_request
    answered_list = scapy.srp(arp_request_broadcast, timeout=1, verbose=False)[0]
    results = []
    for element in answered_list:
        result = {"ip": element[1].psrc, "mac": element[1].hwsrc, 'hostname': socket.gethostbyaddr(element[1].psrc)[0]}
        results.append(result)
    return results


def test_port(address: str, dest_port: int) -> bool:
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            if sock.connect_ex((address, dest_port)) == 0:
                return True
            return False
    except (OSError, ValueError):
        return False


def create_ip_list(target_ip="192.168.50.1/24"):
    only_ip_list = []
    for i in scan(target_ip):
        # print(test_port(address=i['ip'], dest_port=22))
        if i['hostname'] != ".":
            only_ip_list.append(i['hostname'])
        else:
            only_ip_list.append(i['ip'])
    return tuple(only_ip_list)


def scan_csv(csv_file=str):
    pass


if __name__ == "__main__":
    ip_list = create_ip_list()
    man_list = ["ras-dan-01.local", "dan-jam-01"]
    output = {
        "_meta": {
            "hostvars": {
                "webprod": {
                    "http_port": 123,
                }
            }
        },
        "network_scan": {
            "hosts": ip_list,
            "vars": {
                "ansible_user": "jonnybravo",
                "ansible_python_interpreter": "/usr/bin/python3",
                "ansible_ssh_private_key_file": "/home/jonnybravo/.ssh/ansible-test"
            }
        },
        "scan_csv": {
            "hosts": man_list,
            "vars": {
                "ansible_user": "jonnybravo",
                "ansible_python_interpreter": "/usr/bin/python3",
                "ansible_ssh_private_key_file": "/home/jonnybravo/.ssh/ansible-test"
            }
        }
    }
    print(json.dumps(output, indent=4, sort_keys=True))

View File

@@ -1,20 +1,48 @@
import os
def show_all_files_directory(dir_path = str, search_endung = False, search_string = False):
    li_all = []
def show_all_files_directory(dir_path: str, search_endung: str = None, search_string: str = None) -> list:
    """
    Returns a list of all files in the directory and its subdirectories.

    Parameters:
    dir_path (str): Path to the root directory.
    search_endung (str): File-name ending to filter by. Default is None.
    search_string (str): String that must appear in the file name. Default is None.

    Returns:
    list: List of full paths to all files matching the search criteria.
    """
    # Check that dir_path is not empty and exists
    if not os.path.isdir(dir_path):
        print("The specified directory path does not exist.")
        return []
    all_files_path = []
    for root_folder, dirs, files in os.walk(dir_path, topdown=False):
        for name in files:
            FullPATH = str(os.path.join(root_folder, name))
            if not search_endung is False:
                if FullPATH.endswith(search_endung):
                    li_all.append(FullPATH)
            elif not search_string is False:
                if not FullPATH.find(search_string) == -1:
                    li_all.append(FullPATH)
            else:
                li_all.append(FullPATH)
    return li_all
            full_path = str(os.path.join(root_folder, name))
            # If search_endung is specified
            if search_endung:
                if not full_path.endswith(search_endung):
                    continue
            # If search_string is specified
            elif search_string:
                if full_path.find(search_string) == -1:
                    continue
            all_files_path.append(full_path)
    print("Files:")
    for file in all_files_path:
        print(file)
    return all_files_path
print(show_all_files_directory(dir_path="/home/jonnybravo/Downloads", search_string=".txt"))
if __name__ == "__main__":
    test = show_all_files_directory(dir_path="/home/jonnybravo/Downloads", search_endung=".txt")
    print(test)

View File

51
steam.py Normal file
View File

@@ -0,0 +1,51 @@
import requests

# Steam API endpoint
STEAM_API_URL = "https://api.steampowered.com"

# User ID or application ID
USER_ID = "656119799984"  # replace with the ID of the user or app you want information about


def get_user_info(user_id):
    """
    Fetches information about a given user.
    """
    url = f"{STEAM_API_URL}/ISteamUser/GetPlayerSummaries/v0002/?key=80BED3ACB9E38E5A944F2BEB26FC9C3E&steamids={user_id}"
    response = requests.get(url)
    if response.status_code == 200:
        data = response.text
        return data
    else:
        print(f"An error occurred: {response.status_code}")
        return None


def get_app_info(app_id):
    """
    Fetches information about a given application.
    """
    url = f"{STEAM_API_URL}/ISteamApps/GetAppList/v2/?key=80BED3ACB9E38E5A944F2BEB26FC9C3E&appids={app_id}"
    response = requests.get(url)
    if response.status_code == 200:
        data = response.text
        return data
    else:
        print(f"An error occurred: {response.status_code}")
        return None


def main():
    user_info = get_user_info(USER_ID)
    if user_info:
        print("User information:")
        for line in user_info.splitlines():
            print(line)

    app_info = get_app_info(USER_ID)  # or an app ID
    if app_info:
        print("\nApplication information:")
        lines = app_info.splitlines()
        while lines:
            line = lines.pop(0)
            if line.startswith("<"):
                break
            print(line)


if __name__ == "__main__":
    main()

40
steam_api.py Normal file
View File

@@ -0,0 +1,40 @@
import requests
import json

# Steam API endpoint
STEAM_API_URL = "https://api.steampowered.com"

# User ID or application ID
USER_ID = "76561197999844867"  # replace with the ID of the user or app you want information about


def get_user_info(user_id):
    """
    Fetches information about a given user.
    """
    url = f"{STEAM_API_URL}/ISteamUser/GetPlayerSummaries/v0002/?key=80BED3ACB9E38E5A944F2BEB26FC9C3E&steamids={user_id}"
    response = requests.get(url)
    data = json.loads(response.content)
    return data


def get_app_info(app_id):
    """
    Fetches information about a given application.
    """
    url = f"{STEAM_API_URL}/ISteamApps/GetAppList/v2/?key=80BED3ACB9E38E5A944F2BEB26FC9C3E&appids={app_id}"
    response = requests.get(url)
    data = json.loads(response.content)
    return data


def main():
    user_info = get_user_info(USER_ID)
    print("User information:")
    # GetPlayerSummaries returns the player objects under response -> players
    for key, value in user_info["response"]["players"][0].items():
        print(f"{key}: {value}")

    app_info = get_app_info(USER_ID)  # GetAppList ignores the id and returns the full catalogue
    print("\nApplication information (first 10 entries):")
    for item in app_info["applist"]["apps"][:10]:
        print(f"Name: {item['name']}, App ID: {item['appid']}")


if __name__ == "__main__":
    main()

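For context on the parsing in main() above: a successful GetPlayerSummaries call returns JSON shaped roughly like the literal below (fields abbreviated, values are placeholders), which is why the player dictionary is taken from response["response"]["players"][0]:

    example_response = {
        "response": {
            "players": [
                {
                    "steamid": "76561197999844867",
                    "personaname": "example-name",
                    "profileurl": "https://steamcommunity.com/id/example/",
                }
            ]
        }
    }
    player = example_response["response"]["players"][0]
    for key, value in player.items():
        print(f"{key}: {value}")
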
16
test_skripte/check_git.py Normal file
View File

@@ -0,0 +1,16 @@
import subprocess


def git_test(git_file: str):
    # Returns a (stdout, stderr) tuple with the abbreviated hash and committer
    # date of the last commit that touched git_file.
    return subprocess.Popen(
        'git log -1 --pretty=format:"%h:%cs" {git_file}'.format(
            git_file=git_file),
        shell=True,
        universal_newlines=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    ).communicate()


if __name__ == "__main__":
    print(git_test(git_file="/home/jonnybravo/Python_Skripte/jenkins-module.txt"))

View File

@@ -0,0 +1,28 @@
#! /usr/bin/env python3.12
import socket
import scapy.all as scapy

# result = sock.connect_ex(('ras-dan-01.local',22))
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
    sock.settimeout(5)
    result = sock.connect_ex(('192.168.50.217', 22))
    if result == 0:
        print("Port is open")
    else:
        print("Port is not open")
# sock.close()


def scan(ip):
    arp_request = scapy.ARP(pdst=ip)
    broadcast = scapy.Ether(dst="ff:ff:ff:ff:ff:ff")
    arp_request_broadcast = broadcast / arp_request
    answered_list = scapy.srp(arp_request_broadcast, timeout=1, verbose=False)[0]
    results = []
    for element in answered_list:
        result = {"ip": element[1].psrc, "mac": element[1].hwsrc, 'hostname': socket.gethostbyaddr(element[1].psrc)[0]}
        results.append(result)
    return results


print(scan(ip='dan-jam-01'))

280
test_skripte/socket Normal file
View File

@@ -0,0 +1,280 @@
%!PS-Adobe-3.0
%%Creator: (ImageMagick)
%%Title: (socket)
%%CreationDate: (2024-10-29T14:37:21+00:00)
%%BoundingBox: 3147 1726 3157 1727
%%HiResBoundingBox: 3147 1726 3157 1727
%%DocumentData: Clean7Bit
%%LanguageLevel: 1
%%Orientation: Portrait
%%PageOrder: Ascend
%%Pages: 1
%%EndComments
%%BeginDefaults
%%EndDefaults
%%BeginProlog
%
% Display a color image. The image is displayed in color on
% Postscript viewers or printers that support color, otherwise
% it is displayed as grayscale.
%
/DirectClassPacket
{
%
% Get a DirectClass packet.
%
% Parameters:
% red.
% green.
% blue.
% length: number of pixels minus one of this color (optional).
%
currentfile color_packet readhexstring pop pop
compression 0 eq
{
/number_pixels 3 def
}
{
currentfile byte readhexstring pop 0 get
/number_pixels exch 1 add 3 mul def
} ifelse
0 3 number_pixels 1 sub
{
pixels exch color_packet putinterval
} for
pixels 0 number_pixels getinterval
} bind def
/DirectClassImage
{
%
% Display a DirectClass image.
%
systemdict /colorimage known
{
columns rows 8
[
columns 0 0
rows neg 0 rows
]
{ DirectClassPacket } false 3 colorimage
}
{
%
% No colorimage operator; convert to grayscale.
%
columns rows 8
[
columns 0 0
rows neg 0 rows
]
{ GrayDirectClassPacket } image
} ifelse
} bind def
/GrayDirectClassPacket
{
%
% Get a DirectClass packet; convert to grayscale.
%
% Parameters:
% red
% green
% blue
% length: number of pixels minus one of this color (optional).
%
currentfile color_packet readhexstring pop pop
color_packet 0 get 0.299 mul
color_packet 1 get 0.587 mul add
color_packet 2 get 0.114 mul add
cvi
/gray_packet exch def
compression 0 eq
{
/number_pixels 1 def
}
{
currentfile byte readhexstring pop 0 get
/number_pixels exch 1 add def
} ifelse
0 1 number_pixels 1 sub
{
pixels exch gray_packet put
} for
pixels 0 number_pixels getinterval
} bind def
/GrayPseudoClassPacket
{
%
% Get a PseudoClass packet; convert to grayscale.
%
% Parameters:
% index: index into the colormap.
% length: number of pixels minus one of this color (optional).
%
currentfile byte readhexstring pop 0 get
/offset exch 3 mul def
/color_packet colormap offset 3 getinterval def
color_packet 0 get 0.299 mul
color_packet 1 get 0.587 mul add
color_packet 2 get 0.114 mul add
cvi
/gray_packet exch def
compression 0 eq
{
/number_pixels 1 def
}
{
currentfile byte readhexstring pop 0 get
/number_pixels exch 1 add def
} ifelse
0 1 number_pixels 1 sub
{
pixels exch gray_packet put
} for
pixels 0 number_pixels getinterval
} bind def
/PseudoClassPacket
{
%
% Get a PseudoClass packet.
%
% Parameters:
% index: index into the colormap.
% length: number of pixels minus one of this color (optional).
%
currentfile byte readhexstring pop 0 get
/offset exch 3 mul def
/color_packet colormap offset 3 getinterval def
compression 0 eq
{
/number_pixels 3 def
}
{
currentfile byte readhexstring pop 0 get
/number_pixels exch 1 add 3 mul def
} ifelse
0 3 number_pixels 1 sub
{
pixels exch color_packet putinterval
} for
pixels 0 number_pixels getinterval
} bind def
/PseudoClassImage
{
%
% Display a PseudoClass image.
%
% Parameters:
% class: 0-PseudoClass or 1-Grayscale.
%
currentfile buffer readline pop
token pop /class exch def pop
class 0 gt
{
currentfile buffer readline pop
token pop /depth exch def pop
/grays columns 8 add depth sub depth mul 8 idiv string def
columns rows depth
[
columns 0 0
rows neg 0 rows
]
{ currentfile grays readhexstring pop } image
}
{
%
% Parameters:
% colors: number of colors in the colormap.
% colormap: red, green, blue color packets.
%
currentfile buffer readline pop
token pop /colors exch def pop
/colors colors 3 mul def
/colormap colors string def
currentfile colormap readhexstring pop pop
systemdict /colorimage known
{
columns rows 8
[
columns 0 0
rows neg 0 rows
]
{ PseudoClassPacket } false 3 colorimage
}
{
%
% No colorimage operator; convert to grayscale.
%
columns rows 8
[
columns 0 0
rows neg 0 rows
]
{ GrayPseudoClassPacket } image
} ifelse
} ifelse
} bind def
/DisplayImage
{
%
% Display a DirectClass or PseudoClass image.
%
% Parameters:
% x & y translation.
% x & y scale.
% label pointsize.
% image label.
% image columns & rows.
% class: 0-DirectClass or 1-PseudoClass.
% compression: 0-none or 1-RunlengthEncoded.
% hex color packets.
%
gsave
/buffer 512 string def
/byte 1 string def
/color_packet 3 string def
/pixels 768 string def
currentfile buffer readline pop
token pop /x exch def
token pop /y exch def pop
x y translate
currentfile buffer readline pop
token pop /x exch def
token pop /y exch def pop
currentfile buffer readline pop
token pop /pointsize exch def pop
x y scale
currentfile buffer readline pop
token pop /columns exch def
token pop /rows exch def pop
currentfile buffer readline pop
token pop /class exch def pop
currentfile buffer readline pop
token pop /compression exch def pop
class 0 gt { PseudoClassImage } { DirectClassImage } ifelse
grestore
showpage
} bind def
%%EndProlog
%%Page: 1 1
%%PageBoundingBox: 3147 1726 3157 1727
DisplayImage
3147 1726
10 1
12
10 1
0
0
1E22291E22291E22291E22291E22291E22291E22291E22291E22291E2229
%%PageTrailer
%%Trailer
%%EOF