#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# NOTE(review): shebang moved to line 1 (it is ignored anywhere else).
"""
#####################################################################
# ubuntu 20.04.1 lts (focal fossa) - minimal                        #
#####################################################################
system information
IP: 5.181.51.98
host: v2202101139816141041.nicesrv.de

ssh-key fingerprints
256 SHA256:GEKFDaLdFE07hQFK9Hbmooz4Ir76/REFGYIQJgJIIwU (ECDSA)
3072 SHA256:62nswpTE4Tauo2aqdT+FYkojp3F5AeJRW3tFUWMikck (RSA)
256 SHA256:ITPcjfnupqr5uzStjrochFUjz0EYzGF78XrIYsyxDwo (ED25519)
256 MD5:a2:46:b1:c9:22:90:c3:20:0d:fd:26:28:b7:95:90:fa (ECDSA)
3072 MD5:b1:c3:c5:c3:d7:3f:e0:3d:c3:e4:0d:80:17:bb:be:5e (RSA)
256 MD5:95:78:d8:f6:bd:51:36:97:b4:92:3b:cd:d1:8c:e4:7d (ED25519)

1134HuErQ-###########senger-Ghwin82-Start2021
"""
# SECURITY NOTE(review): the docstring above embeds host details and what
# looks like a credential fragment -- secrets should not live in source.

import os
import datetime

# Wall-clock start time, used for the runtime report at the end.
aTime = datetime.datetime.now()

# Stop any web server that may already be running on this host.
os.system("killall nginx")
os.system("killall apache2")
os.system("clear")

print("###############################################")
print("########        KI Autoblogger        ########")
print("######## by Sebastian Enger, M.Sc.    ########")
print("######## Version vom: 12.5.2021 - 1.9.b########")
print("###############################################")

print("Schritt 1: Installiere benötigte Linux Software.")
software_installer = list()
software_installer.append("apt-get update -y && apt-get upgrade -y")
software_installer.append("apt-get install -y php7.4-xmlrpc snap net-tools software-properties-common python3 python3-pip build-essential zlib1g-dev libncurses5-dev libgdbm-dev libnss3-dev libssl-dev libreadline-dev libffi-dev wget screen php php-cli php-fpm php-json php-common php-mysql php-zip php-gd php-mbstring php-curl php-xml php-pear php-bcmath")  # nginx
software_installer.append("snap install core; sudo snap refresh core")
software_installer.append("apt-get remove -y apache2 certbot")
software_installer.append("pip3 install --upgrade requests")
software_installer.append("apt -y autoremove")
software_installer.append("snap install --classic certbot")
software_installer.append("ln -s /snap/bin/certbot /usr/bin/certbot")
software_installer.append("killall apache2 nginx")

# NOTE(review): installation is currently DISABLED -- the original body was a
# bare no-op with the os.system call commented out.  Renamed the loop variable
# so it no longer shadows the `exec` builtin.
for install_cmd in software_installer:
    # os.system(install_cmd)
    pass

import configparser
config = configparser.ConfigParser()
config.read('/home/seo-auto-scaler/files/seo-auto-scaler.conf')

from pprint import pprint  # pretty-printer
import requests  # pip3 install --upgrade requests
import urllib.parse
import codecs
import socket
import fcntl
import struct
import json
import time
import sys
from datetime import date
import re as _re
from html.entities import html5 as _html5


def escape(s, quote=True):
    """
    Replace special characters "&", "<" and ">" to HTML-safe sequences.
    If the optional flag quote is true (the default), the quotation mark
    characters, both double quote (") and single quote (') characters are also
    translated.
    """
    # BUGFIX: the replacement entities had been collapsed to the literal
    # characters (e.g. replace("&", "&")), making the function a no-op and
    # the quote branch unparseable.  Restored the stdlib html.escape logic.
    s = s.replace("&", "&amp;")  # Must be done first!
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    if quote:
        s = s.replace('"', "&quot;")
        s = s.replace('\'', "&#x27;")
    return s


# see http://www.w3.org/TR/html5/syntax.html#tokenizing-character-references
_invalid_charrefs = {
    0x00: '\ufffd',  # REPLACEMENT CHARACTER
    0x0d: '\r',      # CARRIAGE RETURN
    0x80: '\u20ac',  # EURO SIGN
    0x81: '\x81',    #
    0x82: '\u201a',  # SINGLE LOW-9 QUOTATION MARK
    0x83: '\u0192',  # LATIN SMALL LETTER F WITH HOOK
    0x84: '\u201e',  # DOUBLE LOW-9 QUOTATION MARK
    0x85: '\u2026',  # HORIZONTAL ELLIPSIS
    0x86: '\u2020',  # DAGGER
    0x87: '\u2021',  # DOUBLE DAGGER
    0x88: '\u02c6',  # MODIFIER LETTER CIRCUMFLEX ACCENT
    0x89: '\u2030',  # PER MILLE SIGN
    0x8a: '\u0160',  # LATIN CAPITAL LETTER S WITH CARON
    0x8b: '\u2039',  # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    0x8c: '\u0152',  # LATIN CAPITAL LIGATURE OE
    0x8d: '\x8d',    #
    0x8e: '\u017d',  # LATIN CAPITAL LETTER Z WITH CARON
    0x8f: '\x8f',    #
    0x90: '\x90',    #
    0x91: '\u2018',  # LEFT SINGLE QUOTATION MARK
    0x92: '\u2019',  # RIGHT SINGLE QUOTATION MARK
    0x93: '\u201c',  # LEFT DOUBLE QUOTATION MARK
    0x94: '\u201d',  # RIGHT DOUBLE QUOTATION MARK
    0x95: '\u2022',  # BULLET
    0x96: '\u2013',  # EN DASH
    0x97: '\u2014',  # EM DASH
    0x98: '\u02dc',  # SMALL TILDE
    0x99: '\u2122',  # TRADE MARK SIGN
    0x9a: '\u0161',  # LATIN SMALL LETTER S WITH CARON
    0x9b: '\u203a',  # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    0x9c: '\u0153',  # LATIN SMALL LIGATURE OE
    0x9d: '\x9d',    #
    0x9e: '\u017e',  # LATIN SMALL LETTER Z WITH CARON
    0x9f: '\u0178',  # LATIN CAPITAL LETTER Y WITH DIAERESIS
}

_invalid_codepoints = {
    # 0x0001 to 0x0008
    0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8,
    # 0x000E to 0x001F
    0xe, 0xf, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
    0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
    # 0x007F to 0x009F
    0x7f, 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88,
    0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92,
    0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c,
    0x9d, 0x9e, 0x9f,
    # 0xFDD0 to 0xFDEF
    0xfdd0, 0xfdd1, 0xfdd2, 0xfdd3, 0xfdd4, 0xfdd5, 0xfdd6, 0xfdd7,
    0xfdd8, 0xfdd9, 0xfdda, 0xfddb, 0xfddc, 0xfddd, 0xfdde, 0xfddf,
    0xfde0, 0xfde1, 0xfde2, 0xfde3, 0xfde4, 0xfde5, 0xfde6, 0xfde7,
    0xfde8, 0xfde9, 0xfdea, 0xfdeb, 0xfdec, 0xfded, 0xfdee, 0xfdef,
    # others
    0xb, 0xfffe, 0xffff, 0x1fffe, 0x1ffff, 0x2fffe, 0x2ffff,
    0x3fffe, 0x3ffff, 0x4fffe, 0x4ffff, 0x5fffe, 0x5ffff,
    0x6fffe, 0x6ffff, 0x7fffe, 0x7ffff, 0x8fffe, 0x8ffff,
    0x9fffe, 0x9ffff, 0xafffe, 0xaffff, 0xbfffe, 0xbffff,
    0xcfffe, 0xcffff, 0xdfffe, 0xdffff, 0xefffe, 0xeffff,
    0xffffe, 0xfffff, 0x10fffe, 0x10ffff
}


def _replace_charref(s):
    """Regex callback: turn one matched character reference into text."""
    s = s.group(1)
    if s[0] == '#':
        # numeric charref
        if s[1] in 'xX':
            num = int(s[2:].rstrip(';'), 16)
        else:
            num = int(s[1:].rstrip(';'))
        if num in _invalid_charrefs:
            return _invalid_charrefs[num]
        if 0xD800 <= num <= 0xDFFF or num > 0x10FFFF:
            return '\uFFFD'
        if num in _invalid_codepoints:
            return ''
        return chr(num)
    else:
        # named charref
        if s in _html5:
            return _html5[s]
        # find the longest matching name (as defined by the standard)
        for x in range(len(s) - 1, 1, -1):
            if s[:x] in _html5:
                return _html5[s[:x]] + s[x:]
        else:
            return '&' + s


_charref = _re.compile(r'&(#[0-9]+;?'
                       r'|#[xX][0-9a-fA-F]+;?'
                       r'|[^\t\n\f <&#;]{1,32};?)')


def unescape(s):
    """
    Convert all named and numeric character references
    (e.g. &gt;, &#62;, &#x3e;) in the string s to the
    corresponding unicode characters.  This function uses the rules defined
    by the HTML 5 standard for both valid and invalid character references,
    and the list of HTML 5 named character references defined in
    html.entities.html5.
    """
    if '&' not in s:
        return s
    return _charref.sub(_replace_charref, s)


def get_ip_address(ifname):
    """Return the IPv4 address bound to interface *ifname* (e.g. 'eth0')."""
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl(
        s.fileno(),
        0x8915,  # SIOCGIFADDR
        struct.pack(b'256s', ifname[:15].encode())
    )[20:24])


def getWebpagesSimple(link):
    """
    Fetch *link* with a mobile user agent and return the stripped body text.

    Returns "" when the URL is not http(s), the response is not HTML/JSON
    with status 200, or any exception occurs (logged to stdout).
    """
    UserAgentMobile = "Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_2 like Mac OS X) AppleWebKit/603.2.4 (KHTML, like Gecko) FxiOS/7.5b3349 Mobile/14F89 Safari/603.2.4"
    HeadersSimple = {'user-agent': UserAgentMobile, 'Connection': 'keep-alive', 'Accept-Encoding': 'gzip, deflate'}
    # "https" is already covered by the "http" prefix check.
    if link.lower().startswith("http"):
        # use python request library for fetching
        try:
            r1 = requests.get(link, headers=HeadersSimple, timeout=300, verify=True)
            myStatus = r1.status_code
            myText = str(r1.text)
            myContent = str(r1.headers['content-type'])
            mT1 = myText.strip()
            # BUGFIX: parenthesized the "or" -- with Python's precedence the
            # original returned HTML pages regardless of the status code.
            if ("html" in myContent or "json" in myContent) and myStatus == 200:
                return mT1
        except Exception:
            print("Unexpected error: getWebpagesSimple(link)", sys.exc_info()[0])
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)
    return str("")


def createNginxConfig(mydomain, my_www_domain, myIP, mywwwroot):
    """Render the plain-HTTP nginx template and install it as /etc/nginx/nginx.conf."""
    with codecs.open("/home/seo-auto-scaler/files/nginx/nginx-standard.conf", 'r', encoding='utf8') as f:
        nginx_conf = f.read()
    x = nginx_conf.replace("MYDOMAIN", mydomain)
    x = x.replace("MY_WWW_DOMAIN", my_www_domain)
    x = x.replace("MYIP", myIP)
    x = x.replace("MYWWWROOT", mywwwroot)
    with codecs.open("/etc/nginx/nginx.conf", 'w+', encoding='utf8') as f:
        f.write(x)
    return True


def createNginxSSLConfig(mydomain, my_www_domain, myIP, mywwwroot, cert_path):
    """
    Copy the letsencrypt PEM files from *cert_path* into the local ssl/
    directory, then render and install the SSL nginx template.
    """

    def _copy_pem(src, dst):
        # Helper: copy one certificate file (closes handles via `with`;
        # the original leaked the read handles).
        with open(src) as r:
            content = r.read()
        with codecs.open(dst, 'w+', encoding='utf8') as f:
            f.write(content)

    for filename in os.listdir(cert_path):
        if filename.endswith(".pem"):
            lower = filename.lower()
            # BUGFIX: check "fullchain" before "chain" -- "chain" is a
            # substring of "fullchain", so fullchain*.pem used to be copied
            # over chain1.pem as well.
            if "fullchain" in lower:
                _copy_pem(cert_path + "/" + filename, "/home/seo-auto-scaler/files/ssl/fullchain1.pem")
            elif "chain" in lower:
                _copy_pem(cert_path + "/" + filename, "/home/seo-auto-scaler/files/ssl/chain1.pem")
            elif "privkey" in lower:
                _copy_pem(cert_path + "/" + filename, "/home/seo-auto-scaler/files/ssl/privkey1.pem")

    with codecs.open("/home/seo-auto-scaler/files/nginx/nginx-ssl.conf", 'r', encoding='utf8') as f:
        nginx_conf = f.read()
    x = nginx_conf.replace("MYDOMAIN", mydomain)
    x = x.replace("MY_WWW_DOMAIN", my_www_domain)
    x = x.replace("MYIP", myIP)
    x = x.replace("MYWWWROOT", mywwwroot)
    with codecs.open("/etc/nginx/nginx.conf", 'w+', encoding='utf8') as f:
        f.write(x)
    os.system("chmod -R 755 /home/seo-auto-scaler/files/")
    os.system("chown -R root:www-data /home/seo-auto-scaler/files/")
    return True


def createIndexFile(unaique_html_template, custom_link_top1, custom_link_top2,
                    custom_link_top3, custom_link_name_top1, custom_link_name_top2,
                    custom_link_name_top3, headline, structured_data, summary,
                    textbyai, textbyai_simple, topics, my_www_domain, mywwwroot,
                    google_analytics_id, optimized, article_score):
    """Fill the HTML template with article content and write <wwwroot>/index.html."""
    # NOTE(review): both branches produce the same plain string -- the
    # surrounding markup (likely colored <span>s) appears to have been lost
    # when the file was mangled.  TODO: restore the intended markup.
    if article_score > 0:
        as_score = '' + str(article_score) + ''
    else:
        as_score = '' + str(article_score) + ''
    with codecs.open(unaique_html_template, 'r', encoding='utf8') as f:
        html_tpl = f.read()
    x = html_tpl.replace("{custom_link_top1}", custom_link_top1)
    x = x.replace("{custom_link_top2}", custom_link_top2)
    x = x.replace("{custom_link_top3}", custom_link_top3)
    x = x.replace("{custom_link_name_top1}", custom_link_name_top1)
    x = x.replace("{custom_link_name_top2}", custom_link_name_top2)
    x = x.replace("{custom_link_name_top3}", custom_link_name_top3)
    x = x.replace("{title}", headline)
    x = x.replace("{headline}", headline)
    x = x.replace("{description}", headline)
    structured_data = structured_data.replace("https://www.unaique.net", "https://" + my_www_domain)
    x = x.replace("{structured_data}", structured_data)
    x = x.replace("{summary}", summary)
    x = x.replace("{textbyai}", textbyai)
    x = x.replace("{textbyai_simple}", textbyai_simple)
    x = x.replace("{topics}", topics)
    x = x.replace("{my_www_domain}", "https://" + my_www_domain)
    x = x.replace("{keywords}", topics)
    x = x.replace("{google_analytics_id}", google_analytics_id)
    x = x.replace("{optimized}", optimized)
    x = x.replace("{article_score}", as_score)
    with codecs.open(mywwwroot + "/index.html", 'w+', encoding='utf8') as f:
        f.write(x)
    os.system("chmod -R 755 " + mywwwroot)
    os.system("chown -R www-data:www-data " + mywwwroot)
    return True


def doCreateSitemaps(ssl_domain):
    """Substitute the live domain (and date) into robots.txt and sitemap.xml."""
    # /home/seo-auto-scaler/files/wwwroot/robots.txt
    with codecs.open("/home/seo-auto-scaler/files/wwwroot/robots.txt", 'r', encoding='utf8') as f:
        txt_tpl1 = f.read()
    x = txt_tpl1.replace("{ssl_domain}", ssl_domain)
    with codecs.open("/home/seo-auto-scaler/files/wwwroot/robots.txt", 'w+', encoding='utf8') as f:
        f.write(x)
    # /home/seo-auto-scaler/files/wwwroot/sitemap.xml
    with codecs.open("/home/seo-auto-scaler/files/wwwroot/sitemap.xml", 'r', encoding='utf8') as f:
        txt_tpl2 = f.read()
    x2 = txt_tpl2.replace("{ssl_domain}", ssl_domain + "/")
    d = str(date.today())
    x2 = x2.replace("{my_date}", d)
    with codecs.open("/home/seo-auto-scaler/files/wwwroot/sitemap.xml", 'w+', encoding='utf8') as f2:
        f2.write(x2)
    return True


def doSubmitSitemaps(ssl_domain):
    """
    Bing und Google sind wichtig. Yahoo informiert sich über Bing.
    Duckduckgo nimmt von Google und Bing die Informationen zur Sitemap.
    Damit ist sichergestellt, dass die neue Webseite von allen bekannten
    Crawlern indexiert wird und Traffic sendet.
    """
    my_sitemap = ssl_domain + "/" + "sitemap.xml"
    my_sitemap_bing = "https://www.bing.com/webmaster/ping.aspx?siteMap=" + my_sitemap
    my_sitemap_google = "https://www.google.com/webmasters/sitemaps/ping?sitemap=" + my_sitemap
    print("\t -> Informiere Bing über die neue Webseite.")
    web1 = getWebpagesSimple(my_sitemap_bing)
    print("\t -> Informiere Google über die neue Webseite.")
    web2 = getWebpagesSimple(my_sitemap_google)
    return True


def makeWebapiCall(unaique_api_request, unaique_api):
    """
    Start an article-generation job on the unaique.net API and poll (every
    30 s, up to 310 attempts) until the article is ready.  Returns the
    session-qualified API URL in either case.
    """
    # api_hook = "https://www.unaique.net/en/api/index.php?language="+language+"&keyword="+urllib.parse.quote(search_query)
    # https://www.unaique.net/en/api/index.php?session=ad16185319532ee05a91ebf4d2c3374e-API
    web1 = getWebpagesSimple(unaique_api_request)
    x = json.loads(web1)
    session = x.get("session")  # {title} {description}
    status = x.get("status")
    api_hook = unaique_api + "?session=" + session
    # BUGFIX: loop counted 310 attempts but the message claimed "von 100";
    # range(1, 311) also replaces the manual `i = i + 1`.
    for i in range(1, 311):
        time.sleep(30)
        web2 = getWebpagesSimple(api_hook)
        y = json.loads(web2)
        status2 = y.get("status")
        print("\t -> Versuche neuen Artikel von der KI abzuholen (" + str(status2) + ") -> Versuch: " + str(i) + " von 310.")
        if status2 == 200:
            print("\t -> Abholen des geschriebenen Artikels von der KI war erfolgreich.")
            return api_hook
    return api_hook


print("Schritt 2: Lese aus Konfiguration die Einstellungen aus.")
custom_link_top1 = str(config['DEFAULT']['custom_link_top1'])
custom_link_top2 = str(config['DEFAULT']['custom_link_top2'])
custom_link_top3 = str(config['DEFAULT']['custom_link_top3'])
custom_link_name_top1 = str(config['DEFAULT']['custom_link_name_top1'])
custom_link_name_top2 = str(config['DEFAULT']['custom_link_name_top2'])
custom_link_name_top3 = str(config['DEFAULT']['custom_link_name_top3'])
search_query = str(config['DEFAULT']['search_query'])
language_text = str(config['DEFAULT']['language_text'])
mydomain = str(config['DEFAULT']['mydomain'])
my_www_domain = str(config['DEFAULT']['my_www_domain'])
mywwwroot = str(config['DEFAULT']['mywwwroot'])
google_analytics_id = str(config['DEFAULT']['google_analytics_id'])
use_ssl = str(config['DEFAULT']['use_ssl'])
unaique_api = str(config['DEFAULT']['unaique_api'])
unaique_session = str(config['DEFAULT']['unaique_session'])

myIP = get_ip_address('eth0')
unaique_html_template = str("")
unaique_api_request = unaique_api + "?language=" + language_text + "&keyword=" + urllib.parse.quote(search_query)
ssl_domain = "https://" + my_www_domain
t = str("")  # adjective for the final status message ("deutschen"/"englischen")

if language_text == "de":
    unaique_html_template = "/home/seo-auto-scaler/files/wwwroot/de/index.tpl"
    t = "deutschen"
else:
    unaique_html_template = "/home/seo-auto-scaler/files/wwwroot/en/index.tpl"
    t = "englischen"

# A pre-existing session id takes precedence over starting a new job.
if len(unaique_session) > 1:
    unaique_api_request = unaique_api + "?session=" + unaique_session
# else:
#     unaique_api_request_1 = unaique_api+"?language="+language_text+"&keyword="+urllib.parse.quote(search_query)
#     unaique_api_request = makeWebapiCall(unaique_api_request_1, unaique_api)

print("Schritt 3: Erstelle das benötigte Webverzeichnis.")
try:
    os.system("rm -rf " + mywwwroot)
    os.system("rm -rf /usr/share/nginx/files/nginx.conf")
    os.system("rm -rf /etc/nginx/nginx.conf")
except Exception as e1:
    pass
try:
    os.makedirs(mywwwroot)
    os.system("chmod -R 755 " + mywwwroot)
    os.system("chown -R www-data:www-data " + mywwwroot)
except Exception as e1:
    pass
try:
    os.makedirs(mywwwroot + "/.well-known/acme-challenge/")
    os.system("chmod -R 755 " + mywwwroot + "/.well-known/acme-challenge/")
    os.system("chown -R www-data:www-data " + mywwwroot + "/.well-known/acme-challenge/")
except Exception as e1:
    pass
try:
    os.makedirs("/usr/share/nginx/files/")
except Exception as e1:
    pass

print("Schritt 4: Erstelle die benötigte Konfiguration für den Webserver.")
if use_ssl == "1":
    print("Schritt 4.1: Nutze SSL https://" + my_www_domain)
    # os.system("rm -rf /etc/letsencrypt/archive/"+mydomain+"/*")
    # os.system("rm -rf /etc/letsencrypt/archive/"+my_www_domain+"/*")
    # Certbot issuance is currently disabled:
    # os.system("certbot certonly --register-unsafely-without-email --agree-tos --dry-run -d "+mydomain+" -d "+my_www_domain+" --authenticator standalone")  # muss in live version raus: --dry-run
    a = "/etc/letsencrypt/archive/" + mydomain
    b = "/etc/letsencrypt/archive/" + my_www_domain
    if os.path.isdir(a):
        createNginxSSLConfig(mydomain, my_www_domain, myIP, mywwwroot, a)
    # BUGFIX: the original called os.path.isdir() without an argument
    # (TypeError at runtime); the www-domain path was clearly intended.
    elif os.path.isdir(b):
        createNginxSSLConfig(mydomain, my_www_domain, myIP, mywwwroot, b)
    doCreateSitemaps(ssl_domain)
else:
    ssl_domain = "http://" + my_www_domain
    print("Schritt 4.1: Nutze http://")
    createNginxConfig(mydomain, my_www_domain, myIP, mywwwroot)
    doCreateSitemaps(ssl_domain)

print("Schritt 5: Starte den Webserver.")
# os.system("/usr/sbin/nginx -t -c /etc/nginx/nginx.conf")
os.system("/usr/sbin/nginx -c /etc/nginx/nginx.conf")
# os.system("pidof nginx")

print("Schritt 6: Schreibe Text mit Hilfe Künstlicher Intelligenz (unaique.net API).")
unaique_api_request2 = makeWebapiCall(unaique_api_request, unaique_api)
web = getWebpagesSimple(unaique_api_request2)
y = json.loads(web)
headline = y.get("headline")  # {title} {description}
summary = y.get("summary")
# structured_data = y.get("structured_data")
# structured_data = unescape(structured_data)
textbyai = y.get("textbyai")
textbyai_simple = textbyai  # y.get("textbyai_advanced")
topics = y.get("topics")
topics = topics.replace("#", "")
status = y.get("status")
optimized = y.get("optimized")
article_score = int(y.get("article_score"))
# links: {custom_link_top1} {custom_link_name_top1}

words_content = textbyai.split(" ")
words = len(words_content)
today = date.today()
articleBody = summary
try:
    articleBody = " ".join(words_content[0:25])
except Exception as e1:
    pass

# NOTE(review): this template is empty -- the JSON-LD markup it replaces
# placeholders into appears to have been stripped when the file was mangled.
# TODO: restore the structured-data template.
structured_data = """ """
structured_data = structured_data.replace("{my_www_domain}", str(my_www_domain))
structured_data = structured_data.replace("{headline}", str(headline))
structured_data = structured_data.replace("{today}", str(today))
structured_data = structured_data.replace("{words}", str(words))
structured_data = structured_data.replace("{topics}", str(topics))
structured_data = structured_data.replace("{articlebody}", str(articleBody))

print("Schritt 8: Kopiere die CSS und JavaScript Dateien für die Webseite.")
os.system("/usr/bin/cp -rf /home/seo-auto-scaler/files/wwwroot/* " + mywwwroot)

print("Schritt 9: Erstelle die Webseite mit Inhalten und Links.")
createIndexFile(unaique_html_template, custom_link_top1, custom_link_top2,
                custom_link_top3, custom_link_name_top1, custom_link_name_top2,
                custom_link_name_top3, headline, structured_data, summary,
                textbyai, textbyai_simple, topics, my_www_domain, mywwwroot,
                google_analytics_id, optimized, article_score)

print("Schritt 10: Informiere Google und Bing über die neue Webseite.")
doSubmitSitemaps(ssl_domain)

print("Schritt 11: Webseite http://" + my_www_domain + " ist nun verfügbar!")
print("Schritt 12: Informiere deinen Domain Provider:")
print("\t -> Domain '" + str(mydomain) + "' setze 'A-Record' für '@' mit IP '" + str(myIP) + "'")
print("\t -> Domain '" + str(my_www_domain) + "' setze 'A-Record' für 'www' mit IP '" + str(myIP) + "'")

bTime = datetime.datetime.now()
deltaRuntime = bTime - aTime
print("Schritt 13: Veröffentlichung eines " + str(t) + " Artikels zum Thema '" + str(search_query) + "' war erfolgreich.")
print("Schritt 14: Laufzeit des Programmes: " + str(deltaRuntime) + " Minuten")
print("Schritt 15: KI Autoblogger fertig - Beende mich.")
sys.exit(0)

"""
nginx installieren

unaique api:
https://www.unaique.net/en/api/index.php?language=de&keyword=Haus%20in%20Berlin
https://www.unaique.net/en/api/index.php?session=aac291dd9290171e924b3439a3fd18bb-UNAIQUENET

urlencode = urllib.parse.quote()
urldecode = urllib.parse.unquote()

"summary"
"structured_data"
"textbyai"
"topics"
"status": 200

{
    "status": 202,
    "session": "91a401469e495261e5b2fa6e24086d04-API",
    "fetch_url": "https:\/\/www.unaique.net\/en\/api\/index.php?session=91a401469e495261e5b2fa6e24086d04-API"
}
"""

# certbot certonly --nginx --register-unsafely-without-email --agree-tos --dry-run -d artikelschreiben.com -d www.artikelschreiben.com --webroot-path /home/wwwroot/.well-known/acme-challenge --authenticator webroot --installer nginx

"""
Successfull:
certbot certonly --register-unsafely-without-email --agree-tos --dry-run -d artikelschreiben.com -d www.artikelschreiben.com --authenticator standalone

nginx beenden und dann erst:
certbot certonly --register-unsafely-without-email --agree-tos -d artikelschreiben.com -d www.artikelschreiben.com --authenticator standalone

/etc/letsencrypt/live/artikelschreiben.com/fullchain.pem
Your key file has been saved at:
/etc/letsencrypt/live/artikelschreiben.com/privkey.pem

-rw-r--r-- 1 root root 1.9K Feb  1 12:12 cert1.pem
-rw-r--r-- 1 root root 1.6K Feb  1 12:12 chain1.pem
-rw-r--r-- 1 root root 3.4K Feb  1 12:12 fullchain1.pem
-rw------- 1 root root 1.7K Feb  1 12:12 privkey1.pem
root@v2202101139816141041:/etc/letsencrypt/archive/artikelschreiben.com#
"""