From e367e301e300a72a553708da12c38716ff7cc386 Mon Sep 17 00:00:00 2001 From: sgrtye Date: Sun, 17 Dec 2023 15:49:03 +0000 Subject: [PATCH] initial setting --- .github/actions/action.yml | 17 ++ .github/workflows/action-novel.yml | 27 +++ .github/workflows/action-submanager.yml | 27 +++ .github/workflows/action-telebot.yml | 27 +++ README.md | 4 +- novel/Dockerfile | 13 ++ novel/main.py | 196 ++++++++++++++++++ novel/requirements.txt | 3 + submanager/Dockerfile | 16 ++ submanager/main.py | 264 ++++++++++++++++++++++++ submanager/requirements.txt | 2 + telebot/Dockerfile | 12 ++ telebot/main.py | 171 +++++++++++++++ telebot/requirements.txt | 4 + 14 files changed, 782 insertions(+), 1 deletion(-) create mode 100644 .github/actions/action.yml create mode 100644 .github/workflows/action-novel.yml create mode 100644 .github/workflows/action-submanager.yml create mode 100644 .github/workflows/action-telebot.yml create mode 100644 novel/Dockerfile create mode 100644 novel/main.py create mode 100644 novel/requirements.txt create mode 100644 submanager/Dockerfile create mode 100644 submanager/main.py create mode 100644 submanager/requirements.txt create mode 100644 telebot/Dockerfile create mode 100644 telebot/main.py create mode 100644 telebot/requirements.txt diff --git a/.github/actions/action.yml b/.github/actions/action.yml new file mode 100644 index 0000000..09bfd8b --- /dev/null +++ b/.github/actions/action.yml @@ -0,0 +1,17 @@ +name: Docker + +runs: + using: 'composite' + + steps: + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_HUB_USERNAME }} + password: ${{ secrets.DOCKER_HUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/action-novel.yml b/.github/workflows/action-novel.yml new file mode 100644 index 0000000..aae6079 --- /dev/null +++ 
# (patch continuation: b/.github/workflows/action-novel.yml, new file, 27 lines)
# --- .github/workflows/action-novel.yml ---
name: Novel build

on:
  push:
    paths:
      # FIX: push paths filters are glob patterns relative to the repo root;
      # './novel' never matches anything, so this workflow would only ever
      # run via workflow_dispatch. 'novel/**' fires on any file under novel/.
      - 'novel/**'
  workflow_dispatch:

jobs:
  Mcdonalds:
    runs-on: ${{ vars.SYSTEM }}
    timeout-minutes: ${{ fromJSON(vars.TIMEOUT) }}

    steps:
      - uses: actions/checkout@v4

      - name: Set up docker build environment
        # FIX: a local action is referenced by the DIRECTORY containing its
        # action.yml. This repo has .github/actions/action.yml, so the path is
        # ./.github/actions — there is no 'Docker' directory, and the old path
        # fails at job start with "can't find action.yml".
        # NOTE(review): the composite action reads ${{ secrets.* }} directly,
        # but composite actions cannot see the secrets context — the
        # credentials should be passed in as `with:` inputs; confirm against
        # .github/actions/action.yml.
        uses: ./.github/actions

      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          context: ./novel
          push: true
          platforms: linux/arm64/v8
          # FIX: build-push-action has no 'names' input (unknown inputs are
          # silently ignored), and 'tags' takes full image references — a bare
          # 'latest' is an invalid reference. One 'tags' line replaces both.
          tags: docker.io/${{ secrets.DOCKER_HUB_USERNAME }}/novel:latest

# --- .github/workflows/action-submanager.yml (new file, 27 lines) ---
name: Submanager build

on:
  push:
    paths:
      # FIX: glob pattern, not a relative path (see note above in this file).
      - 'submanager/**'
  workflow_dispatch:

jobs:
  Mcdonalds:
    runs-on: ${{ vars.SYSTEM }}
    timeout-minutes: ${{ fromJSON(vars.TIMEOUT) }}

    steps:
      - uses: actions/checkout@v4

      - name: Set up docker build environment
        # FIX: reference the directory that contains action.yml.
        uses: ./.github/actions

      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          context: ./submanager
          push: true
          platforms: linux/arm64/v8
          # FIX: full image reference under 'tags'; 'names' is not an input.
          tags: docker.io/${{ secrets.DOCKER_HUB_USERNAME }}/submanager:latest

# --- .github/workflows/action-telebot.yml (new file, 27 lines;
#     truncated here — the remainder continues on the next source line) ---
name: Telebot build

on:
  push:
    paths:
      # FIX: glob pattern, not a relative path.
      - 'telebot/**'
  workflow_dispatch:

jobs:
  Mcdonalds:
    runs-on: ${{ vars.SYSTEM }}
    timeout-minutes: ${{ fromJSON(vars.TIMEOUT) }}

    steps:
      - uses: actions/checkout@v4

      - name: Set up docker build environment
        # FIX: reference the directory that contains action.yml.
        uses: ./.github/actions

      - name: Build and push
        uses: docker/build-push-action@v5
        with:
          context: ./telebot
          push: true
          platforms:
linux/arm64/v8 + tags: latest + names: docker.io/${{ secrets.DOCKER_HUB_USERNAME }}/telebot:latest \ No newline at end of file diff --git a/README.md b/README.md index 92459da..a958a36 100644 --- a/README.md +++ b/README.md @@ -1 +1,3 @@ -# docker \ No newline at end of file +# docker build + +Personal repository for building docker images \ No newline at end of file diff --git a/novel/Dockerfile b/novel/Dockerfile new file mode 100644 index 0000000..c465713 --- /dev/null +++ b/novel/Dockerfile @@ -0,0 +1,13 @@ +FROM python:3.12-alpine + +ENV TZ=Europe/London + +COPY ./requirements.txt /requirements.txt +# RUN pip install --upgrade pip +RUN pip install -r /requirements.txt + +RUN mkdir /novel +VOLUME /novel + +COPY ./main.py /main.py +CMD ["python", "-u", "main.py"] \ No newline at end of file diff --git a/novel/main.py b/novel/main.py new file mode 100644 index 0000000..f752b57 --- /dev/null +++ b/novel/main.py @@ -0,0 +1,196 @@ +import os +import time +import json +import telebot +import requests +import datetime +import threading +import http.server +import socketserver +from lxml import etree + +CACHE_PATH = "/novel/cache.json" + +TELEBOT_TOKEN = os.environ.get("TELEBOT_TOKEN") +TELEBOT_USER_ID = os.environ.get("TELEBOT_USER_ID") +PROXY_URL = os.environ.get("PROXY_URL") + +if TELEBOT_TOKEN is None or TELEBOT_USER_ID is None or PROXY_URL is None: + print("Environment variables not fulfilled") + +bot = telebot.TeleBot(TELEBOT_TOKEN) + +checkedTime = time.time() + + +class HealthCheckHandler(http.server.BaseHTTPRequestHandler): + def log_message(self, format, *args): + # Override the log_message method to do nothing + pass + + def do_GET(self): + if self.path == "/update": + global titles + + self.send_response(200) + self.send_header("Content-type", "application/json") + self.end_headers() + + filtered_titles = { + key: value + for key, value in titles.items() + if not key.endswith("previous") + } + response = json.dumps(filtered_titles) + 
self.wfile.write(response.encode("utf-8")) + elif self.path == "/status": + global checkedTime + global loopTime + + self.send_response(200) + self.send_header("Content-type", "text/plain") + self.end_headers() + if time.time() - checkedTime < loopTime: + self.wfile.write(b"OK") + else: + self.wfile.write(b"Failed") + else: + self.send_response(404) + self.send_header("Content-type", "text/plain") + self.end_headers() + self.wfile.write(b"Not Found") + + +def start_health_server(): + with socketserver.TCPServer(("0.0.0.0", 8008), HealthCheckHandler) as httpd: + httpd.serve_forever() + + +health_thread = threading.Thread(target=start_health_server) +health_thread.daemon = True +health_thread.start() + +proxies = [] + +try: + response = requests.get(PROXY_URL) + + if response.status_code == 200: + file_content = response.text + ip_list = file_content.strip().split("\n") + + for ip_entry in ip_list: + ip, port, username, password = ip_entry.rstrip("\r").split(":") + if ip != "154.95.36.199": + proxies.append((ip, port, username, password)) + + if len(proxies) == 0: + raise Exception("No available proxy") + + else: + raise Exception("Proxy server not responding") + +except Exception as e: + print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), str(e)) + bot.send_message(TELEBOT_USER_ID, str(e)) + raise SystemExit + +print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), "Novel monitor started") + +books = [ + ("46527", "乱世书"), + ("35463", "稳住别浪"), + ("50012", "还说你不是仙人"), + ("49986", "都重生了谁谈恋爱啊"), + ("43660", "这一世,我再也不渣青梅竹马了"), + ("50565", "我满级天师,你让我进规则怪谈?"), +] +i = 0 +j = 0 +loopTime = len(books) * 5 * 60 +sleepInterval = loopTime / len(books) + +titles = dict() +if os.path.exists(CACHE_PATH): + with open(CACHE_PATH, "r") as file: + titles = json.load(file) + booknames = set(name for _, name in books) + booknames_previous = set(name + "previous" for _, name in books) + dict_copy = titles.copy() + for book in dict_copy.keys(): + if book not in booknames 
and book not in booknames_previous: + del titles[book] + +headers = { + "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.6167.8 Safari/537.36" +} + +try: + while True: + for index in range(len(proxies)): + j = (j + 1) % len(proxies) + + ip, port, username, password = proxies[j] + proxy = { + "http": f"http://{username}:{password}@{ip}:{port}", + "https": f"http://{username}:{password}@{ip}:{port}", + } + + try: + url = f"https://www.69shuba.com/book/{books[i][0]}.htm" + html = requests.get( + url, + headers=headers, + proxies=proxy, + ) + html.encoding = "gbk" + tree = etree.HTML(html.text, parser=None) + + div_element = tree.xpath('//div[contains(@class, "qustime")]')[0] + span_element = div_element.xpath("./ul/li[1]/a/span")[0] + title = span_element.text + + if title != titles.get(books[i][1]): + if title == titles.get(books[i][1] + "previous"): + break + + if titles.get(books[i][1]) is not None: + bot.send_message( + TELEBOT_USER_ID, + f"{books[i][1]}\n'{titles.get(books[i][1])}'\n->'{title}'\n{url}", + ) + + titles[books[i][1] + "previous"] = titles.get(books[i][1]) + titles[books[i][1]] = title + + with open(CACHE_PATH, "w") as file: + json.dump(titles, file) + + break + + except Exception as e: + print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), str(e)) + print( + f"Error occured when checking {books[i][1]} with proxy {ip}:{port}" + ) + print( + f"Error occured during iteration {index} on line {e.__traceback__.tb_lineno}" + ) + time.sleep(sleepInterval) + if index == len(proxies) - 1: + raise e + + checkedTime = time.time() + time.sleep(sleepInterval) + i = (i + 1) % len(books) + +except Exception as e: + bot.send_message(TELEBOT_USER_ID, "Novel monitor encountered unexpected exception") + bot.send_message( + TELEBOT_USER_ID, + f"The exception occured when processing book {books[i][1]} with error message: {str(e)}", + ) + print( + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + 
"Error occured, program terminated", + ) diff --git a/novel/requirements.txt b/novel/requirements.txt new file mode 100644 index 0000000..6c8f271 --- /dev/null +++ b/novel/requirements.txt @@ -0,0 +1,3 @@ +lxml +requests +pyTelegramBotAPI \ No newline at end of file diff --git a/submanager/Dockerfile b/submanager/Dockerfile new file mode 100644 index 0000000..9f6473b --- /dev/null +++ b/submanager/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.12-alpine + +ENV TZ=Europe/London + +COPY ./requirements.txt /requirements.txt +# RUN pip install --upgrade pip +RUN pip install -r /requirements.txt + +RUN mkdir /sub +VOLUME /sub + +RUN mkdir /conf.d +VOLUME /conf.d + +COPY ./main.py /main.py +CMD ["python", "-u", "main.py"] \ No newline at end of file diff --git a/submanager/main.py b/submanager/main.py new file mode 100644 index 0000000..81844a2 --- /dev/null +++ b/submanager/main.py @@ -0,0 +1,264 @@ +import os +import json +import time +import shutil +import requests +import schedule +import datetime + +DIRECTORY_PATH = "/sub" +NGINX_PATH = "/conf.d" + +LOCATION_DICT = { + "dalian": "大连", + "foshan": "佛山", + "tianjin": "天津", + "jinghai": "静海", + "beijing": "北京", + "dongguan": "东莞", + "tangshan": "唐山", + "guangzhou": "广州", +} + +USERNAME = os.environ.get("USERNAME") +PASSWORD = os.environ.get("PASSWORD") + +LOGIN_URL = os.environ.get("LOGIN_URL") +INBOUND_URL = os.environ.get("INBOUND_URL") + +if USERNAME is None or PASSWORD is None or LOGIN_URL is None or INBOUND_URL is None: + print("Environment variables not fulfilled") + + +def get_credentials(): + session = requests.Session() + session.post(LOGIN_URL, data={"username": USERNAME, "password": PASSWORD}) + response = session.post(INBOUND_URL) + + if response.status_code != 200: + return None + + results = [] + for inbound in response.json()["obj"]: + client = { + "name": inbound["remark"], + "uuid": json.loads(inbound["settings"])["clients"][0]["id"], + "host": json.loads(inbound["settings"])["clients"][0]["id"][0:5] + + 
".sgrtye.tk", + "port": str(inbound["port"]), + "path": json.loads(inbound["streamSettings"])["wsSettings"]["path"][1:], + } + results.append(client) + + return results + + +def get_provider_ip(): + headers = headers = {"Content-Type": "application/json"} + data = {"key": "o1zrmHAF", "type": "v4"} + + response = requests.post( + "https://api.hostmonit.com/get_optimization_ip", json=data, headers=headers + ) + if response.status_code != 200: + return None + + data = response.json() + + providers = {} + providers["yidong"] = { + f"移动节点{index + 1}_IP": value + for index, value in enumerate([entry["ip"] for entry in data["info"]["CM"]]) + } + providers["liantong"] = { + f"联通节点{index + 1}_IP": value + for index, value in enumerate([entry["ip"] for entry in data["info"]["CU"]]) + } + providers["dianxin"] = { + f"电信节点{index + 1}_IP": value + for index, value in enumerate([entry["ip"] for entry in data["info"]["CT"]]) + } + + return providers + + +def read_txt_file(file_path): + with open(file_path, "r") as file: + lines = file.readlines() + + content_dict = {} + for line in lines: + key, value = line.strip().split(":") + content_dict[key.strip()] = value.strip() + + return content_dict + + +def get_location_ip(): + locations = {} + + for filename in os.listdir(os.path.join(DIRECTORY_PATH, "file")): + if filename.endswith(".txt"): + file_path = os.path.join(DIRECTORY_PATH, "file", filename) + file_name = os.path.splitext(filename)[0] + content = read_txt_file(file_path) + locations[file_name] = content + + return locations + + +def generate_config(servers, uuid, host, path, config_path, save_path): + os.makedirs(os.path.dirname(save_path), exist_ok=True) + + with open(config_path, "r", encoding="utf-8") as file: + config_content = file.read() + + for key, value in servers.items(): + config_content = config_content.replace(key, value) + config_content = config_content.replace("UUID_FULL", uuid) + config_content = config_content.replace("CLIENT_PATH", path) + config_content 
= config_content.replace("HOST_ADDRESS", host) + + with open(save_path, "w", encoding="utf-8") as file: + file.write(config_content) + + +def generate_check_config(locations, uuid, host, path, save_path): + os.makedirs(os.path.dirname(save_path), exist_ok=True) + + config_content = "proxies:" + + for loc, loc_value in locations.items(): + for key, value in loc_value.items(): + config_content = ( + config_content + "\n" + ' - {"name":"' + f"{LOCATION_DICT.get(loc, loc) + key[0:2] + key[4:5]}" + + '","type":"vless","server":"' + + f"{value}" + + '","port":443,"uuid":"UUID_FULL","tls":true,"servername":"HOST_ADDRESS","network":"ws","ws-opts":{"path":"/CLIENT_PATH","headers":{"host":"HOST_ADDRESS"}},"client-fingerprint":"chrome"}' + ) + + config_content = config_content.replace("UUID_FULL", uuid) + config_content = config_content.replace("CLIENT_PATH", path) + config_content = config_content.replace("HOST_ADDRESS", host) + + with open(save_path, "w", encoding="utf-8") as file: + file.write(config_content) + + +def update_client_config(locations, providers, credentials): + directory_path = os.path.join(DIRECTORY_PATH, "conf") + if os.path.exists(directory_path): + shutil.rmtree(directory_path) + print( + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "Old client config files removed", + ) + + for client in credentials: + name, uuid, host, path = ( + client["name"], + client["uuid"], + client["host"], + client["path"], + ) + + servers = { + key: value + for provider in providers.values() + for key, value in provider.items() + } + config_path = os.path.join(DIRECTORY_PATH, "file", "unruled.yaml") + save_path = os.path.join( + DIRECTORY_PATH, "conf", rf"{name}-{path}/china/config.yaml" + ) + generate_config(servers, uuid, host, path, config_path, save_path) + + save_path = os.path.join( + DIRECTORY_PATH, "conf", rf"{name}-{path}/check/config.yaml" + ) + generate_check_config(locations, uuid, host, path, save_path) + + for loc, loc_value in locations.items(): + 
for pro, pro_value in providers.items(): + servers = { + **{ + f"实时节点{index + 1}_IP": value + for index, value in enumerate(pro_value.values()) + }, + **loc_value, + } + config_path = os.path.join(DIRECTORY_PATH, "file", "ruled.yaml") + save_path = os.path.join( + DIRECTORY_PATH, "conf", rf"{name}-{path}/{loc}/{pro}/config.yaml" + ) + generate_config(servers, uuid, host, path, config_path, save_path) + + print( + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "New client config files generaged", + ) + + +def update_nginx_config(credentials): + with open( + os.path.join(DIRECTORY_PATH, "file", "inbound.conf"), "r", encoding="utf-8" + ) as file: + inbound_template = file.read() + + inbound = "" + for client in credentials: + port, path = client["port"], client["path"] + tmp = inbound_template.replace("PATH", path) + tmp = tmp.replace("PORT", port) + inbound = inbound + "\n\n" + tmp + + with open( + os.path.join(DIRECTORY_PATH, "file", "nginx.conf"), "r", encoding="utf-8" + ) as file: + nginx_template = file.read() + + config = nginx_template.replace("INBOUNDS", inbound) + with open(os.path.join(NGINX_PATH, "nginx.conf"), "w", encoding="utf-8") as file: + file.write(config) + + print( + datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "Successfully generaged new NGINX config", + ) + + +def update(nginx=False): + try: + locations = get_location_ip() + providers = get_provider_ip() + + if providers is None: + raise Exception("No provider data available") + + credentials = get_credentials() + + if credentials is None: + raise Exception("No credentials available") + + if nginx: + update_nginx_config(credentials) + + update_client_config(locations, providers, credentials) + + except Exception as e: + print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), str(e)) + print(f"Error occured on line {e.__traceback__.tb_lineno}") + + +if __name__ == "__main__": + update(nginx=True) + + schedule.every().day.at("00:00").do(update) + 
schedule.every().day.at("06:00").do(update) + schedule.every().day.at("12:00").do(update) + schedule.every().day.at("18:00").do(update) + + while True: + schedule.run_pending() + time.sleep(10) diff --git a/submanager/requirements.txt b/submanager/requirements.txt new file mode 100644 index 0000000..651d147 --- /dev/null +++ b/submanager/requirements.txt @@ -0,0 +1,2 @@ +requests +schedule \ No newline at end of file diff --git a/telebot/Dockerfile b/telebot/Dockerfile new file mode 100644 index 0000000..8e40a17 --- /dev/null +++ b/telebot/Dockerfile @@ -0,0 +1,12 @@ +FROM python:3.12-alpine + +ENV TZ=Europe/London + +RUN apk add gcc musl-dev linux-headers + +COPY ./requirements.txt /requirements.txt +# RUN pip install --upgrade pip +RUN pip install -r /requirements.txt + +COPY ./main.py /main.py +CMD ["python", "-u", "main.py"] \ No newline at end of file diff --git a/telebot/main.py b/telebot/main.py new file mode 100644 index 0000000..505fb6f --- /dev/null +++ b/telebot/main.py @@ -0,0 +1,171 @@ +import os +import time +import json +import docker +import psutil +import telebot +import requests +import datetime + +TELEBOT_TOKEN = os.environ.get("TELEBOT_TOKEN") +NOVEL_URL = os.environ.get("NOVEL_URL") + +if TELEBOT_TOKEN is None or NOVEL_URL is None: + print("Environment variables not fulfilled") + +bot = telebot.TeleBot(TELEBOT_TOKEN, parse_mode="MarkdownV2") + +commands = [ + telebot.types.BotCommand("info", "Get server usage status"), + telebot.types.BotCommand("novel", "Get novel latest chapters"), + telebot.types.BotCommand("restore", "Restart all exited containers"), +] + +bot.set_my_commands(commands) + + +def MarkdownV2Encode(reply): + text = "\n".join(reply) + return f"```\n{text}```" + + +def dockerUsage(): + # Create a Docker client + client = docker.DockerClient("unix:///var/run/docker.sock") + + # Get all containers + containers = client.containers.list() + + total_cpu_usage = 0 + total_memory_usage = 0 + + reply = [] + + # Iterate over containers 
and retrieve resource usage + reply.append(f"{'Name':<10} {'CPU':<5} {'Memory':<5}") + + stats1 = dict() + stats2 = dict() + + for container in containers: + stats1[container.name] = container.stats(stream=False) + + time.sleep(10) + + for container in containers: + stats2[container.name] = container.stats(stream=False) + + for container in containers: + # Get container name + container_name = container.name + + # Get container stats + stats = stats1[container.name] + cpu_stats1 = stats["cpu_stats"] + cpu_usage1 = cpu_stats1["cpu_usage"]["total_usage"] + cpu_system1 = cpu_stats1["system_cpu_usage"] + + stats = stats2[container.name] + cpu_stats2 = stats["cpu_stats"] + cpu_usage2 = cpu_stats2["cpu_usage"]["total_usage"] + cpu_system2 = cpu_stats2["system_cpu_usage"] + + memory_stats = stats["memory_stats"] + + # CPU usage percentage + cpu_delta = cpu_usage2 - cpu_usage1 + system_delta = cpu_system2 - cpu_system1 + cpu_percentage = (cpu_delta / system_delta) * 100 + + # Memory usage in MB + memory_usage = memory_stats["usage"] + memory_usage_mb = memory_usage / (1024 * 1024) + + total_cpu_usage += cpu_percentage + total_memory_usage += memory_usage + + reply.append( + f"{container_name:<10} {cpu_percentage:<5.2f}% {memory_usage_mb:<5.1f} MB" + ) + + # Total usage + total_memory_usage_mb = total_memory_usage / (1024 * 1024) + + reply.append(f"\nDocker CPU Usage: {total_cpu_usage:.2f} %") + reply.append(f"Docker Memory Usage: {total_memory_usage_mb:.2f} MB") + + return MarkdownV2Encode(reply) + + +def systemUsage(): + reply = [] + reply.append(f"Total CPU Usage: {psutil.cpu_percent(interval=1):.2f} %") + reply.append( + f"Total Memory Usage: {psutil.virtual_memory().used / (1024 ** 2):.2f} MB" + ) + reply.append(f"Total Swap Usage: {psutil.swap_memory().used / (1024 ** 2):.2f} MB") + load = psutil.getloadavg() + reply.append(f"Load Average: {load[0]:.2f} | {load[1]:.2f} | {load[2]:.2f}") + + return MarkdownV2Encode(reply) + + +def novelUpdate(): + response = 
requests.get(NOVEL_URL) + reply = [] + + if response.status_code == 200: + content = response.content.decode("utf-8") + data_dict = json.loads(content) + + for novel, title in data_dict.items(): + reply.append(f"{novel}: \n{title}") + else: + reply.append(f"Novel update is not currently available") + + return MarkdownV2Encode(reply) + + +def restore(): + client = docker.DockerClient("unix:///var/run/docker.sock") + containers = client.containers.list(all=True, filters={"status": "exited"}) + exited_containers = [ + c + for c in containers + if c.attrs["HostConfig"]["RestartPolicy"]["Name"] != "unless-stopped" + ] + reply = [] + + for container in exited_containers: + container.start() + reply.append(f"Restarting container: {container.name}") + + if not reply: + reply.append("All containers are running") + + return MarkdownV2Encode(reply) + + +# Booting up all containers that were not turned off intentially +restore() + + +@bot.message_handler(commands=["info"]) +def handle_info_command(message): + bot.reply_to(message, systemUsage()) + bot.reply_to(message, dockerUsage()) + + +@bot.message_handler(commands=["novel"]) +def handle_novel_command(message): + bot.reply_to(message, novelUpdate()) + + +@bot.message_handler(commands=["restore"]) +def handle_novel_command(message): + bot.reply_to(message, restore()) + + +if __name__ == "__main__": + print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), "Telegram bot started") + bot.infinity_polling() diff --git a/telebot/requirements.txt b/telebot/requirements.txt new file mode 100644 index 0000000..73b6a7d --- /dev/null +++ b/telebot/requirements.txt @@ -0,0 +1,4 @@ +psutil +docker +requests +pyTelegramBotAPI \ No newline at end of file