diff --git a/.DS_Store b/.DS_Store
deleted file mode 100644
index 1769204..0000000
Binary files a/.DS_Store and /dev/null differ
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 19fe3c7..8482269 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -5,24 +5,15 @@ on:
     paths:
       - ".github/workflows/**.yml"
       - "Dockerfile"
-      - "docker-compose.yml"
       - "**.py"
       - "requirements.txt"
 
 env:
   REGISTRY: ghcr.io
   IMAGE_NAME: ${{ github.repository }}
-  # CONTAINER_NAME: soybot-container
-
   KAMATERA_USER: soymilk
   KAMATERA_PORT: 22
   KAMATERA_PATH: ~/app/soybot
-
-  POSTGRES_PORT: 5432
-  POSTGRES_USER: postgres
-  POSTGRES_DB: soybot
-  POSTGRES_PASSWORD: ${{ secrets.POSTGRES_PASSWORD }}
-
   GCPLOGS_AUTH_PATH: /root/.config/gcplogs/auth.json
   GCP_PROJECT: eesoybot
 
@@ -59,15 +50,15 @@ jobs:
           cache-from: type=gha
           cache-to: type=gha,mode=max
 
-      - name: Copy docker-compose.yml to Kamatera
-        run: |
-          echo "${{ secrets.KAMATERA_KEY }}" > private_key.pem
-          chmod 600 private_key.pem
-          scp -o StrictHostKeyChecking=no \
-            -i private_key.pem \
-            -P ${{ env.KAMATERA_PORT }} \
-            ./docker-compose.yml \
-            ${{ env.KAMATERA_USER }}@${{ secrets.KAMATERA_HOST }}:${{ env.KAMATERA_PATH }}
+      # - name: Copy docker-compose.yml to Kamatera
+      #   run: |
+      #     echo "${{ secrets.KAMATERA_KEY }}" > private_key.pem
+      #     chmod 600 private_key.pem
+      #     scp -o StrictHostKeyChecking=no \
+      #       -i private_key.pem \
+      #       -P ${{ env.KAMATERA_PORT }} \
+      #       ./docker-compose.yml \
+      #       ${{ env.KAMATERA_USER }}@${{ secrets.KAMATERA_HOST }}:${{ env.KAMATERA_PATH }}
 
   deploy:
     needs: build-and-push
@@ -80,17 +71,12 @@ jobs:
           username: soymilk
           key: ${{ secrets.KAMATERA_KEY }}
           script: |
-            cd ${{ env.KAMATERA_PATH }}
-
-            export TOKEN=${{ secrets.TOKEN }}
-            export OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }}
-            export GCP_PROJECT=${{ env.GCP_PROJECT }}
-            export POSTGRES_PORT=${{ env.POSTGRES_PORT }}
-            export POSTGRES_USER=${{ env.POSTGRES_USER }}
-            export POSTGRES_DB=${{ env.POSTGRES_DB }}
-            export POSTGRES_PASSWORD=${{ env.POSTGRES_PASSWORD }}
-
-            docker compose down
-            docker system prune -f
-            docker compose pull
-            docker compose up -d
+            docker stop soybot || true
+            docker run -d \
+              --name soybot \
+              -e TOKEN=${{ secrets.TOKEN }} \
+              -e OPENAI_API_KEY=${{ secrets.OPENAI_API_KEY }} \
+              -e GCP_PROJECT=${{ secrets.GCP_PROJECT }} \
+              --log-driver=gcplogs \
+              --log-opt gcp-project=${{ env.GCP_PROJECT }} \
+              ghcr.io/soymilk/soybot:latest
diff --git a/.gitignore b/.gitignore
index 8921622..6c0f5a1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+.DS_Store
+
 *.ipynb
 
 # logs
diff --git a/alembic.ini b/alembic.ini
deleted file mode 100644
index d9f94e0..0000000
--- a/alembic.ini
+++ /dev/null
@@ -1,110 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# path to migration scripts
-script_location = alembic
-
-# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
-# Uncomment the line below if you want the files to be prepended with date and time
-# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
-# for all available tokens
-# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
-
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python-dateutil library that can be
-# installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to dateutil.tz.gettz()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the
-# "slug" field
-# truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; This defaults
-# to alembic/versions. When using multiple version
-# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator" below.
-# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
-
-# version path separator; As mentioned above, this is the character used to split
-# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
-# Valid values for version_path_separator are:
-#
-# version_path_separator = :
-# version_path_separator = ;
-# version_path_separator = space
-version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.
-
-# set to 'true' to search source files recursively
-# in each "version_locations" directory
-# new in Alembic version 1.10
-# recursive_version_locations = false
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-sqlalchemy.url = driver://user:pass@localhost/dbname
-
-
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts. See the documentation for further
-# detail and examples
-
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARN
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARN
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
diff --git a/alembic/README b/alembic/README
deleted file mode 100644
index 98e4f9c..0000000
--- a/alembic/README
+++ /dev/null
@@ -1 +0,0 @@
-Generic single-database configuration.
\ No newline at end of file
diff --git a/alembic/env.py b/alembic/env.py
deleted file mode 100644
index 36112a3..0000000
--- a/alembic/env.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from logging.config import fileConfig
-
-from sqlalchemy import engine_from_config
-from sqlalchemy import pool
-
-from alembic import context
-
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
-config = context.config
-
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-if config.config_file_name is not None:
-    fileConfig(config.config_file_name)
-
-# add your model's MetaData object here
-# for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
-target_metadata = None
-
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
-
-
-def run_migrations_offline() -> None:
-    """Run migrations in 'offline' mode.
-
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well.  By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-
-    Calls to context.execute() here emit the given string to the
-    script output.
-
-    """
-    url = config.get_main_option("sqlalchemy.url")
-    context.configure(
-        url=url,
-        target_metadata=target_metadata,
-        literal_binds=True,
-        dialect_opts={"paramstyle": "named"},
-    )
-
-    with context.begin_transaction():
-        context.run_migrations()
-
-
-def run_migrations_online() -> None:
-    """Run migrations in 'online' mode.
-
-    In this scenario we need to create an Engine
-    and associate a connection with the context.
-
-    """
-    connectable = engine_from_config(
-        config.get_section(config.config_ini_section, {}),
-        prefix="sqlalchemy.",
-        poolclass=pool.NullPool,
-    )
-
-    with connectable.connect() as connection:
-        context.configure(
-            connection=connection, target_metadata=target_metadata
-        )
-
-        with context.begin_transaction():
-            context.run_migrations()
-
-
-if context.is_offline_mode():
-    run_migrations_offline()
-else:
-    run_migrations_online()
diff --git a/alembic/script.py.mako b/alembic/script.py.mako
deleted file mode 100644
index 55df286..0000000
--- a/alembic/script.py.mako
+++ /dev/null
@@ -1,24 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from alembic import op
-import sqlalchemy as sa
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
-
-
-def upgrade() -> None:
-    ${upgrades if upgrades else "pass"}
-
-
-def downgrade() -> None:
-    ${downgrades if downgrades else "pass"}
diff --git a/commands/__init__.py b/commands/__init__.py
deleted file mode 100644
index 92ef016..0000000
--- a/commands/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from commands.starburst import *
diff --git a/commands/starburst.py b/commands/starburst.py
deleted file mode 100644
index 65d9efc..0000000
--- a/commands/starburst.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import asyncio
-import aiohttp
-from random import choice
-from bs4 import BeautifulSoup
-
-starburst_images = []
-
-
-async def starburst_session(session, link_starto):
-    async with session.get(link_starto) as response:
-        html = await response.text()
-        imgs = BeautifulSoup(html, 'lxml').find(
-            'div', class_='MSG-list8C').find_all('img')
-        for img in imgs:
-            starburst_images.append(img['data-src'])
-
-
-async def get_starburst_images():
-    creations_url = 'https://home.gamer.com.tw/creationDetail.php'
-    sns = [4279438, 4443834, 4655069, 4848281, 5046574, 5190390, 5375874]
-    links_starto = [f'{creations_url}?sn={sn}' for sn in sns]
-    async with aiohttp.ClientSession() as session:
-        aws = [starburst_session(session, l) for l in links_starto]
-        print('獲取星爆圖中...')
-        await asyncio.gather(*aws)
-
-
-async def starburst_stream():
-    if not starburst_images:
-        await get_starburst_images()
-    return choice(starburst_images)
diff --git a/docker-compose.yml b/docker-compose.yml
deleted file mode 100644
index 882b374..0000000
--- a/docker-compose.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-version: '3.8'
-services:
-  bot:
-    image: ghcr.io/eesoymilk/soybot-app:main
-    depends_on:
-      db:
-        condition: service_healthy
-    # volumes:
-    #   - $HOME/.config/gcloud/application_default_credentials.json:/gcp/creds.json
-    environment:
-      TOKEN: ${TOKEN}
-      OPENAI_API_KEY: ${OPENAI_API_KEY}
-      POSTGRES_HOST: db
-      POSTGRES_PORT: ${POSTGRES_PORT}
-      POSTGRES_USER: ${POSTGRES_USER}
-      POSTGRES_DB: ${POSTGRES_DB}
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-      # GOOGLE_APPLICATION_CREDENTIALS: /gcp/creds.json
-      # GCPLOGS_AUTH_PATH: /gcp/creds.json
-    logging:
-      driver: gcplogs
-      options:
-        gcp-project: ${GCP_PROJECT}
-
-  db:
-    image: postgres:15.2
-    restart: always
-    user: postgres
-    environment:
-      POSTGRES_USER: ${POSTGRES_USER}
-      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
-      POSTGRES_DB: ${POSTGRES_DB}
-    volumes:
-      - soybot-data:/var/lib/postgresql/data
-    healthcheck:
-      test: [ "CMD-SHELL", "pg_isready" ]
-      interval: 10s
-      timeout: 5s
-      retries: 5
-
-volumes:
-  soybot-data:
diff --git a/extensions/mojitrack.py b/extensions/mojitrack.py
deleted file mode 100644
index 1060115..0000000
--- a/extensions/mojitrack.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import re
-import asyncio
-from typing import Optional
-from datetime import datetime
-
-from discord import (
-    Message,
-    Reaction,
-    Member,
-    Guild,
-    PartialEmoji,
-    Emoji,
-    Sticker,
-)
-from discord.ext.commands import Cog, Bot
-
-from attr import frozen
-from emoji import emoji_list
-
-from utils import get_lumberjack
-
-log = get_lumberjack(__name__)
-
-
-@frozen
-class EmojiUsage:
-    id: Optional[int] = None
-    message_id: int
-
-    custom_emoji_id: Optional[int] = None
-    unicode_emoji: Optional[str] = None
-
-    timestamp: datetime
-
-
-class EmojiNotFoundError(Exception):
-    pass
-
-
-class MojitrackCog(Cog):
-    custom_emoji_regex = r'<(a?):([a-zA-Z0-9_]{2,32}):([0-9]{18,22})>'
-
-    def __init__(self, bot: Bot):
-        self.bot = bot
-
-    @staticmethod
-    async def get_or_fetch_emoji(
-        guild: Guild, emoji_id: int
-    ) -> Emoji:
-        if (e := guild.get_emoji(emoji_id)) is not None:
-            return e
-        return await guild.fetch_emoji(emoji_id)
-
-    async def _fetch_emoji_usages(
-        self, msg: Message, timestamp: datetime
-    ) -> tuple[tuple[Emoji | str, ...], tuple[EmojiUsage, ...]]:
-        # match all valid discord custom emojis, also known as a PartialEmoji
-        partial_emojis = [
-            PartialEmoji.from_str(
-                match.group()
-            ) for match in re.finditer(self.custom_emoji_regex, msg.content)
-        ]
-
-        # we only need custom emojis from this current guild
-        fetched_emojis = await asyncio.gather(*[
-            self.get_or_fetch_emoji(msg.guild, e.id) for e in partial_emojis
-        ], return_exceptions=True)
-        custom_emojis = [r for r in fetched_emojis if isinstance(r, Emoji)]
-
-        # find all unicode emojis
-        unicode_emojis = [e['emoji'] for e in emoji_list(msg.content)]
-        if not (emojis := custom_emojis + unicode_emojis):
-            raise EmojiNotFoundError('No emojis are usged in this message')
-
-        # create instances of emoji_usages
-        custom_emoji_usages = [
-            EmojiUsage(
-                message_id=msg.id,
-                timestamp=timestamp,
-                custom_emoji_id=e.id
-            ) for e in custom_emojis
-        ]
-        unicode_emoji_usages = [
-            EmojiUsage(
-                message_id=msg.id,
-                timestamp=timestamp,
-                unicode_emoji=e
-            ) for e in unicode_emojis
-        ]
-        emoji_usages = custom_emoji_usages + unicode_emoji_usages
-
-        return tuple(emojis), tuple(emoji_usages)
-
-    def _get_sticker_usages(
-        self, msg: Message, timestamp: datetime
-    ) -> tuple[tuple[Sticker, ...], tuple[EmojiUsage, ...]]:
-        ...
-
-    @Cog.listener()
-    async def on_message(self, msg: Message):
-        try:
-            # TODO: store emoji usages and message to db
-            emojis, emoji_usages = await self._fetch_emoji_usages(
-                msg, datetime.utcnow()
-            )
-        except EmojiNotFoundError as err:
-            log.debug(f'{err}')
-            return
-
-        log.info(' | '.join([
-            f'{msg.guild}',
-            f'{msg.channel}',
-            f'{msg.author}',
-            f'{emojis}'
-            f'{emoji_usages}'
-        ]))
-
-    @Cog.listener()
-    async def on_reaction_add(self, rxn: Reaction, user: Member):
-        if isinstance(rxn.emoji, PartialEmoji):
-            return
-
-        # TODO: add reaction to db
-        if isinstance(rxn.emoji, Emoji):
-            ...
-        else:
-            ...
-
-    @Cog.listener()
-    async def on_reaction_remove(self, rxn: Reaction, user: Member):
-        ...
-
-    @Cog.listener()
-    async def on_message_edit(self, before: Message, after: Message):
-        ...
-
-
-async def setup(bot: Bot):
-    await bot.add_cog(MojitrackCog(bot))
diff --git a/extensions/nthu.py b/extensions/nthu.py
deleted file mode 100644
index c171145..0000000
--- a/extensions/nthu.py
+++ /dev/null
@@ -1,119 +0,0 @@
-from dataclasses import dataclass
-from datetime import datetime
-from enum import IntEnum
-import random
-import discord
-import asyncio
-
-from discord import app_commands
-from discord.ext import commands
-from textwrap import dedent
-from utils import ANSI, Config, get_lumberjack
-
-nthu_guild_id = 771595191638687784
-log = get_lumberjack('NTHU', ANSI.Yellow)
-angry_dogs_emojis = (
-    '<:D11angrydog:946700998024515635>',
-    '<:D121notangrydog:976082704275763210>',
-    '<:D12angrydog_hat:991677468794703914>',
-    '<:D12angrydog_mag:953983783995068436>',
-    '',
-    '',
-    '<:D12angrydog_sleep:991677470216552620>',
-    '<:D12angrydog_starburst:991676592533295164>',
-    '<:D6AwkChiwawa:791134264842387496>',
-    '<:D87dog:980870480804339712>',
-)
-ugly_dogs_emojis = (
-    '<:D5NonChiwawa:791134356563034122>',
-    '<:D7Chiwawa:958419880997171290>',
-    '<:D7MuchUglierChiwawa:1052637556388397167>',
-    '<:D7UglierChiwawa:1052638311539277844>',
-    '<:D86gaydog:982197038382985246>',
-    '<:Dg8dog:1052636415260897360>',
-)
-
-
-@app_commands.context_menu(name='憤怒狗狗')
-@app_commands.guilds(nthu_guild_id)
-@app_commands.checks.cooldown(1, 30.0, key=lambda i: (i.channel.id, i.user.id))
-async def angry_dog_react(interaction: discord.Interaction, message: discord.Message):
-    await interaction.response.defer(ephemeral=True, thinking=True)
-    await asyncio.gather(*(
-        message.add_reaction(emoji)
-        for emoji in angry_dogs_emojis
-    ))
-    await interaction.followup.send(content='**憤怒狗狗**已送出')
-
-
-@app_commands.context_menu(name='醜狗醜醜')
-@app_commands.guilds(nthu_guild_id)
-@app_commands.checks.cooldown(1, 30.0, key=lambda i: (i.channel.id, i.user.id))
-async def ugly_dog_react(interaction: discord.Interaction, message: discord.Message):
-    await interaction.response.defer(ephemeral=True, thinking=True)
-    await asyncio.gather(*(
-        message.add_reaction(emoji)
-        for emoji in ugly_dogs_emojis
-    ))
-    await interaction.followup.send(content='**醜狗醜醜**已送出')
-
-
-class NthuCog(commands.Cog):
-    def __init__(self, bot: commands.Bot):
-        self.bot = bot
-        self.guild = self.bot.get_guild(nthu_guild_id)
-        if self.guild is not None:
-            self.daily_bs_channel = self.guild.get_channel(
-                771596516443029516)
-
-    @commands.Cog.listener()
-    async def on_member_join(self, mem: discord.Member) -> None:
-        if mem.guild.id != nthu_guild_id:
-            return
-
-        await mem.guild.system_channel.send(dedent(f'''\
-            歡迎{mem.mention}加入**{mem.guild.name}**!
-
-            請至{mem.guild.get_channel(771684498986500107).mention}留下您的系級和簡短的自我介紹,
-            讓我們更加認識你/妳喔!'''))
-
-    @commands.Cog.listener()
-    async def on_user_update(self, before: discord.User, after: discord.User):
-        if self.guild is None:
-            self.guild = await self.bot.fetch_guild(nthu_guild_id)
-
-        try:
-            if (member := self.guild.get_member(before.id)) is None:
-                member = await self.guild.fetch_member(before.id)
-        except discord.NotFound:
-            return
-
-        if before.avatar == after.avatar or before.id not in Config.user_ids:
-            return
-
-        try:
-            if self.daily_bs_channel is None:
-                self.daily_bs_channel = await self.guild.fetch_channel(771596516443029516)
-        except discord.NotFound:
-            return
-
-        await self.daily_bs_channel.send(
-            f'主要! **{member.mention}**又換頭貼了!',
-            embed=discord.Embed(
-                description='➡原頭貼➡\n\n⬇新頭貼⬇',
-                color=member.color,
-                timestamp=datetime.now(),
-            ).set_thumbnail(
-                url=before.avatar
-            ).set_image(
-                url=after.avatar
-            ))
-
-
-async def setup(bot: commands.Bot):
-    bot.tree.add_command(angry_dog_react)
-    bot.tree.add_command(ugly_dog_react)
-    await bot.add_cog(
-        NthuCog(bot),
-        guild=discord.Object(nthu_guild_id)
-    )
diff --git a/extensions/streak.py b/extensions/streak.py
deleted file mode 100644
index dcd015e..0000000
--- a/extensions/streak.py
+++ /dev/null
@@ -1,84 +0,0 @@
-from dataclasses import dataclass
-from datetime import datetime
-from enum import IntEnum
-import random
-import discord
-
-from discord.ext import commands
-from textwrap import dedent
-from utils import ANSI, get_lumberjack
-
-
-log = get_lumberjack(__name__, ANSI.Yellow)
-
-
-class MessageStreak:
-    def __init__(self, message: discord.Message):
-        self.channel = message.channel
-        self.init_streak(message)
-
-    def init_streak(self, message: discord.Message):
-        self.content = message.content
-        self.stickers = message.stickers
-        self.reference = message.reference
-        self.author_ids = [message.author.id]
-        self.streak_count = 1
-
-    async def do_streak(self, message: discord.Message):
-        if not self.validate_streak(message):
-            self.init_streak(message)
-            return
-
-        if self.reference is not None:
-            ref_id = self.reference.message_id
-            if not message.reference or message.reference.message_id != ref_id:
-                self.reference = None
-
-        self.author_ids.append(message.author.id)
-        self.streak_count += 1
-
-        if self.streak_count == 3:
-            self.streak_count += 1
-            await self.channel.send(
-                self.content,
-                stickers=self.stickers,
-                reference=self.reference
-            )
-        elif self.streak_count > 4:
-            if random.random() <= 0.3:
-                await self.channel.send(random.choice([
-                    '阿玉在洗版',
-                    '度度在拉屎',
-                    '這裡不是洗版區喔 注意一下'
-                ]))
-
-    def validate_streak(self, message: discord.Message):
-        if message.author.id in self.author_ids:
-            return False
-        if self.content and self.content == message.content:
-            return True
-        if self.stickers and self.stickers == message.stickers:
-            return True
-        return False
-
-
-class StreakCog(commands.Cog):
-    def __init__(self, bot: commands.Bot):
-        self.bot = bot
-        self._streaks: dict[int, MessageStreak] = dict()
-
-    @commands.Cog.listener(name='on_message')
-    async def message_streak(self, message: discord.Message):
-        if not (message.guild) or message.author.bot:
-            return
-
-        # init a streak for a new channel
-        if message.channel.id not in self._streaks:
-            self._streaks[message.channel.id] = MessageStreak(message)
-            return
-
-        await self._streaks[message.channel.id].do_streak(message)
-
-
-async def setup(bot: commands.Bot):
-    await bot.add_cog(StreakCog(bot))
diff --git a/utils/db.py b/utils/db.py
deleted file mode 100644
index e69de29..0000000