Skip to content

Commit

Permalink
Merge pull request #15 from RusticPotatoes/new_schema_changes
Browse files Browse the repository at this point in the history
New schema changes
  • Loading branch information
extreme4all authored Mar 16, 2024
2 parents ba6ff03 + 1940c98 commit 9abcd58
Show file tree
Hide file tree
Showing 33 changed files with 1,270 additions and 109 deletions.
41 changes: 41 additions & 0 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,54 @@
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Remote Attach",
"type": "python",
"request": "attach",
"connect": {
"host": "localhost",
"port": 5678
},
"pathMappings": [
{
"localRoot": "${workspaceFolder}",
"remoteRoot": "/app"
}
]
},
{
"name": "Python: Current File",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"justMyCode": true
},
{
"name": "Compose Up Dev",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/src/main.py", // replace with your script
"console": "integratedTerminal",
"justMyCode": true,
"preLaunchTask": "compose-up-dev" // name of the task to run before launching
},
{
"name": "Run Dev",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/src/main.py",
"console": "integratedTerminal",
"justMyCode": true,
"args": ["--root_path", "/", "--api_port", "5000"],
"env": {
"KAFKA_HOST": "localhost:9094",
"DATABASE_URL": "mysql+aiomysql://root:root_bot_buster@localhost:3306/playerdata",
"POOL_TIMEOUT": "30",
"POOL_RECYCLE": "30"
},
"cwd": "${workspaceFolder}/"
}
]
}

2 changes: 1 addition & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"[python]": {
"editor.defaultFormatter": "ms-python.black-formatter",
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit"
Expand Down
17 changes: 17 additions & 0 deletions .vscode/tasks.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "compose-up",
      "type": "shell",
      "command": "docker-compose down --volumes && docker-compose up --build -d",
      "isBackground": true
    },
    {
      "label": "compose-up-dev",
      "type": "shell",
      "command": "docker-compose -f docker-compose-dev.yml down --volumes && docker-compose -f docker-compose-dev.yml up --build -d",
      "isBackground": true
    }
  ]
}
4 changes: 4 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@ WORKDIR /project
COPY ./requirements.txt /project
RUN pip install --no-cache-dir -r requirements.txt

# debugpy (successor to PTVSD) lets an IDE attach to Python inside the container.
# The ARG must be declared here, otherwise the `INSTALL_PTVSD` build argument
# passed from docker-compose is never consumed and debugpy is never installed.
# Defaults to "false" so production images stay debugger-free.
ARG INSTALL_PTVSD=false
RUN if [ "$INSTALL_PTVSD" = "true" ] ; then pip install debugpy ; fi

# copy the scripts to the folder
COPY ./src /project/src

Expand Down
32 changes: 32 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Developer convenience targets for building and running the stack.
# Declare EVERY target as phony so make never skips one because a
# file/directory of the same name exists in the repo.
.PHONY: build up down docker-restart clean cleanbuild updev create activate requirements

# Build all images defined in docker-compose.yml.
build:
	docker-compose build

# Start the full stack detached.
up:
	docker-compose up -d

# Stop the stack, keeping volumes.
down:
	docker-compose down

# Rebuild and restart using the docker compose v2 plugin CLI.
docker-restart:
	docker compose down
	docker compose up --build -d

# Stop the stack AND drop volumes (wipes DB/Kafka state).
clean:
	docker-compose down --volumes

# Full rebuild from a clean slate (foreground, for watching logs).
cleanbuild: clean
	docker-compose up --build

# Clean, then bring up only the dev dependencies (kafka/mysql).
updev: clean
	docker-compose -f docker-compose-dev.yml up -d

# Create a local virtualenv.
create:
	python3 -m venv .venv

# NOTE(review): this cannot activate the venv in your shell — each recipe
# line runs in its own sub-shell, and `source` is a bashism that may fail
# under /bin/sh. Run `source .venv/bin/activate` directly instead.
activate:
	source .venv/bin/activate

# Install Python dependencies into the active environment.
requirements:
	pip install -r requirements.txt
116 changes: 116 additions & 0 deletions docker-compose-dev.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
# Dev-only stack: Kafka + topic setup + MySQL, without the worker service
# (run the worker locally via the "Run Dev" / "Compose Up Dev" launch configs).
version: '3'
services:
  kafka:
    container_name: kafka
    image: bitnami/kafka:3.5.1-debian-11-r3
    environment:
      - ALLOW_PLAINTEXT_LISTENER=yes
      # EXTERNAL listener (9094) is advertised as localhost so a worker
      # running on the host can connect; in-cluster clients use kafka:9092.
      - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094
      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT
      - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094
      - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=false
    # volumes:
    #   - ./kafka:/bitnami/kafka:rw
    expose:
      - 9094
      - 9092
    ports:
      # Port mappings are quoted: unquoted colon-separated scalars are a
      # YAML 1.1 implicit-typing trap (sexagesimal integers).
      # - "9092:9092"
      - "9094:9094"
    healthcheck:
      test: ["CMD", "kafka-topics.sh", "--list", "--bootstrap-server", "localhost:9092"]
      interval: 30s
      timeout: 10s
      retries: 5
    networks:
      - botdetector-network

  # kafdrop:
  #   container_name: kafdrop
  #   image: obsidiandynamics/kafdrop:latest
  #   environment:
  #     - KAFKA_BROKERCONNECT=kafka:9092
  #     - JVM_OPTS=-Xms32M -Xmx64M
  #     - SERVER_SERVLET_CONTEXTPATH=/
  #   ports:
  #     - "9999:9000"
  #   restart: on-failure
  #   networks:
  #     - botdetector-network
  #   depends_on:
  #     kafka:
  #       condition: service_healthy

  # One-shot container that creates the Kafka topics, then exits.
  kafka_setup:
    container_name: kafka_setup
    image: bot-detector/kafka_setup
    build:
      context: ./kafka_setup
    command: ["python", "setup_kafka.py"]
    environment:
      - KAFKA_BROKER=kafka:9092
    networks:
      - botdetector-network
    # ports:
    #   - "9092:9092"
    depends_on:
      kafka:
        condition: service_healthy

  mysql:
    container_name: database
    build:
      context: ./mysql
    image: bot-detector/mysql:latest
    environment:
      - MYSQL_ROOT_PASSWORD=root_bot_buster
    volumes:
      # Schema/seed scripts run once on first container start.
      - ./mysql/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
      # - ./mysql/mount:/var/lib/mysql # creates persistence
    ports:
      - "3306:3306"
    expose:
      - 3306
    networks:
      - botdetector-network
    healthcheck:
      test: ["CMD-SHELL", "mysqladmin ping -h localhost -u root -proot_bot_buster"]
      interval: 10s
      retries: 3
      start_period: 30s
      timeout: 5s

  # worker:
  #   container_name: worker
  #   image: bot-detector/highscore_worker
  #   build:
  #     context: .
  #     dockerfile: Dockerfile
  #     target: base
  #     args:
  #       root_path: /
  #       api_port: 5000
  #       # INSTALL_PTVSD: true
  #   # command: bash -c "apt update && apt install -y curl && sleep infinity"
  #   command: python src/main.py
  #   # ports:
  #   #   - "5678:5678"
  #   environment:
  #     - KAFKA_HOST=kafka:9092
  #     - DATABASE_URL=mysql+aiomysql://root:root_bot_buster@mysql:3306/playerdata
  #     - POOL_TIMEOUT=30
  #     - POOL_RECYCLE=30
  #     # - ENABLE_DEBUGPY=true
  #     # - PYDEVD_DISABLE_FILE_VALIDATION=1
  #   networks:
  #     - botdetector-network
  #   volumes:
  #     - ./src:/project/src
  #   depends_on:
  #     kafka:
  #       condition: service_healthy
  #     mysql:
  #       condition: service_healthy

networks:
  botdetector-network:
40 changes: 25 additions & 15 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@ services:
expose:
- 9094
- 9092
ports:
- 9092:9092
- 9094:9094
healthcheck:
test: ["CMD", "kafka-topics.sh", "--list", "--bootstrap-server", "localhost:9092"]
interval: 30s
Expand All @@ -22,21 +25,21 @@ services:
networks:
- botdetector-network

kafdrop:
container_name: kafdrop
image: obsidiandynamics/kafdrop:latest
environment:
- KAFKA_BROKERCONNECT=kafka:9092
- JVM_OPTS=-Xms32M -Xmx64M
- SERVER_SERVLET_CONTEXTPATH=/
ports:
- 9999:9000
restart: on-failure
networks:
- botdetector-network
depends_on:
kafka:
condition: service_healthy
# kafdrop:
# container_name: kafdrop
# image: obsidiandynamics/kafdrop:latest
# environment:
# - KAFKA_BROKERCONNECT=kafka:9092
# - JVM_OPTS=-Xms32M -Xmx64M
# - SERVER_SERVLET_CONTEXTPATH=/
# ports:
# - 9999:9000
# restart: on-failure
# networks:
# - botdetector-network
# depends_on:
# kafka:
# condition: service_healthy

kafka_setup:
container_name: kafka_setup
Expand All @@ -62,6 +65,8 @@ services:
volumes:
- ./mysql/docker-entrypoint-initdb.d:/docker-entrypoint-initdb.d
# - ./mysql/mount:/var/lib/mysql # creates persistence
ports:
- 3307:3306
# ports:
# - 3306:3306
expose:
Expand All @@ -85,13 +90,18 @@ services:
args:
root_path: /
api_port: 5000
INSTALL_PTVSD: true
# command: bash -c "apt update && apt install -y curl && sleep infinity"
command: python src/main.py
# ports:
# - 5678:5678
environment:
- KAFKA_HOST=kafka:9092
- DATABASE_URL=mysql+aiomysql://root:root_bot_buster@mysql:3306/playerdata
- POOL_TIMEOUT=30
- POOL_RECYCLE=30
# - ENABLE_DEBUGPY=true
# - PYDEVD_DISABLE_FILE_VALIDATION=1
networks:
- botdetector-network
volumes:
Expand Down
40 changes: 39 additions & 1 deletion mysql/docker-entrypoint-initdb.d/01_tables.sql
Original file line number Diff line number Diff line change
Expand Up @@ -123,4 +123,42 @@ CREATE TABLE `playerHiscoreData` (
UNIQUE KEY `Unique_player_date` (`Player_id`,`ts_date`),
CONSTRAINT `FK_Players_id` FOREIGN KEY (`Player_id`) REFERENCES `Players` (`id`) ON DELETE RESTRICT ON UPDATE RESTRICT
);
CREATE TRIGGER `hiscore_date_OnInsert` BEFORE INSERT ON `playerHiscoreData` FOR EACH ROW SET new.ts_date = DATE(new.timestamp);
CREATE TRIGGER `hiscore_date_OnInsert` BEFORE INSERT ON `playerHiscoreData` FOR EACH ROW SET new.ts_date = DATE(new.timestamp);

# One row per scrape of a player's hiscore. The STORED generated column
# record_date plus the unique key enforce at most one scrape per player
# per calendar day.
CREATE TABLE scraper_data (
    scraper_id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
    created_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    record_date DATE AS (DATE(created_at)) STORED,  # derived from created_at
    # NOTE(review): unlike playerHiscoreData, there is no FOREIGN KEY to
    # Players(id) here — confirm that is intentional.
    player_id INT UNSIGNED NOT NULL,
    UNIQUE KEY unique_player_per_day (player_id, record_date)
);

# Lookup table of skill names; TINYINT UNSIGNED key caps it at 255 rows.
CREATE TABLE skills (
    skill_id TINYINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, # < 255
    skill_name VARCHAR(50) NOT NULL,
    UNIQUE KEY unique_skill_name (skill_name)
);
# Lookup table of activity/boss names; TINYINT UNSIGNED key caps it at 255 rows.
CREATE TABLE activities (
    activity_id TINYINT UNSIGNED AUTO_INCREMENT PRIMARY KEY, # < 255
    activity_name VARCHAR(50) NOT NULL,
    UNIQUE KEY unique_activity_name (activity_name)
);


# Junction table: one skill value per (scrape, skill). Rows are removed
# automatically when the parent scrape or skill is deleted (CASCADE).
CREATE TABLE player_skills (
    scraper_id BIGINT UNSIGNED NOT NULL,
    skill_id TINYINT UNSIGNED NOT NULL,
    skill_value INT UNSIGNED NOT NULL DEFAULT 0, # < 200 000 000
    FOREIGN KEY (scraper_id) REFERENCES scraper_data(scraper_id) ON DELETE CASCADE,
    FOREIGN KEY (skill_id) REFERENCES skills(skill_id) ON DELETE CASCADE,
    PRIMARY KEY (scraper_id, skill_id)
);

# Junction table: one activity/boss count per (scrape, activity); INT UNSIGNED
# because counts can exceed the 65k range of SMALLINT.
CREATE TABLE player_activities (
    scraper_id BIGINT UNSIGNED NOT NULL,
    activity_id TINYINT UNSIGNED NOT NULL,
    activity_value INT UNSIGNED NOT NULL DEFAULT 0, # some guy could get over 65k kc
    FOREIGN KEY (scraper_id) REFERENCES scraper_data(scraper_id) ON DELETE CASCADE,
    FOREIGN KEY (activity_id) REFERENCES activities(activity_id) ON DELETE CASCADE,
    PRIMARY KEY (scraper_id, activity_id)
);
Loading

0 comments on commit 9abcd58

Please sign in to comment.