wcifbot.py
import re
import time
from pprint import pprint

import praw
import requests
from apscheduler.schedulers.blocking import BlockingScheduler

import config

# Comment IDs that have already been handled.
cache = []

reddit = praw.Reddit(client_id=config.CLIENT_ID,
                     client_secret=config.CLIENT_SECRET,
                     user_agent=config.USER_AGENT,
                     username=config.USERNAME,
                     password=config.PASSWORD)

# Only comments posted after the bot starts are processed.
start_time = int(time.time())


def check_for_new_posts():
    """Scan the configured thread for new comments containing MyAnimeList links."""
    submission = reddit.submission(id=config.THREAD_ID)
    # Resolve "load more comments" placeholders so every item is a real Comment.
    submission.comments.replace_more(limit=0)
    for comment in submission.comments:
        # Ignore comments posted before the bot started.
        if start_time > comment.created_utc:
            continue
        # Ignore comments that have already been replied to.
        if comment.id in cache:
            continue
        # Look for a MyAnimeList anime or manga link in the comment body.
        regex = r"myanimelist\.net\/(anime|manga)\/(\d+)"
        mal_link = re.search(regex, comment.body)
        if not mal_link:
            continue
        api_data = get_streams_data(mal_link.group(1), mal_link.group(2))
        title, streams = parse_streams_data(api_data)
        pprint(streams)
        # comment.reply(f"""I found streams for {title} available on
        # the following services: {", ".join(sorted(streams))}""")
        # cache.append(comment.id)


def get_streams_data(content_type, mal_id):
    """Fetch streaming availability for the given MAL entry from the configured API."""
    api_url = config.API.format(type=content_type, id=mal_id)
    r = requests.get(api_url)
    return r.json()


def parse_streams_data(api_data):
    """Return the entry's title and the list of sites it can be streamed on."""
    sites = list(api_data["Sites"].keys())
    return api_data["title"], sites


if __name__ == '__main__':
    print(f"Authenticated successfully as: {reddit.user.me()}")
    print(f"Current time is: {start_time}")
    print("Listening for posts...")
    # BlockingScheduler runs check_for_new_posts in the foreground at a fixed interval.
    scheduler = BlockingScheduler()
    scheduler.add_job(check_for_new_posts, "interval", seconds=config.CRON_INTERVAL)
    scheduler.start()
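
The script imports a local config module that is not part of this file. A minimal sketch of what it might contain, assuming every value below is a placeholder and that config.API is a URL template with {type} and {id} fields:

# config.py (hypothetical sketch; all values are placeholders)

# Reddit script-app credentials used by praw.Reddit(...)
CLIENT_ID = "your-reddit-app-client-id"
CLIENT_SECRET = "your-reddit-app-client-secret"
USER_AGENT = "wcifbot/0.1 by your-username"
USERNAME = "your-bot-account"
PASSWORD = "your-bot-password"

# ID of the Reddit thread whose comments the bot watches.
THREAD_ID = "abc123"

# URL template for the streaming-availability API; the script fills in
# {type} ("anime" or "manga") and {id} (the MyAnimeList numeric ID).
API = "https://example.com/api/{type}/{id}"

# How often, in seconds, the scheduler runs check_for_new_posts.
CRON_INTERVAL = 60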