first commit #10

Open · wants to merge 1 commit into base: main
32 changes: 31 additions & 1 deletion backend/app/config.py
@@ -1,10 +1,13 @@
from logging.handlers import TimedRotatingFileHandler

from pydantic_settings import BaseSettings, SettingsConfigDict
from functools import lru_cache
import os
from typing import ClassVar

import logging

class Settings(BaseSettings):

    SECRET_KEY: str
    ALGORITHM: str
    ACCESS_TOKEN_EXPIRE_MINUTES: int
@@ -26,10 +29,37 @@ class Settings(BaseSettings):
        env_file=env_file, _env_file_encoding="utf-8", extra="allow"
    )

def setup_logging():
    # Log file name; the handler appends the date suffix on rotation
    log_name = "coin"
    # # Create a named logger
    # logger = logging.getLogger(log_name)
    log_path = os.path.join("./data_logs/", log_name)
    # TimedRotatingFileHandler: start a new log file each day, keep the last 7 days
    handler = TimedRotatingFileHandler(
        log_path,
        when="D",        # "D" means rotate daily
        interval=1,      # rotate once per day
        backupCount=7,   # keep only the last 7 days of logs
        encoding="utf-8"
    )

    # Log format
    handler.suffix = "%Y-%m-%d.log"
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s - [Class: %(module)s, Function: %(funcName)s, Line: %(lineno)d]')
    handler.setFormatter(formatter)
    # Configure the root logger
    logging.basicConfig(level=logging.INFO, handlers=[handler])

    # Raise watchfiles to WARNING to silence its INFO messages
    logging.getLogger('watchfiles').setLevel(logging.WARNING)
    return logging

@lru_cache
def get_settings():
    return Settings()


settings = get_settings()
logging = setup_logging()
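Because `setup_logging()` returns the `logging` module itself after configuring the root logger, other modules can import an already-configured `logging` from `app.config`. A minimal usage sketch, assuming `./data_logs/` already exists (TimedRotatingFileHandler does not create missing directories); the consumer module is illustrative, not part of the diff:

```python
# Hypothetical consumer module; names are illustrative.
from app.config import settings, logging

logger = logging.getLogger(__name__)  # child logger inherits the root file handler
logger.info("Settings loaded; token lifetime is %s minutes",
            settings.ACCESS_TOKEN_EXPIRE_MINUTES)
```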
12 changes: 8 additions & 4 deletions backend/app/database.py
@@ -2,13 +2,17 @@
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from .config import settings

from sqlalchemy.orm import scoped_session

engine = create_engine(
-    settings.SQLALCHEMY_DATABASE_URL
+    settings.SQLALCHEMY_DATABASE_URL,
+    pool_pre_ping=True,   # test each connection before handing it out
+    pool_recycle=3600,
+    pool_size=10,
+    max_overflow=20
)

-SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

+# SessionLocal = sessionmaker(autocommit=False, autoflush=True, bind=engine)
+SessionLocal = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))
Base = declarative_base()
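`scoped_session` gives each thread its own session, but that session must be released explicitly or it lingers with its connection. A minimal sketch of a matching FastAPI dependency — an assumption about how this project wires it up, not something shown in the diff:

```python
# Hypothetical get_db dependency for the scoped_session above.
from app.database import SessionLocal

def get_db():
    db = SessionLocal()  # returns (or creates) the thread-local session
    try:
        yield db
    finally:
        SessionLocal.remove()  # close and discard the thread-local session
```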

47 changes: 35 additions & 12 deletions backend/get_repo_oauth.py
@@ -14,7 +14,19 @@
"Authorization": f"Bearer {token}",
"Accept": "application/vnd.github+json",
}

import time
import logging
from datetime import datetime
# log_filename = f"./data_logs/log_{datetime.now().strftime('%Y-%m-%d')}.txt"
#
# logging.basicConfig(
#     filename=log_filename,  # log file name includes the date
#     level=logging.INFO,
#     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
#     encoding='utf-8'  # UTF-8 so non-ASCII log messages render correctly
# )
# # Raise watchfiles to WARNING to silence its INFO messages
# logging.getLogger('watchfiles').setLevel(logging.WARNING)

# Fetch all of the user's repositories
def get_repositories():
@@ -97,20 +109,31 @@ def save_info_to_json(info_dict, filename):
        json.dump(info_dict, f, ensure_ascii=False, indent=4)


# Helper function to get all pages of results

def get_all_pages(url, page=1):
-    headers = {"Authorization": f"token {token}"}
+    headers = {"Authorization": f"token {token}", 'User-Agent': 'insomnia/10.1.0'}
    items = []
    index = 0
-    while url and index < page:
-        response = requests.get(url, headers=headers)
-        if response.status_code == 200:
-            items.extend(response.json())
-            # Check if there is a 'next' page
-            url = response.links.get("next", {}).get("url")
-            index += 1
-        else:
-            break
+    try:
+        while url and index < page:
+            start_time = time.time()
+            response = requests.get(url, headers=headers)
+            end_time = time.time()  # stop timing
+            elapsed_time = end_time - start_time
+            logging.info(f"Query completed in {elapsed_time:.2f} seconds.")
+            if response.status_code == 200:
+                items.extend(response.json())
+                # Check if there is a 'next' page
+                url = response.links.get("next", {}).get("url")
+                index += 1
+            else:
+                break
+    except Exception as e:
+        logging.error(e)
+        logging.error(url)
+        # print("exceptions:", e)
+        # print(url)
+        # print(headers)
    return items
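If GitHub throttles the token, `get_all_pages` gives up on the first non-200 response. A hedged sketch of a retry-with-backoff variant — the function name, page limit, and retry counts are illustrative, not part of this PR:

```python
# Hypothetical variant with basic retry/backoff; not the PR's implementation.
import time
import requests

def get_all_pages_with_retry(url, headers, max_pages=10, max_retries=3):
    items = []
    pages = 0
    while url and pages < max_pages:
        for attempt in range(max_retries):
            response = requests.get(url, headers=headers, timeout=30)
            if response.status_code == 200:
                break
            time.sleep(2 ** attempt)  # back off on 403/429/5xx, then retry
        else:
            break  # every retry failed; return what we have so far
        items.extend(response.json())
        url = response.links.get("next", {}).get("url")  # follow the Link header
        pages += 1
    return items
```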


5 changes: 4 additions & 1 deletion backend/insert_repo_user.py
@@ -16,7 +16,10 @@
)

# Create the engine and connect to the database
-engine = create_engine(DATABASE_URL)
+engine = create_engine(DATABASE_URL,
+                       # pre-ping and recycle guard against "MySQL server has gone away"
+                       pool_pre_ping=True,
+                       pool_recycle=3600)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Initialize the password hashing context (bcrypt)
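The hunk cuts off at the bcrypt comment; the usual pattern it introduces is passlib's CryptContext. A minimal sketch under that assumption — these lines are not shown in the diff:

```python
# Hypothetical continuation; the PR truncates before these lines.
from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

def hash_password(plain: str) -> str:
    return pwd_context.hash(plain)

def verify_password(plain: str, hashed: str) -> bool:
    return pwd_context.verify(plain, hashed)
```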
49 changes: 9 additions & 40 deletions backend/main.py
@@ -1,16 +1,14 @@
import os
import uvicorn
import logging
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.middleware.cors import CORSMiddleware
from fastapi import BackgroundTasks
from app.routers import users
from app.routers import coin
from app.routers import item
from app.routers import update_data
from app.config import settings
from app.database import engine
-import datetime
+from datetime import datetime
import logging
app = FastAPI()

origins = [
@@ -44,42 +42,15 @@ def read_root():


from apscheduler.schedulers.background import BackgroundScheduler
from typing import List, Dict
import requests
import threading
import asyncio


# Initialize APScheduler
scheduler = BackgroundScheduler()
scheduler.start()

# Task function that periodically fetches GitHub issues
-from update_repo import update_repo
-from update_repo import update_repo_test
-import time

+from update_repo import update_repo, update_repo_by_multithreaded


-# Initialize APScheduler
-scheduler = BackgroundScheduler()

-import logging
-from datetime import datetime

-# Use the current date as part of the log file name
-log_filename = f"./data_logs/log_{datetime.now().strftime('%Y-%m-%d')}.txt"

-logging.basicConfig(
-    filename=log_filename,  # log file name includes the date
-    level=logging.INFO,
-    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-    encoding='utf-8'  # UTF-8 so non-ASCII log messages render correctly
-)
-# Raise watchfiles to WARNING to silence its INFO messages
-logging.getLogger('watchfiles').setLevel(logging.WARNING)

-# Start the scheduler
-scheduler.start()

# Assign a unique ID to the scheduled job
job_id = "update_repo_job"

@@ -91,10 +60,9 @@ async def start_scheduled_update():
    Start the scheduled job that periodically updates the GitHub repositories' issues.
    """
    job = scheduler.get_job(job_id)

    if job is None:
        new_job = scheduler.add_job(
-            update_repo, "interval", hours=12, minutes=0, seconds=0, id=job_id
+            update_repo_by_multithreaded, "interval", hours=1, minutes=0, seconds=0, id=job_id
        )
        next_run_time = new_job.next_run_time.strftime('%Y-%m-%d %H:%M:%S') if new_job.next_run_time else "Unknown"
        logging.info(f"Scheduled update started. Next run time: {next_run_time}.")
@@ -139,12 +107,13 @@ async def stop_scheduled_update():


@app.get("/execute-update")
async def execute_update():
def execute_update():
"""
手动执行数据更新任务,将数据写入数据库,并通知前端刷新。记录当前时间。
"""
try:
update_repo()
# update_repo()
update_repo_by_multithreaded()
current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
logging.info(f"Executed manual update successfully at {current_time}.")
return {
@@ -158,4 +127,4 @@ async def execute_update():
"message": f"An error occurred while executing the update: {str(e)}",
}
if __name__ == "__main__":
-    uvicorn.run("main:app", host=settings.HOST, port=settings.PORT, reload=True)
+    uvicorn.run("main:app", host=settings.HOST, port=settings.PORT, reload=False, workers=1)