
Commit

Merge pull request #32 from tukcomCD2024/feat/#29-ai-news-summary
Feat/#29 News Summary (implements news content summarization)
yeonjy authored Mar 19, 2024
2 parents b71de8f + 5bf4234 commit ceebfc3
Showing 9 changed files with 137 additions and 0 deletions.
11 changes: 11 additions & 0 deletions backend/ai_response_processor/main.py
@@ -0,0 +1,11 @@
from fastapi import FastAPI
import os
from dotenv import load_dotenv

# Load the service .env before importing the consumer module, which reads its
# RabbitMQ settings (and starts consuming) at import time.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
load_dotenv(os.path.join(BASE_DIR, ".env"))

from news.contentqueue import rabbitmq_consumer  # noqa: E402,F401

app = FastAPI()
Empty file.
40 changes: 40 additions & 0 deletions backend/ai_response_processor/news/contentqueue/rabbitmq_consumer.py
@@ -0,0 +1,40 @@
import os
from dotenv import load_dotenv
import json
import logging
from pika import BlockingConnection, ConnectionParameters, PlainCredentials

from news.crud.news_summarizer import summarize_news

load_dotenv()
logger = logging.getLogger(__name__)

credentials = PlainCredentials(username=os.getenv('RABBITMQ_USERNAME'), password=os.getenv('RABBITMQ_PASSWORD'))
connection = BlockingConnection(ConnectionParameters(host=os.getenv('RABBITMQ_HOST'),
port=int(os.getenv('RABBITMQ_PORT')),
credentials=credentials))
channel = connection.channel()
channel.exchange_declare(exchange=os.getenv('SUMMARY_EXCHANGE'),
exchange_type='direct',
durable=True)
channel.queue_declare(queue=os.getenv('SUMMARY_QUEUE'), durable=True)
channel.queue_bind(exchange=os.getenv('SUMMARY_EXCHANGE'),
queue=os.getenv('SUMMARY_QUEUE'), routing_key=os.getenv('SUMMARY_KEY'))


def callback(ch, method, properties, body):
    """Handle one queued message: a JSON object with 'id' and 'content' keys."""
    logger.info(" [x] Queue Received ")
    received_data = body.decode()

    try:
        received_data_json = json.loads(received_data)
        if 'content' in received_data_json:
            summarize_news(news_id=received_data_json['id'], content=received_data_json['content'])
        else:
            logger.error("'content' key is missing in the received data.")
    except json.JSONDecodeError:
        logger.error("Error decoding JSON from the received data.")


# auto_ack=True acknowledges each message on delivery, so a failure inside
# callback() means the message is lost rather than redelivered.
channel.basic_consume(queue=os.getenv('SUMMARY_QUEUE'), on_message_callback=callback, auto_ack=True)
channel.start_consuming()  # blocks the importing thread until the connection closes
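
The callback above expects every message on SUMMARY_QUEUE to be a JSON object with 'id' and 'content' keys. As a rough sketch (not part of this commit, and assuming the same .env variables), a test message could be published like this:

import json
import os

from dotenv import load_dotenv
from pika import BlockingConnection, ConnectionParameters, PlainCredentials

load_dotenv()

credentials = PlainCredentials(os.getenv('RABBITMQ_USERNAME'), os.getenv('RABBITMQ_PASSWORD'))
connection = BlockingConnection(ConnectionParameters(host=os.getenv('RABBITMQ_HOST'),
                                                     port=int(os.getenv('RABBITMQ_PORT')),
                                                     credentials=credentials))
channel = connection.channel()

# Shape consumed by callback(): 'id' and 'content' keys.
payload = {"id": 1, "content": "<news article body to summarize>"}
channel.basic_publish(exchange=os.getenv('SUMMARY_EXCHANGE'),
                      routing_key=os.getenv('SUMMARY_KEY'),
                      body=json.dumps(payload))
connection.close()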
39 changes: 39 additions & 0 deletions backend/ai_response_processor/news/contentqueue/rabbitmq_producer.py
@@ -0,0 +1,39 @@
import os
from dotenv import load_dotenv
import json
from pika import BlockingConnection, ConnectionParameters, PlainCredentials, BasicProperties

from news.schema.message_item import MessageItem

# Load .env here as well, since CONFIG below is evaluated at import time.
load_dotenv()

CONFIG = {
'username': os.getenv('RABBITMQ_USERNAME'),
'password': os.getenv('RABBITMQ_PASSWORD'),
'host': os.getenv('RABBITMQ_HOST'),
'port': int(os.getenv('RABBITMQ_PORT')),
'queue_name': os.getenv('STORE_QUEUE'),
'exchange_name': os.getenv('STORE_EXCHANGE'),
'routing_key': os.getenv('STORE_KEY'),
}

CONTENT_TYPE = 'application/json'

def get_connection_params():
    credentials = PlainCredentials(username=CONFIG['username'], password=CONFIG['password'])
    return ConnectionParameters(host=CONFIG['host'],
                                port=CONFIG['port'],
                                credentials=credentials,
                                heartbeat=600,
                                blocked_connection_timeout=300)

def send_message(message: MessageItem):
connection = BlockingConnection(get_connection_params())
channel = connection.channel()
channel.queue_declare(queue=CONFIG['queue_name'], durable=True)

    # delivery_mode=1 marks messages as transient even though the queue is durable;
    # the STORE exchange-to-queue binding is assumed to be declared on the consuming side.
    props = BasicProperties(content_type=CONTENT_TYPE, delivery_mode=1)
serialized_message = json.dumps(message.__dict__)

channel.basic_publish(exchange=CONFIG['exchange_name'],
routing_key=CONFIG['routing_key'],
body=serialized_message,
properties=props)
connection.close()
Empty file.
34 changes: 34 additions & 0 deletions backend/ai_response_processor/news/crud/news_summarizer.py
@@ -0,0 +1,34 @@
import os

from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
import logging

from news.contentqueue.rabbitmq_producer import send_message
from news.schema.message_item import MessageItem

logger = logging.getLogger(__name__)
# Pass the key to the client explicitly; assigning it as a class attribute on the
# pydantic-based ChatOpenAI is not picked up as a field default.
OPENAI_API_KEY = os.environ['OPENAI_API_KEY']


def summarize_news(news_id: int, content: str):
    logger.info("summarize_news start : (id) %s", news_id)
    chat = ChatOpenAI(temperature=0.3, openai_api_key=OPENAI_API_KEY)

template = ChatPromptTemplate.from_messages(
[
("system", "You're a news summarizer. Also, the answer must be no more than 500 characters in Korean."),
("user", "{raw_news_content}"),
]
)

prompt = template.format_messages(
        raw_news_content=content
)

result = chat.predict_messages(prompt)
message_item = MessageItem(
id=news_id,
content=result.content
)
send_message(message_item)
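
For a quick local check of the summarizer itself, summarize_news can be invoked directly; this is only an illustrative sketch (not part of this commit) and assumes OPENAI_API_KEY and the RabbitMQ/STORE_* variables are available in .env:

from dotenv import load_dotenv

load_dotenv()  # must supply OPENAI_API_KEY plus the RabbitMQ and STORE_* settings

from news.crud.news_summarizer import summarize_news  # noqa: E402

# Summarizes the text with ChatOpenAI and publishes the result to the STORE queue.
summarize_news(news_id=1, content="<news article body>")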
Empty file.
7 changes: 7 additions & 0 deletions backend/ai_response_processor/news/schema/message_item.py
@@ -0,0 +1,7 @@
from dataclasses import dataclass


@dataclass
class MessageItem:
id: int
content: str
6 changes: 6 additions & 0 deletions backend/ai_response_processor/requirements.txt
@@ -0,0 +1,6 @@
langchain==0.0.332
python-dotenv==1.0.0
openai==0.28.0
uvicorn==0.28.0
pika
fastapi
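
Taken together, the new modules read the following settings from backend/ai_response_processor/.env; the variable names come from the code above, but the values below are placeholders, not part of the commit:

OPENAI_API_KEY=sk-...
RABBITMQ_USERNAME=guest
RABBITMQ_PASSWORD=guest
RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672
SUMMARY_EXCHANGE=summary.exchange
SUMMARY_QUEUE=summary.queue
SUMMARY_KEY=summary.key
STORE_EXCHANGE=store.exchange
STORE_QUEUE=store.queue
STORE_KEY=store.key

With those in place, the service can be started from backend/ai_response_processor with, for example, uvicorn main:app; note that importing main also starts the blocking RabbitMQ consume loop.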
