Skip to content

Commit

Permalink
chore: added llm sample
Browse files Browse the repository at this point in the history
  • Loading branch information
MrunmayS committed Nov 2, 2023
1 parent 5d98f33 commit f4d580d
Show file tree
Hide file tree
Showing 11 changed files with 4,529 additions and 0 deletions.
17 changes: 17 additions & 0 deletions usecases/llm/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Use the official Python 3.9 image as the base image
FROM python:3.9

# Set the working directory within the container
WORKDIR /usr/src/app

# Copy the requirements.txt file into the container first, so the pip
# layer below is cached independently of application-source changes
COPY requirements.txt .

# Install dependencies from requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Copy your application files into the container
COPY . .

# Chainlit serves the chat UI on this port
EXPOSE 8000
# NOTE(review): confirm chainlit binds 0.0.0.0 inside the container; if it
# binds localhost only, the published port will not be reachable.
CMD ["python", "-m", "chainlit", "run", "app.py", "--port" ,"8000"]
21 changes: 21 additions & 0 deletions usecases/llm/LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 Mrunmay Shelar

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
12 changes: 12 additions & 0 deletions usecases/llm/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Hyper-personalized chatbots using LLMs, Dozer and Vector Databases for Banking and Finance

In this example, we will be using Dozer along with Chainlit, Langchain and Pinecone.

### Pre-requisites

1. Pinecone account
2. OpenAI API key
3. Python 3.9 / Docker

### Setup

211 changes: 211 additions & 0 deletions usecases/llm/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,211 @@
import chainlit as cl
import os
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.schema import HumanMessage
## For Vector DB
import pinecone
import uuid
from langchain.vectorstores import Pinecone
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain.chains import RetrievalQA
from langchain.agents import Tool
#from langchain import PromptTemplate, HuggingFaceHub, LLMChain
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.agents import initialize_agent
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chat_models import ChatOpenAI
from langchain.chains import RetrievalQAWithSourcesChain
from langchain.prompts.chat import ChatPromptTemplate,HumanMessagePromptTemplate,SystemMessagePromptTemplate
from chainlit import user_session
import os
from pydozer.api import ApiClient
from langchain.agents import ConversationalChatAgent, AgentExecutor


### Dozer

# Host:port of the hosted Dozer Cloud gRPC/REST endpoint used when an
# app id is supplied (see get_api_client below).
DOZER_CLOUD_HOST = "data.getdozer.io:443"

def get_api_client(app_id=None, token=None):
    """Build a Dozer ApiClient for the financial_profile endpoint.

    Args:
        app_id: Dozer Cloud application id; when falsy, a local
            instance on localhost:80 is targeted instead.
        token: auth token for Dozer Cloud (only used with app_id).

    Returns:
        ApiClient bound to the "financial_profile" endpoint.
    """
    if app_id:
        # Hosted deployment: TLS-secured connection to Dozer Cloud.
        return ApiClient("financial_profile", url=DOZER_CLOUD_HOST,
                         app_id=app_id, secure=True, token=token)
    # Local development fallback.
    return ApiClient("financial_profile", url="localhost:80")


customer_client = get_api_client(app_id=os.environ.get("DOZER_APP_ID"), token=os.environ.get("DOZER_TOKEN"))

def getCustomerData(input):
    """Fetch one customer's record from Dozer by id.

    Args:
        input: customer id used in the Dozer `$filter` query.

    Returns:
        list: [id, name, income, age, dependents, credit_amt,
        repay_status, util_ratio, address, prob] — note positions 5-9
        are reordered relative to the raw record layout, where the
        fields appear as address, prob, credit_amt, repay_status,
        util_ratio.
    """
    response = customer_client.query({"$filter": {"id": input}})
    # Only the first matching record is used.
    fields = response.records[0].record.values

    customer_id = fields[0].int_value
    full_name = fields[1].string_value
    income = fields[2].int_value
    age = fields[3].int_value
    dependents = fields[4].int_value
    address = fields[5].string_value
    default_prob = fields[6].float_value
    credit_limit = fields[7].int_value
    repay_status = fields[8].float_value
    util_ratio = fields[9].float_value

    return [customer_id, full_name, income, age, dependents,
            credit_limit, repay_status, util_ratio, address, default_prob]



#user_env = user_session.get(".env")
# Credentials are read from the environment.
# NOTE(review): OPEN_API_KEY (sic) holds OPENAI_API_KEY, and PINECONE_ENV
# reads the env var "PINE_ENV" — confirm the deployment sets those exact
# names.
OPEN_API_KEY = os.environ.get("OPENAI_API_KEY")
PINECONE_API_KEY = os.environ.get("PINECONE_API_KEY")
PINECONE_ENV = os.environ.get("PINE_ENV")
# platform.openai.com
# OpenAI embedding model used to vectorize queries for Pinecone.
model_name = 'text-embedding-ada-002'


embed = OpenAIEmbeddings(
    model=model_name,
    openai_api_key=OPEN_API_KEY
)

# find ENV (cloud region) next to API key in console
index_name = 'langchain-retrieval-agent'
# Side effect: initializes the global pinecone client for Index() below.
pinecone.init(
    api_key=PINECONE_API_KEY,
    environment=PINECONE_ENV
)


#embeddings = OpenAIEmbeddings()


# Metadata field in the Pinecone index that stores the stored answer text.
text_field = "Answer"
# NOTE(review): the index name is hard-coded here even though index_name
# above holds the same string — consider pinecone.Index(index_name).
index = pinecone.Index('langchain-retrieval-agent')
# switch back to normal index for langchain
#index = pinecone.Index(index_name)

# LangChain vector-store wrapper: embeds queries with embed.embed_query and
# reads document text from text_field.
vectorstore = Pinecone(
    index, embed.embed_query, text_field
)

# Deterministic (temperature 0) GPT-4 chat model shared by chain and agent.
llm = ChatOpenAI(
    openai_api_key = OPEN_API_KEY,
    model_name='gpt-4',
    temperature=0.0
)

# Sliding-window memory: the agent sees only the last 5 exchanges.
conversational_memory = ConversationBufferWindowMemory(
    memory_key='chat_history',
    k=5,
    return_messages=True
)
# retrieval qa chain
chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",
    retriever=vectorstore.as_retriever()
)

def setName(userName):
    # Record the user's entered id in the module-level global `name`.
    # NOTE(review): `name` is never read elsewhere in the visible code —
    # confirm before relying on it (or removing it).
    global name
    name = userName

def customerProfile(input):
    """Render a customer's financial profile as a single summary string.

    Fetches the raw record via getCustomerData() and unpacks it in the
    order that function actually returns:
    [id, name, income, age, dependents, credit_amt, repay_status,
     util_ratio, address, prob].

    Args:
        input: customer id (anything int() accepts).

    Returns:
        str: human-readable summary handed to the LLM agent as context.

    Raises:
        ValueError: if int(input) fails.
    """
    data = getCustomerData(int(input))
    # Bug fix: the original read positions 5-9 as address/prob/credit_amt/
    # repay_status/util_ratio, but getCustomerData returns credit_amt/
    # repay_status/util_ratio/address/prob in those slots — so five fields
    # were mislabelled in the summary shown to the agent.
    (id, name1, income, age, dependents,
     credit_amt, repay_status, util_ratio, address, prob) = data

    # Output format preserved from the original implementation.
    return ( f"ID = {id} ,name = {name1} ,age = {age}, income = {income}, dependents = {dependents},repay_status={repay_status}, credit utilisation ratio ={util_ratio} ,address={address}, available credit={credit_amt}, probability of default = {prob} ")


# Tools exposed to the agent: a retrieval-QA chain over the Pinecone
# knowledge base, plus a lookup that formats the customer's Dozer record.
tools = [
    Tool(
        name='Knowledge Base',
        func=chain.run,
        description=(
            "Useful when you need general information about bank policies and bank offerings. "\
            'use this tool when answering general knowledge queries to get '\
            'more information about the topic'
        )
    ),
    Tool(
        name = 'Customer Data',
        func = customerProfile,
        description=(
            "Useful when you need customer data to decide eligibility for a particular credit card. "
            "Use to check the probability of default and available balance to use it for eligibility"
        )
    )
]


# Module-level conversational agent.
# NOTE(review): start() builds its own per-session agent/executor and
# stores it in the user session, so this instance appears unused after
# import time — confirm before removing.
agent = initialize_agent(
    agent='chat-conversational-react-description',
    tools=tools,
    llm=llm,
    verbose=True,
    max_iterations=5,
    early_stopping_method='generate',
    memory=conversational_memory
)


# NOTE(review): `global res` at module scope is a no-op; `res` is only used
# locally inside start() below.
global res
@cl.on_chat_start

async def start():
    # Chat-session entry point: greet the user, ask for their customer id,
    # load their financial profile from Dozer, and build a per-session
    # agent whose system message carries that profile.
    intro = "Hi there, I am an assistant for Bank A. I am here to assist you with all your banking needs! Please enter your id: "

    # Block (up to 45 s) waiting for the user's id; raises on timeout.
    res = await cl.AskUserMessage(content=intro,timeout=45,raise_on_timeout=True).send()

    # NOTE(review): int() raises ValueError on non-numeric input, and this
    # parsed value is unused (customerProfile re-parses res['content']).
    id = int(res['content'])
    greeting = f"Hi {res['content']}. What brings you here?"
    await cl.Message(content=greeting).send()
    setName(res['content'])
    # global credit
    # credit = getCredit(int(res['content']))
    global customerinfo
    customerinfo = customerProfile(int(res['content']) )
    print(customerinfo)

    # Session-specific agent: bakes the customer's profile into the system
    # message so the LLM can judge credit-card eligibility.
    agent = ConversationalChatAgent.from_llm_and_tools(
        llm=llm,
        tools=tools,
        system_message = f"You have customer info of a customer which is as follows {customerinfo}. You have to use this information along with Knowledge base to decide if the customer is eligible for a credit card or not.",
        verbose=True,
        max_iterations=5,
        early_stopping_method='generate',
        memory=conversational_memory

    )

    global agent_chain
    agent_chain = AgentExecutor.from_agent_and_tools(
        agent = agent,
        tools = tools,
        verbose = True,
        max_iterations = 5,
        early_stopping_method = 'generate',
        memory = conversational_memory

    )
    # Stash the executor in the Chainlit session for the on_message handler.
    cl.user_session.set("chain", agent_chain)



@cl.on_message
async def main(message: str):
    """Handle each chat message: run it through the session's agent
    executor (built in start()) and send the agent's reply back."""
    executor = cl.user_session.get("chain")
    # Wrap the synchronous .run call so it doesn't block the event loop.
    run_agent = cl.make_async(executor.run)
    reply = await run_agent(message.content)
    await cl.Message(content=reply).send()
31 changes: 31 additions & 0 deletions usecases/llm/chainlit.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# Welcome to GoldenGate Bank! 🏦💵

Hi there, Customer! 👋 We're excited to have you on board. GoldenGate Bank has been serving customers since 1970. With a commitment to excellence, we have grown into a reputable financial institution, providing a wide range of services to meet your banking needs.

# Our Services 🎄🎄🎁

At 'GoldenGate Bank', we offer a comprehensive range of banking services to cater to your financial requirements. Our services include:

**Savings Accounts**: Choose from a variety of savings accounts with competitive interest rates and personalized features.
**Current Accounts**: Our current accounts are designed to support businesses and individuals with seamless transaction capabilities.
**Fixed Deposits**: Benefit from attractive interest rates and flexible tenure options with our fixed deposit accounts.

# Credit Cards 💳💳

Experience the convenience and benefits of GoldenGate Bank credit cards, tailored to suit your lifestyle and spending habits. We offer the following credit cards:

1. **Rewards Card** 📦🎁
Earn reward points on every purchase.
Eligibility: Minimum monthly income of $X,XX and a credit score of [Credit Score Requirement].

2. **Travel Card** ✈️🧳
Enjoy exclusive travel perks and offers.
Eligibility: Minimum monthly income of $X,XX and a credit score of [Credit Score Requirement].

3. **Cashback Card** 💵💵
Get cashback on eligible purchases.
Eligibility: Minimum monthly income of $X,XX and a credit score of [Credit Score Requirement].

4. **Premium Card** 💰💲
Access premium benefits, including airport lounge access and concierge services.
Eligibility: Minimum monthly income of $X,XXX and a credit score of [Credit Score Requirement].
Binary file added usecases/llm/data/BankFAQs.xlsx
Binary file not shown.
13 changes: 13 additions & 0 deletions usecases/llm/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Compose service for the Chainlit LLM demo.
# Fix: the original declared `version: '4.25'`, which is not a valid
# Compose file-format version (the schema tops out at '3.8'; '4.25' is a
# Docker Engine version). Modern Compose warns on / ignores unknown
# versions, so pin a valid one.
version: '3.8'

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
    # Bind-mount the source tree over the image's /usr/src/app for live edits.
    volumes:
      - .:/usr/src/app
    # API keys (OpenAI, Pinecone, Dozer) are injected from .env.
    env_file:
      - .env
    # Quoted to avoid YAML's sexagesimal number trap on port mappings.
    ports:
      - '8000:8000'
63 changes: 63 additions & 0 deletions usecases/llm/dozer-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
# Dozer app configuration for the LLM banking sample.
app_name: llm-sample
version: 1

# Data sources: a Postgres database (raw credit records) and an S3 bucket
# holding customer demographic CSVs. Credentials are template variables
# resolved from the environment.
connections:
  - name: onnx
    # NOTE(review): the space before the colon ("config :") is kept from
    # the original; it parses, but "config:" is conventional.
    config : !Postgres
      user: "{{PG_USER}}"
      password: "{{PG_PASSWORD}}"
      host: "{{PG_HOST}}"
      port: 5432
      database: "{{PG_DB}}"
  - name: s3data
    config: !S3Storage
      details:
        access_key_id: "{{AWS_ACCESS_KEY}}"
        secret_access_key: "{{AWS_SECRET_KEY}}"
        region: "{{AWS_REGION_S3}}"
        bucket_name: "{{AWS_BUCKET_NAME}}"
      tables:
        - !Table
          name: customers
          config: !CSV
            path: customer_data/
            extension: .csv

sources:
  - name: credit
    table_name: cred2
    connection: onnx

  - name: data
    table_name: customers
    connection: s3data

# Pipeline: joins customer demographics (template) with the ONNX model's
# default-probability prediction plus derived repayment/utilization
# metrics (res1), materialized into customer_profile.
sql: |
  WITH template AS
  (SELECT c.ID as ID, c.Name as name,c.Income as total_income, c.Age as age,c.Dependents as dependents, c.address as address FROM data c ),
  res1 AS ( SELECT torch_jit( LIMIT_BAL, SEX, EDUCATION, MARRIAGE, AGE, PAY_AMT6,BILL_AMT6,PAY_6,PAY_AMT5,BILL_AMT5,PAY_5,PAY_AMT4,BILL_AMT4,PAY_4,
  PAY_AMT3,BILL_AMT3,PAY_3,
  PAY_AMT2,BILL_AMT2,PAY_2,
  PAY_AMT1,BILL_AMT1,PAY_0) as result, ID as id , LIMIT_BAL,
  (PAY_0 + PAY_2 + PAY_3 + PAY_4 + PAY_5 + PAY_6)/5 as avg_repayment_status,
  (BILL_AMT1+BILL_AMT2+BILL_AMT3+BILL_AMT4+BILL_AMT5+BILL_AMT6)/(LIMIT_BAL * 6) as utilization_ratio
  from credit )
  SELECT ID as id, name, total_income, age, dependents, address, result, LIMIT_BAL as avail_credit, avg_repayment_status, utilization_ratio
  INTO customer_profile
  FROM template t
  JOIN res1 r
  ON t.ID = r.id;

# Endpoint exposing customer_profile; app.py queries it via
# get_api_client("financial_profile").
endpoints:
  - name: financial_profile
    path: /customers
    table_name: customer_profile


# UDF backed by an ONNX model; called as torch_jit(...) in the SQL above.
udfs:
  - name: torch_jit
    config: !Onnx
      path: ./sequential_31.onnx
Loading

0 comments on commit f4d580d

Please sign in to comment.