# OpenAI info. All these fields are required for the engine to work.
OPENAI_API_KEY = # This field is required for the engine to work.
ORG_ID =
# The LLM model to be used.
LLM_MODEL = "gpt-4-turbo-preview"
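# A quick way to confirm these values are picked up (a sketch, assuming the engine
# loads this file with python-dotenv, which is an assumption, not guaranteed here):
#   python3 -c "from dotenv import load_dotenv; import os; load_dotenv(); print(os.getenv('LLM_MODEL'))"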
# Our SQL generation agents use different tools to generate SQL queries. To limit the number of times
# an agent can use its tools, set the "AGENT_MAX_ITERATIONS" env variable. By default it is set to 20 iterations.
AGENT_MAX_ITERATIONS = 15
# Timeout in seconds for the engine to return a response. Defaults to 150 seconds.
DH_ENGINE_TIMEOUT = 150
# Timeout in seconds for SQL execution. Our agents execute the SQL query to recover from errors, so this is the timeout for that execution. Defaults to 30 seconds.
SQL_EXECUTION_TIMEOUT =
# The upper limit on the number of rows returned from the query engine (equivalent to using LIMIT N in PostgreSQL/MySQL/SQLite). Defaults to 50.
UPPER_LIMIT_QUERY_RETURN_ROWS = 50
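# For example, with the default of 50, a query that would return thousands of rows
# behaves as if it ended in "LIMIT 50".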
# Encryption key for storing DB connection data in Mongo.
ENCRYPT_KEY =
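# A minimal sketch for generating a key, assuming it is a Fernet key from the
# cryptography package (see the README for the authoritative instructions):
#   python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"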
GOLDEN_SQL_COLLECTION = 'my-golden-records'
# Pinecone info. These fields are required if the vector store used is Pinecone.
PINECONE_API_KEY =
PINECONE_ENVIRONMENT =
# AstraDB info. These fields are required if the vector store used is AstraDB.
ASTRA_DB_API_ENDPOINT =
ASTRA_DB_APPLICATION_TOKEN =
# Module implementation names to be used for each required component. You can use the default ones or create your own.
API_SERVER = "dataherald.api.fastapi.FastAPI"
EVALUATOR = "dataherald.eval.simple_evaluator.SimpleEvaluator"
DB = "dataherald.db.mongo.MongoDB"
VECTOR_STORE = 'dataherald.vector_store.chroma.Chroma'
CONTEXT_STORE = 'dataherald.context_store.default.DefaultContextStore' # The context store class to use; DefaultContextStore is the default.
DB_SCANNER = 'dataherald.db_scanner.sqlalchemy.SqlAlchemyScanner'
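# These values look like dotted Python import paths ("package.module.ClassName").
# A sketch of how such a path can be resolved, assuming the engine imports them
# dynamically (illustrative only, not necessarily the engine's exact mechanism):
#   python3 -c "import importlib; p='dataherald.db.mongo.MongoDB'; mod, cls = p.rsplit('.', 1); print(getattr(importlib.import_module(mod), cls))"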
# MongoDB database information.
MONGODB_URI = "mongodb://admin:admin@mongodb:27017"
MONGODB_DB_NAME = 'dataherald'
MONGODB_DB_USERNAME = 'admin'
MONGODB_DB_PASSWORD = 'admin'
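# A hypothetical smoke test for these credentials, assuming mongosh is available
# inside the Docker network (not part of the engine itself):
#   mongosh "mongodb://admin:admin@mongodb:27017" --eval "db.runCommand({ ping: 1 })"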
# The encryption key is used to encrypt database connection info before storing it in Mongo. Please refer to the README on how to set it.
S3_AWS_ACCESS_KEY_ID=
S3_AWS_SECRET_ACCESS_KEY=
ONLY_STORE_CSV_FILES_LOCALLY = False # Set to True if you only want to save generated CSV files locally instead of to S3. Note that locally stored files should be treated as ephemeral, i.e., they will disappear when the engine is restarted.
CORE_PORT = 80 # The port exposed by the container. It configures both the internal and external container ports.
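# A sketch of the matching docker-compose mapping, assuming CORE_PORT=80
# (adjust to your compose file's conventions):
#   ports:
#     - "80:80"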