Commit

Merge branch 'openai_compatible_gauntlet' of github.com:mosaicml/llm-foundry into openai_compatible_gauntlet
bmosaicml committed Apr 3, 2024
2 parents 5720676 + 7afee25 commit c303b91
Showing 1 changed file with 1 addition and 9 deletions.
10 changes: 1 addition & 9 deletions llmfoundry/models/inference_api_wrapper/openai_causal_lm.py
@@ -43,22 +43,14 @@ def __init__(self, om_model_config: DictConfig, tokenizer: AutoTokenizer, api_key ...
                 conda_package='openai',
                 conda_channel='conda-forge') from e
-        if api_key is None:
-            api_key = os.environ.get('OPENAI_API_KEY')
-
-        api_key = os.environ.get('OPENAI_API_KEY')
+        api_key = os.environ.get(om_model_config.get('api_env_key', 'OPENAI_API_KEY'))
         base_url = om_model_config.get('base_url')
         if base_url is None:
             # Using OpenAI default, where the API key is required
             if api_key is None:
                 raise ValueError(
                     'No OpenAI API Key found. Ensure it is saved as an environmental variable called OPENAI_API_KEY.'
                 )
         else:
             # Using a custom base URL, where the API key may not be required
             log.info(
                 f'Making request to custom base URL: {base_url}{"" if api_key is not None else " (no API key set)"}'
             )
             api_key = 'placeholder'  # This cannot be None

         self.client = openai.OpenAI(base_url=base_url, api_key=api_key)
         if 'version' in om_model_config:
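For reference, the key/base-URL resolution that this hunk converges on can be sketched as a stand-alone snippet. This is a minimal illustration only, not code from llm-foundry: the helper name resolve_openai_client, the plain-dict config, and the example values are hypothetical, and the extra `if api_key is None` guard before the placeholder assignment is one reading of the hunk rather than the file's exact control flow.

import logging
import os
from typing import Any, Dict, Optional

import openai

log = logging.getLogger(__name__)


def resolve_openai_client(om_model_config: Dict[str, Any],
                          api_key: Optional[str] = None) -> openai.OpenAI:
    """Hypothetical helper mirroring the logic in the hunk above."""
    if api_key is None:
        # Read the key from the env var named by `api_env_key`,
        # defaulting to OPENAI_API_KEY.
        api_key = os.environ.get(
            om_model_config.get('api_env_key', 'OPENAI_API_KEY'))
    base_url = om_model_config.get('base_url')
    if base_url is None:
        # Using the OpenAI default endpoint, where the API key is required.
        if api_key is None:
            raise ValueError(
                'No OpenAI API Key found. Ensure it is saved as an '
                'environmental variable called OPENAI_API_KEY.')
    else:
        # Using a custom base URL, where the API key may not be required.
        log.info(f'Making request to custom base URL: {base_url}'
                 f'{"" if api_key is not None else " (no API key set)"}')
        if api_key is None:
            api_key = 'placeholder'  # openai.OpenAI() rejects api_key=None
    return openai.OpenAI(base_url=base_url, api_key=api_key)


# Example usage against a local OpenAI-compatible server (hypothetical values):
# client = resolve_openai_client({'base_url': 'http://localhost:8080/v1',
#                                 'api_env_key': 'LOCAL_LLM_API_KEY'})

The 'placeholder' value exists only because the openai v1 client constructor requires a non-None api_key, even when the custom endpoint does not enforce authentication.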
