Skip to content

Commit

Permalink
Merge pull request #56 from plasma-umass/litellm_and_bedrock_support
Browse files Browse the repository at this point in the history
Moving to litellm; added Bedrock.
  • Loading branch information
emeryberger authored Feb 5, 2024
2 parents e506157 + 4dbd79e commit a29d555
Show file tree
Hide file tree
Showing 3 changed files with 91 additions and 29 deletions.
26 changes: 20 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,10 @@ Go, Java, LaTeX, PHP, Python, Ruby, Rust, Swift, and TypeScript.

> **Note**
>
> CWhy needs to be connected to an [OpenAI account](https://openai.com/api/). _Your account will need to have a
> positive balance for this to work_ ([check your balance](https://platform.openai.com/usage)).
> [Get a key here](https://platform.openai.com/api-keys).
> CWhy needs to be connected to an [OpenAI account](https://openai.com/api/) or an Amazon Web Services account.
> _Your account will need to have a
> positive balance for this to work_ ([check your OpenAI balance](https://platform.openai.com/usage)).
> [Get an OpenAI key here](https://platform.openai.com/api-keys).
>
> CWhy currently defaults to GPT-4, and falls back to GPT-3.5-turbo if a request error occurs. For the newest and best
> model (GPT-4) to work, you need to have purchased at least $1 in credits (if your API account was created before
Expand All @@ -28,13 +29,26 @@ Go, Java, LaTeX, PHP, Python, Ruby, Rust, Swift, and TypeScript.
> Once you have an API key, set it as an environment variable called `OPENAI_API_KEY`.
>
> ```bash
> # On Linux/MacOs.
> # On Linux/MacOS:
> export OPENAI_API_KEY=<your-api-key>
>
> # On Windows.
> # On Windows:
> $env:OPENAI_API_KEY=<your-api-key>
> ```
>
> **New**: CWhy now has alpha support for Amazon Bedrock, using the Claude model.
> To use Bedrock, you need to set three environment variables.
>
> ```bash
> # On Linux/MacOS:
> export AWS_ACCESS_KEY_ID=<your-access-key>
> export AWS_SECRET_ACCESS_KEY=<your-secret-key>
> export AWS_REGION_NAME=us-west-2
> ```
>
> CWhy will automatically select which AI service to use (OpenAI or AWS Bedrock)
> when it detects that the appropriate environment variables have been set.
```
python3 -m pip install cwhy
```
Expand Down
5 changes: 3 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,14 @@ build-backend = "hatchling.build"

[project]
name = "cwhy"
version = "0.4.2"
version = "0.4.3"
authors = [
{ name="Emery Berger", email="[email protected]" },
{ name="Nicolas van Kempen", email="[email protected]" },
{ name="Bryce Adelstein Lelbach", email="[email protected]" }
]
dependencies = ["llm_utils==0.2.4", "openai>=1.3.6", "PyYAML>=6.0.1", "rich>=13.7.0"]

dependencies = [ "openai>=1.3.6", "PyYAML>=6.0.1", "rich>=13.7.0", "litellm>=1.22.3", "tiktoken>=0.5.2", "llm-utils>=0.2.5", "setuptools>=68.2.2"]
description = "Explains and proposes fixes for compile-time errors for many programming languages."
readme = "README.md"
requires-python = ">=3.7"
Expand Down
89 changes: 68 additions & 21 deletions src/cwhy/cwhy.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,75 @@
import argparse
import contextlib
import os
import subprocess
import sys

import openai

import litellm # type: ignore
import llm_utils

from . import conversation, prompts
litellm.set_verbose=False

# Turn off most logging
from llm_utils import logging
logging.getLogger().setLevel(logging.ERROR)

from openai import NotFoundError, RateLimitError, APITimeoutError, OpenAIError, BadRequestError

from . import conversation, prompts

def print_key_info():
    """Print instructions for configuring credentials for a supported AI service.

    Covers both backends CWhy can use: an OpenAI API key, or the three AWS
    environment variables required for Amazon Bedrock.
    """
    instructions = [
        "You need a key (or keys) from an AI service to use CWhy.",
        "",
        "OpenAI:",
        " You can get a key here: https://platform.openai.com/api-keys",
        " Set the environment variable OPENAI_API_KEY to your key value:",
        " export OPENAI_API_KEY=<your key>",
        "",
        "Bedrock:",
        " To use Bedrock, you need an AWS account.",
        " Set the following environment variables:",
        " export AWS_ACCESS_KEY_ID=<your key id>",
        " export AWS_SECRET_ACCESS_KEY=<your secret key>",
        " export AWS_REGION_NAME=us-west-2",
    ]
    print("\n".join(instructions))

# If keys are defined in the environment, we use the appropriate service.
# AI_service records which backend was detected ("OpenAI" or "Bedrock");
# _DEFAULT_FALLBACK_MODELS lists the litellm model identifiers tried in order.
AI_service = None
_DEFAULT_FALLBACK_MODELS = []

# OpenAI takes precedence: a non-empty OPENAI_API_KEY selects it.
# suppress(KeyError) treats an unset variable the same as an empty one.
with contextlib.suppress(KeyError):
    if os.environ["OPENAI_API_KEY"]:
        AI_service = "OpenAI"
        _DEFAULT_FALLBACK_MODELS = ["openai/gpt-4", "openai/gpt-3.5-turbo"]
# Bedrock is only considered when no service was selected above; it requires
# all three AWS variables to be present and non-empty.
with contextlib.suppress(KeyError):
    if not _DEFAULT_FALLBACK_MODELS:
        if os.environ["AWS_ACCESS_KEY_ID"] and os.environ["AWS_SECRET_ACCESS_KEY"] and os.environ["AWS_REGION_NAME"]:
            AI_service = "Bedrock"
            _DEFAULT_FALLBACK_MODELS = ["bedrock/anthropic.claude-v2:1"]

# If no AI service was available given the environment variables, print key info and exit.
if not AI_service:
    print_key_info()
    sys.exit(1)

def complete(client, args, user_prompt, **kwargs):
    """Send a single-message chat completion request via litellm.

    The `client` parameter is kept for interface compatibility with existing
    callers, but is no longer used now that litellm routes the request to the
    selected backend (OpenAI or Bedrock) itself.

    Returns the completion response object. On API errors, prints a short
    user-facing explanation and re-raises the exception for the caller.
    """
    try:
        completion = litellm.completion(
            model=args.llm,
            messages=[{"role": "user", "content": user_prompt}],
            **kwargs,
        )
        return completion
    except NotFoundError as e:
        # The model name is unknown, or this account has no access to it.
        print(f"'{args.llm}' either does not exist or you do not have access to it.")
        raise e
    except BadRequestError as e:
        print("Something is wrong with your prompt.")
        raise e
    except RateLimitError as e:
        print("You have exceeded a rate limit or have no remaining funds.")
        raise e
    except APITimeoutError as e:
        print("The API timed out.")
        print("You can increase the timeout with the --timeout option.")
        raise e

Expand Down Expand Up @@ -77,17 +122,14 @@ def evaluate_diff(client, args, stdin):
return completion


_DEFAULT_FALLBACK_MODELS = ["gpt-4", "gpt-3.5-turbo"]


def evaluate_with_fallback(client, args, stdin):
    """Evaluate `stdin`, trying each fallback model in order.

    Iterates over _DEFAULT_FALLBACK_MODELS, announcing each fallback after
    the first attempt, and returns the first successful evaluation.
    Returns None implicitly if every model raises NotFoundError.
    """
    for i, model in enumerate(_DEFAULT_FALLBACK_MODELS):
        if i != 0:
            print(f"Falling back to {model}...")
        args.llm = model
        try:
            return evaluate(client, args, stdin)
        except NotFoundError:
            # Model unavailable for this account; try the next one.
            continue


Expand Down Expand Up @@ -136,28 +178,33 @@ def main(args: argparse.Namespace) -> None:
print("CWhy")
print("==================================================")
try:
client = openai.OpenAI(timeout=args.timeout)
except openai.OpenAIError:
print("You need an OpenAI key to use this tool.")
print("You can get a key here: https://platform.openai.com/api-keys")
print("Set the environment variable OPENAI_API_KEY to your key value.")
result = evaluate(None, args, process.stderr if process.stderr else process.stdout)
print(result)
except OpenAIError:
print_key_info()
sys.exit(1)
print(evaluate(client, args, process.stderr if process.stderr else process.stdout))
print("==================================================")

sys.exit(process.returncode)


def evaluate_text_prompt(client, args, prompt, wrap=True, **kwargs):

completion = complete(client, args, prompt, **kwargs)

msg = f"Analysis from {AI_service}:"
print(msg)
print("-" * len(msg))
text = completion.choices[0].message.content

if wrap:
text = llm_utils.word_wrap_except_code_blocks(text)

cost = llm_utils.calculate_cost(
completion.usage.prompt_tokens, completion.usage.completion_tokens, args.llm
)
cost = litellm.completion_cost(completion_response=completion)
# llm_utils.calculate_cost(
# completion.usage.prompt_tokens, completion.usage.completion_tokens, args.llm
# )

text += "\n\n"
text += f"(Total cost: approximately ${cost:.2f} USD.)"

Expand Down

0 comments on commit a29d555

Please sign in to comment.