pvanand committed on
Commit
cc281d5
1 Parent(s): 08238c2

Update helper_functions_api.py

Browse files
Files changed (1) hide show
  1. helper_functions_api.py +15 -19
helper_functions_api.py CHANGED
@@ -65,6 +65,7 @@ from brave import Brave
65
  from fuzzy_json import loads
66
  from half_json.core import JSONFixer
67
  from openai import OpenAI
 
68
 
69
  llm_default_small = "llama3-8b-8192"
70
  llm_default_medium = "llama3-70b-8192"
@@ -82,25 +83,20 @@ def limit_tokens(input_string, token_limit=8000):
82
  """
83
  return encoding.decode(encoding.encode(input_string)[:token_limit])
84
 
85
- def together_response(message, model=llm_default_small, SysPrompt = SysPromptDefault,temperature=0.2):
86
-
87
- client = OpenAI(
88
- api_key=GROQ_API_KEY,
89
- base_url="https://gateway.hconeai.com/openai/v1",
90
- default_headers={
91
- "Helicone-Auth": f"Bearer {HELICON_API_KEY}",
92
- "Helicone-Target-Url": "https://api.groq.com"
93
- }
94
- )
95
-
96
- messages=[{"role": "system", "content": SysPrompt},{"role": "user", "content": message}]
97
-
98
- response = client.chat.completions.create(
99
- model=model,
100
- messages=messages,
101
- temperature=temperature,
102
- )
103
- return response.choices[0].message.content
104
 
105
 
106
  def json_from_text(text):
 
65
  from fuzzy_json import loads
66
  from half_json.core import JSONFixer
67
  from openai import OpenAI
68
+ from together import Together
69
 
70
  llm_default_small = "llama3-8b-8192"
71
  llm_default_medium = "llama3-70b-8192"
 
83
  """
84
  return encoding.decode(encoding.encode(input_string)[:token_limit])
85
 
86
def together_response(message, model = "meta-llama/Llama-3-8b-chat-hf", SysPrompt = SysPromptDefault, temperature=0.2):
    """Send one chat turn to the Together API (proxied through Helicone) and
    return the assistant's reply text.

    Args:
        message: User-turn content sent as the ``user`` role.
        model: Together model identifier to query.
        SysPrompt: System-turn content; defaults to the module-level
            ``SysPromptDefault``.
        temperature: Sampling temperature forwarded to the completion call.

    Returns:
        The content string of the first returned choice.
    """
    # Helicone acts as a logging proxy in front of Together; auth for the
    # proxy itself travels in this extra header.
    proxy_headers = {"Helicone-Auth": f"Bearer {HELICON_API_KEY}"}
    api_client = OpenAI(
        api_key=TOGETHER_API_KEY,
        base_url="https://together.hconeai.com/v1",
        default_headers=proxy_headers,
    )

    chat_messages = [
        {"role": "system", "content": SysPrompt},
        {"role": "user", "content": message},
    ]

    completion = api_client.chat.completions.create(
        model=model,
        messages=chat_messages,
        temperature=temperature,
    )
    return completion.choices[0].message.content
 
 
 
 
 
100
 
101
 
102
  def json_from_text(text):