ka1kuk commited on
Commit
b0a18d0
1 Parent(s): 5d9f8e7

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +15 -41
main.py CHANGED
@@ -1,44 +1,18 @@
1
- import gradio as gr
2
- from langchain_experimental.llms.ollama_functions import OllamaFunctions
3
 
4
- # Initialize the Ollama model
5
- model = OllamaFunctions(model="gemma:7b")
6
- model = model.bind(
7
- functions=[
8
- {
9
- "name": "get_current_weather",
10
- "description": "Get the current weather in a given location",
11
- "parameters": {
12
- "type": "object",
13
- "properties": {
14
- "location": {
15
- "type": "string",
16
- "description": "The city and state, e.g., San Francisco, CA",
17
- },
18
- "unit": {
19
- "type": "string",
20
- "enum": ["celsius", "fahrenheit"],
21
- },
22
- },
23
- "required": ["location"],
24
- },
25
- }
26
- ],
27
- function_call={"name": "get_current_weather"},
28
- )
29
 
30
- def get_weather(location, unit):
31
- user_input = f"{location}, {unit}"
32
- result = model.invoke(user_input)
33
- return result
 
 
34
 
35
- iface = gr.Interface(
36
- fn=get_weather,
37
- inputs=[gr.Textbox(label="Location (e.g., 'San Francisco, CA')"), gr.Radio(choices=["celsius", "fahrenheit"], label="Unit")],
38
- outputs=gr.Text(label="Weather Information"),
39
- title="Weather Information",
40
- description="Enter a location and select the unit to get the current weather.",
41
- allow_flagging="never"
42
- )
43
-
44
- iface.launch()
 
1
+ import subprocess
 
2
 
3
def run_command(command):
    """Run *command* through the shell, echoing its output.

    On success prints the command's stdout; on a non-zero exit status
    prints the captured stderr instead of raising to the caller.

    Args:
        command: Shell command line to execute (passed to ``sh -c``).
            Only call this with trusted, hard-coded strings — ``shell=True``
            would make untrusted input a shell-injection risk.

    Returns:
        True if the command exited with status 0, False otherwise.
        (Previously returned None; callers that ignore the result are
        unaffected.)
    """
    try:
        # capture_output=True is the idiomatic spelling of
        # stdout=PIPE, stderr=PIPE; check=True raises on non-zero exit.
        result = subprocess.run(
            command,
            shell=True,
            check=True,
            text=True,
            capture_output=True,
        )
        print(f"Success: {result.stdout}")
        return True
    except subprocess.CalledProcessError as e:
        # Deliberate best-effort: report the failure but do not propagate.
        print(f"Error: {e.stderr}")
        return False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
def main(model=None):
    """Install Ollama and start its server.

    Bug fix: the ``__main__`` guard calls ``main()`` with no arguments,
    but the parameter had no default, so startup raised TypeError.
    ``model`` now defaults to None; it is currently unused and kept only
    so existing callers that pass a value keep working.

    Args:
        model: Optional model name; accepted but not used by this script.
    """
    # Download and run the official Ollama install script.
    run_command("curl -fsSL https://ollama.com/install.sh | sh")

    # Start the Ollama server. NOTE(review): `ollama serve` runs in the
    # foreground, so this call blocks until the server process exits.
    run_command("ollama serve")
16
 
17
# Script entry point: run the installer/server workflow when executed
# directly (not when imported as a module).
if __name__ == "__main__":
    main()