-
Notifications
You must be signed in to change notification settings - Fork 2
/
util.py
140 lines (116 loc) · 5.91 KB
/
util.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
import openai
import json
import os
from function_call import *
# Tickers currently exposed in the demo UI (symbol -> display name).
TICKERS = {"GM": "General Motors"}
# Load OpenAI credentials at import time from a local credentials.json.
# Expects keys "org_id" and "openai_api"; raises FileNotFoundError /
# KeyError if the file or keys are missing.
with open("credentials.json") as f:
    creds = json.load(f)
openai.organization = creds["org_id"]
openai.api_key = creds["openai_api"]
# Also export the key via the environment for libraries that read it there.
os.environ["OPENAI_API_KEY"] = creds["openai_api"]
def initialize_state(st):
    """Seed the top-level Streamlit session-state keys if absent.

    Ensures ``tickers`` (list), ``current_ticker`` (str) and
    ``ticker_states`` (dict) exist; existing values are left untouched.
    """
    defaults = {
        'tickers': [],
        'current_ticker': '',
        'ticker_states': {},
    }
    for key, initial in defaults.items():
        if key not in st.session_state:
            st.session_state[key] = initial
def initialize_ticker(st, ticker):
    """Seed the per-ticker conversation state if absent.

    Creates ``ticker_states[ticker]`` with empty ``generated``/``past``
    lists and a ``messages`` history holding the system prompt; values
    that already exist are left untouched.
    """
    state = st.session_state['ticker_states'].setdefault(ticker, {})
    state.setdefault('generated', [])
    state.setdefault('past', [])
    state.setdefault('messages', [
        {"role": "system", "content": "You are a helpful assistant."}
    ])
def reset_state(st, ticker):
    """Discard all stored state for *ticker* and clear the selection.

    Removes the ticker's conversation state and its entry in the ticker
    list, then resets ``current_ticker``.  Unlike the original
    (``del states[ticker]`` + ``del tickers[tickers.index(ticker)]``),
    an unknown *ticker* is a harmless no-op instead of raising
    KeyError/ValueError.
    """
    st.session_state['ticker_states'].pop(ticker, None)
    tickers = st.session_state['tickers']
    if ticker in tickers:
        tickers.remove(ticker)
    st.session_state['current_ticker'] = ''
def generate_response(prompt, ticker, st):
    """Send *prompt* to the chat model within *ticker*'s conversation.

    Appends the user message to the ticker's history, lets GPT optionally
    call one of the registered tool functions, and returns either:

    * the assistant's text reply (no tool call, or a text-producing tool), or
    * ``[fig]`` — a single-element list with the figure a charting tool made.

    Parameters
    ----------
    prompt : str
        The user's message.
    ticker : str
        Key into ``st.session_state['ticker_states']``; must already be
        initialised via ``initialize_ticker``.
    st : module
        The streamlit module (session-state holder).
    """
    messages = st.session_state['ticker_states'][ticker]['messages']
    # Step 1: send the conversation and available functions to GPT.
    messages.append({"role": "user", "content": prompt})
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo-0613",
        functions=FUNCTIONS,
        function_call="auto",
        messages=messages,
    )
    response_message = response["choices"][0]["message"]
    # Step 2: check if GPT wanted to call a function.
    if response_message.get("function_call"):
        # Step 3: call the function.
        # Note: the model's JSON arguments may be invalid; json.loads
        # below raises json.JSONDecodeError in that case.
        available_functions = {
            "create_price_volume_chart": create_price_volume_chart,
            "create_price_chart": create_price_chart,
            "create_volume_chart": create_volume_chart,
            "context_fetcher": context_fetcher,
            "financial_reports_answerer": financial_reports_answerer,
            "create_revenue_comparator": create_revenue_comparator,
            "create_growthchart": create_growthchart,
            "universe_correlator": universe_correlator,
        }
        function_name = response_message["function_call"]["name"]
        # Raises KeyError if the model invents an unregistered function.
        function_to_call = available_functions[function_name]
        function_args = json.loads(response_message["function_call"]["arguments"])
        # BUG FIX: the original tested `function_name in ("context_fetcher")`
        # etc. — `("x")` is a plain string, so `in` was a substring test,
        # not tuple membership.  Use exact `==` / real tuples instead.
        fig = None
        output = None
        if function_name in ("create_price_volume_chart", "create_price_chart",
                             "create_volume_chart", "create_growthchart"):
            function_response, fig = function_to_call(
                ticker=function_args.get("ticker"),
                name=function_args.get("name"),
            )
        elif function_name == "context_fetcher":
            function_response = function_to_call(
                ticker=function_args.get("ticker"),
            )
        elif function_name == "financial_reports_answerer":
            output, function_response = function_to_call(
                ticker=function_args.get("ticker"),
                prompt=function_args.get("prompt"),
            )
        elif function_name == "create_revenue_comparator":
            function_response, fig = function_to_call(
                ticker1=function_args.get("ticker1"),
                ticker2=function_args.get("ticker2"),
            )
        elif function_name == "universe_correlator":
            # "tickers" arrives as a JSON-encoded list inside the args.
            function_response, fig = function_to_call(
                tickers=json.loads(function_args.get("tickers")),
                sector=function_args.get("sector"),
                hierarchical=function_args.get("hierarchical"),
            )
        # Step 4: send the function call and its response back to GPT so it
        # can produce a reply that has seen the tool output.
        messages.append(response_message)
        messages.append(
            {
                "role": "function",
                "name": function_name,
                "content": function_response,
            }
        )
        response_message2 = openai.ChatCompletion.create(
            model="gpt-3.5-turbo-0613",
            messages=messages,
        )["choices"][0]["message"]
        if function_name == "context_fetcher":
            messages.append({"role": "assistant", "content": response_message2.content})
            return response_message2.content
        if function_name == "financial_reports_answerer":
            # Deliberately NOT fed back into the message history: the model
            # can hallucinate when re-summarising report answers.
            return output
        # All remaining registered functions produce a chart.
        return [fig]
    # No function call: record and return the plain assistant reply.
    messages.append({"role": "assistant", "content": response_message.content})
    return response_message.content