redfernstech commited on
Commit
24001ab
·
verified ·
1 Parent(s): 6d30421

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +304 -146
app.py CHANGED
@@ -1,56 +1,249 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import os
2
  import time
3
- from fastapi import FastAPI,Request
4
- from fastapi.responses import HTMLResponse
 
5
  from fastapi.staticfiles import StaticFiles
6
- from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
7
- from llama_index.embeddings.huggingface import HuggingFaceEmbedding
8
  from pydantic import BaseModel
9
- from fastapi.responses import JSONResponse
10
- import uuid # for generating unique IDs
11
- import datetime
12
  from fastapi.middleware.cors import CORSMiddleware
13
  from fastapi.templating import Jinja2Templates
14
- from huggingface_hub import InferenceClient
15
- import json
16
- import re
17
- from gradio_client import Client
18
  from simple_salesforce import Salesforce, SalesforceLogin
19
- from llama_index.llms.huggingface import HuggingFaceLLM
20
- # from llama_index.llms.huggingface import HuggingFaceInferenceAPI
21
-
22
 
23
  # Define Pydantic model for incoming request body
24
  class MessageRequest(BaseModel):
25
  message: str
26
- repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
27
- llm_client = InferenceClient(
28
- model=repo_id,
29
- token=os.getenv("HF_TOKEN"),
30
- )
31
-
32
-
33
- os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
34
- username = os.getenv("username")
35
- password = os.getenv("password")
36
- security_token = os.getenv("security_token")
37
- domain = os.getenv("domain")# Using sandbox environment
38
- session_id, sf_instance = SalesforceLogin(username=username, password=password, security_token=security_token, domain=domain)
39
-
40
- # Create Salesforce object
41
- sf = Salesforce(instance=sf_instance, session_id=session_id)
42
 
 
43
  app = FastAPI()
44
 
45
-
46
- @app.middleware("http")
47
- async def add_security_headers(request: Request, call_next):
48
- response = await call_next(request)
49
- response.headers["Content-Security-Policy"] = "frame-ancestors *; frame-src *; object-src *;"
50
- response.headers["X-Frame-Options"] = "ALLOWALL"
51
- return response
52
-
53
-
54
  # Allow CORS requests from any domain
55
  app.add_middleware(
56
  CORSMiddleware,
@@ -60,157 +253,109 @@ app.add_middleware(
60
  allow_headers=["*"],
61
  )
62
 
63
-
64
-
65
-
66
- @app.get("/favicon.ico")
67
- async def favicon():
68
- return HTMLResponse("") # or serve a real favicon if you have one
69
-
70
-
71
  app.mount("/static", StaticFiles(directory="static"), name="static")
72
-
73
  templates = Jinja2Templates(directory="static")
74
- # Configure Llama index settings
75
- Settings.llm = HuggingFaceLLM(
76
- model_name="meta-llama/Meta-Llama-3-8B-Instruct",
77
- tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
78
- context_window=3000,
79
- token=os.getenv("HF_TOKEN"),
80
- max_new_tokens=512,
81
- generate_kwargs={"temperature": 0.1},
82
- )
83
 
84
- Settings.embed_model = HuggingFaceEmbedding(
85
- model_name="BAAI/bge-small-en-v1.5"
86
- )
87
 
88
- PERSIST_DIR = "db"
89
- PDF_DIRECTORY = 'data'
 
 
 
 
 
90
 
91
- # Ensure directories exist
92
- os.makedirs(PDF_DIRECTORY, exist_ok=True)
93
- os.makedirs(PERSIST_DIR, exist_ok=True)
94
  chat_history = []
95
  current_chat_history = []
96
- def data_ingestion_from_directory():
97
- documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
98
- storage_context = StorageContext.from_defaults()
99
- index = VectorStoreIndex.from_documents(documents)
100
- index.storage_context.persist(persist_dir=PERSIST_DIR)
101
-
102
- def initialize():
103
- start_time = time.time()
104
- data_ingestion_from_directory() # Process PDF ingestion at startup
105
- print(f"Data ingestion time: {time.time() - start_time} seconds")
106
- def split_name(full_name):
107
- # Split the name by spaces
108
- words = full_name.strip().split()
109
-
110
- # Logic for determining first name and last name
111
- if len(words) == 1:
112
- first_name = ''
113
- last_name = words[0]
114
- elif len(words) == 2:
115
- first_name = words[0]
116
- last_name = words[1]
117
- else:
118
- first_name = words[0]
119
- last_name = ' '.join(words[1:])
120
-
121
- return first_name, last_name
122
-
123
- initialize() # Run initialization tasks
124
-
125
 
126
  def handle_query(query):
127
- chat_text_qa_msgs = [
128
- (
129
- "user",
130
- """
131
- You are the Clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. Give response within 10-15 words only
132
- {context_str}
133
- Question:
134
- {query_str}
135
- """
136
- )
137
- ]
138
- text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
139
-
140
- storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
141
- index = load_index_from_storage(storage_context)
142
  context_str = ""
143
  for past_query, response in reversed(current_chat_history):
144
  if past_query.strip():
145
  context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
146
 
 
 
 
147
 
148
- query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
149
- answer = query_engine.query(query)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
150
 
151
- if hasattr(answer, 'response'):
152
- response=answer.response
153
- elif isinstance(answer, dict) and 'response' in answer:
154
- response =answer['response']
155
- else:
156
- response ="Sorry, I couldn't find an answer."
157
- current_chat_history.append((query, response))
158
- return response
159
  @app.get("/ch/{id}", response_class=HTMLResponse)
160
  async def load_chat(request: Request, id: str):
161
  return templates.TemplateResponse("index.html", {"request": request, "user_id": id})
162
- # Route to save chat history
163
  @app.post("/hist/")
164
  async def save_chat_history(history: dict):
165
- # Check if 'userId' is present in the incoming dictionary
166
  user_id = history.get('userId')
167
- print(user_id)
168
-
169
- # Ensure user_id is defined before proceeding
170
  if user_id is None:
171
  return {"error": "userId is required"}, 400
172
 
173
- # Construct the chat history string
174
  hist = ''.join([f"'{entry['sender']}: {entry['message']}'\n" for entry in history['history']])
175
  hist = "You are a Redfernstech summarize model. Your aim is to use this conversation to identify user interests solely based on that conversation: " + hist
176
- print(hist)
177
-
178
- # Get the summarized result from the client model
179
- result = hist
180
 
181
  try:
182
- sf.Lead.update(user_id, {'Description': result})
183
  except Exception as e:
184
  return {"error": f"Failed to update lead: {str(e)}"}, 500
185
 
186
- return {"summary": result, "message": "Chat history saved"}
 
187
  @app.post("/webhook")
188
  async def receive_form_data(request: Request):
189
  form_data = await request.json()
190
- # Log in to Salesforce
191
  first_name, last_name = split_name(form_data['name'])
192
  data = {
193
- 'FirstName': first_name,
194
- 'LastName': last_name,
195
- 'Description': 'hii', # Static description
196
- 'Company': form_data['company'], # Assuming company is available in form_data
197
- 'Phone': form_data['phone'].strip(), # Phone from form data
198
- 'Email': form_data['email'], # Email from form data
199
  }
200
- a=sf.Lead.create(data)
201
- # Generate a unique ID (for tracking user)
202
  unique_id = a['id']
203
-
204
- # Here you can do something with form_data like saving it to a database
205
  print("Received form data:", form_data)
206
-
207
- # Send back the unique id to the frontend
208
  return JSONResponse({"id": unique_id})
209
 
210
  @app.post("/chat/")
211
  async def chat(request: MessageRequest):
212
- message = request.message # Access the message from the request body
213
- response = handle_query(message) # Process the message
214
  message_data = {
215
  "sender": "User",
216
  "message": message,
@@ -219,7 +364,20 @@ async def chat(request: MessageRequest):
219
  }
220
  chat_history.append(message_data)
221
  return {"response": response}
 
222
  @app.get("/")
223
  def read_root():
224
  return {"message": "Welcome to the API"}
225
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # import os
2
+ # import time
3
+ # from fastapi import FastAPI,Request
4
+ # from fastapi.responses import HTMLResponse
5
+ # from fastapi.staticfiles import StaticFiles
6
+ # from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
7
+ # from llama_index.embeddings.huggingface import HuggingFaceEmbedding
8
+ # from pydantic import BaseModel
9
+ # from fastapi.responses import JSONResponse
10
+ # import uuid # for generating unique IDs
11
+ # import datetime
12
+ # from fastapi.middleware.cors import CORSMiddleware
13
+ # from fastapi.templating import Jinja2Templates
14
+ # from huggingface_hub import InferenceClient
15
+ # import json
16
+ # import re
17
+ # from gradio_client import Client
18
+ # from simple_salesforce import Salesforce, SalesforceLogin
19
+ # from llama_index.llms.huggingface import HuggingFaceLLM
20
+ # # from llama_index.llms.huggingface import HuggingFaceInferenceAPI
21
+
22
+
23
+ # # Define Pydantic model for incoming request body
24
+ # class MessageRequest(BaseModel):
25
+ # message: str
26
+ # repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
27
+ # llm_client = InferenceClient(
28
+ # model=repo_id,
29
+ # token=os.getenv("HF_TOKEN"),
30
+ # )
31
+
32
+
33
+ # os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
34
+ # username = os.getenv("username")
35
+ # password = os.getenv("password")
36
+ # security_token = os.getenv("security_token")
37
+ # domain = os.getenv("domain")# Using sandbox environment
38
+ # session_id, sf_instance = SalesforceLogin(username=username, password=password, security_token=security_token, domain=domain)
39
+
40
+ # # Create Salesforce object
41
+ # sf = Salesforce(instance=sf_instance, session_id=session_id)
42
+
43
+ # app = FastAPI()
44
+
45
+
46
+ # @app.middleware("http")
47
+ # async def add_security_headers(request: Request, call_next):
48
+ # response = await call_next(request)
49
+ # response.headers["Content-Security-Policy"] = "frame-ancestors *; frame-src *; object-src *;"
50
+ # response.headers["X-Frame-Options"] = "ALLOWALL"
51
+ # return response
52
+
53
+
54
+ # # Allow CORS requests from any domain
55
+ # app.add_middleware(
56
+ # CORSMiddleware,
57
+ # allow_origins=["*"],
58
+ # allow_credentials=True,
59
+ # allow_methods=["*"],
60
+ # allow_headers=["*"],
61
+ # )
62
+
63
+
64
+
65
+
66
+ # @app.get("/favicon.ico")
67
+ # async def favicon():
68
+ # return HTMLResponse("") # or serve a real favicon if you have one
69
+
70
+
71
+ # app.mount("/static", StaticFiles(directory="static"), name="static")
72
+
73
+ # templates = Jinja2Templates(directory="static")
74
+ # # Configure Llama index settings
75
+ # Settings.llm = HuggingFaceLLM(
76
+ # model_name="meta-llama/Meta-Llama-3-8B-Instruct",
77
+ # tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
78
+ # context_window=3000,
79
+ # token=os.getenv("HF_TOKEN"),
80
+ # max_new_tokens=512,
81
+ # generate_kwargs={"temperature": 0.1},
82
+ # )
83
+
84
+ # Settings.embed_model = HuggingFaceEmbedding(
85
+ # model_name="BAAI/bge-small-en-v1.5"
86
+ # )
87
+
88
+ # PERSIST_DIR = "db"
89
+ # PDF_DIRECTORY = 'data'
90
+
91
+ # # Ensure directories exist
92
+ # os.makedirs(PDF_DIRECTORY, exist_ok=True)
93
+ # os.makedirs(PERSIST_DIR, exist_ok=True)
94
+ # chat_history = []
95
+ # current_chat_history = []
96
+ # def data_ingestion_from_directory():
97
+ # documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
98
+ # storage_context = StorageContext.from_defaults()
99
+ # index = VectorStoreIndex.from_documents(documents)
100
+ # index.storage_context.persist(persist_dir=PERSIST_DIR)
101
+
102
+ # def initialize():
103
+ # start_time = time.time()
104
+ # data_ingestion_from_directory() # Process PDF ingestion at startup
105
+ # print(f"Data ingestion time: {time.time() - start_time} seconds")
106
+ # def split_name(full_name):
107
+ # # Split the name by spaces
108
+ # words = full_name.strip().split()
109
+
110
+ # # Logic for determining first name and last name
111
+ # if len(words) == 1:
112
+ # first_name = ''
113
+ # last_name = words[0]
114
+ # elif len(words) == 2:
115
+ # first_name = words[0]
116
+ # last_name = words[1]
117
+ # else:
118
+ # first_name = words[0]
119
+ # last_name = ' '.join(words[1:])
120
+
121
+ # return first_name, last_name
122
+
123
+ # initialize() # Run initialization tasks
124
+
125
+
126
+ # def handle_query(query):
127
+ # chat_text_qa_msgs = [
128
+ # (
129
+ # "user",
130
+ # """
131
+ # You are the Clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. Give response within 10-15 words only
132
+ # {context_str}
133
+ # Question:
134
+ # {query_str}
135
+ # """
136
+ # )
137
+ # ]
138
+ # text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
139
+
140
+ # storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
141
+ # index = load_index_from_storage(storage_context)
142
+ # context_str = ""
143
+ # for past_query, response in reversed(current_chat_history):
144
+ # if past_query.strip():
145
+ # context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
146
+
147
+
148
+ # query_engine = index.as_query_engine(text_qa_template=text_qa_template, context_str=context_str)
149
+ # answer = query_engine.query(query)
150
+
151
+ # if hasattr(answer, 'response'):
152
+ # response=answer.response
153
+ # elif isinstance(answer, dict) and 'response' in answer:
154
+ # response =answer['response']
155
+ # else:
156
+ # response ="Sorry, I couldn't find an answer."
157
+ # current_chat_history.append((query, response))
158
+ # return response
159
+ # @app.get("/ch/{id}", response_class=HTMLResponse)
160
+ # async def load_chat(request: Request, id: str):
161
+ # return templates.TemplateResponse("index.html", {"request": request, "user_id": id})
162
+ # # Route to save chat history
163
+ # @app.post("/hist/")
164
+ # async def save_chat_history(history: dict):
165
+ # # Check if 'userId' is present in the incoming dictionary
166
+ # user_id = history.get('userId')
167
+ # print(user_id)
168
+
169
+ # # Ensure user_id is defined before proceeding
170
+ # if user_id is None:
171
+ # return {"error": "userId is required"}, 400
172
+
173
+ # # Construct the chat history string
174
+ # hist = ''.join([f"'{entry['sender']}: {entry['message']}'\n" for entry in history['history']])
175
+ # hist = "You are a Redfernstech summarize model. Your aim is to use this conversation to identify user interests solely based on that conversation: " + hist
176
+ # print(hist)
177
+
178
+ # # Get the summarized result from the client model
179
+ # result = hist
180
+
181
+ # try:
182
+ # sf.Lead.update(user_id, {'Description': result})
183
+ # except Exception as e:
184
+ # return {"error": f"Failed to update lead: {str(e)}"}, 500
185
+
186
+ # return {"summary": result, "message": "Chat history saved"}
187
+ # @app.post("/webhook")
188
+ # async def receive_form_data(request: Request):
189
+ # form_data = await request.json()
190
+ # # Log in to Salesforce
191
+ # first_name, last_name = split_name(form_data['name'])
192
+ # data = {
193
+ # 'FirstName': first_name,
194
+ # 'LastName': last_name,
195
+ # 'Description': 'hii', # Static description
196
+ # 'Company': form_data['company'], # Assuming company is available in form_data
197
+ # 'Phone': form_data['phone'].strip(), # Phone from form data
198
+ # 'Email': form_data['email'], # Email from form data
199
+ # }
200
+ # a=sf.Lead.create(data)
201
+ # # Generate a unique ID (for tracking user)
202
+ # unique_id = a['id']
203
+
204
+ # # Here you can do something with form_data like saving it to a database
205
+ # print("Received form data:", form_data)
206
+
207
+ # # Send back the unique id to the frontend
208
+ # return JSONResponse({"id": unique_id})
209
+
210
+ # @app.post("/chat/")
211
+ # async def chat(request: MessageRequest):
212
+ # message = request.message # Access the message from the request body
213
+ # response = handle_query(message) # Process the message
214
+ # message_data = {
215
+ # "sender": "User",
216
+ # "message": message,
217
+ # "response": response,
218
+ # "timestamp": datetime.datetime.now().isoformat()
219
+ # }
220
+ # chat_history.append(message_data)
221
+ # return {"response": response}
222
+ # @app.get("/")
223
+ # def read_root():
224
+ # return {"message": "Welcome to the API"}
225
+
226
import os
import time
import requests
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from pydantic import BaseModel
from fastapi.middleware.cors import CORSMiddleware
from fastapi.templating import Jinja2Templates
from simple_salesforce import Salesforce, SalesforceLogin
import uuid
import datetime
import json


# Pydantic model for the /chat/ request body.
class MessageRequest(BaseModel):
    message: str


# Initialize FastAPI app
app = FastAPI()

# Allow CORS requests from any domain
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Mount static files (also used as the Jinja2 template directory).
app.mount("/static", StaticFiles(directory="static"), name="static")
templates = Jinja2Templates(directory="static")

# Configure Groq API.
# FIX: "api.chatgroq.com" is not a real host (the original comment itself said
# "Replace with actual endpoint"). Groq's OpenAI-compatible chat-completions
# endpoint lives at api.groq.com.
CHATGROQ_API_URL = "https://api.groq.com/openai/v1/chat/completions"
CHATGROQ_API_KEY = os.getenv("CHATGROQ_API_KEY")

# Salesforce credentials (read from environment; login happens at import time).
username = os.getenv("username")
password = os.getenv("password")
security_token = os.getenv("security_token")
domain = os.getenv("domain")  # Using sandbox environment
session_id, sf_instance = SalesforceLogin(
    username=username,
    password=password,
    security_token=security_token,
    domain=domain,
)
sf = Salesforce(instance=sf_instance, session_id=session_id)

# In-memory chat history (process-local; lost on restart).
chat_history = []
current_chat_history = []
 
276
def handle_query(query):
    """Answer *query* via the Groq chat-completions API.

    Previous (query, response) pairs from ``current_chat_history`` are folded
    into the prompt as conversational context. On any API failure a fallback
    message is returned so callers never see an exception.

    Args:
        query: The user's message as plain text.

    Returns:
        The model's reply (str), or a fallback apology string on error.
    """
    # Prepare context from chat history (most recent exchange first).
    context_str = ""
    for past_query, past_response in reversed(current_chat_history):
        if past_query.strip():
            context_str += f"User asked: '{past_query}'\nBot answered: '{past_response}'\n"

    # Construct the prompt for the Groq model.
    prompt = f"""
    You are the Clara Redfernstech chatbot. Your goal is to provide accurate, professional, and helpful answers to user queries based on the company's data. Always ensure your responses are clear and concise. Give response within 10-15 words only.

    Context:
    {context_str}

    Question:
    {query}
    """

    headers = {
        "Authorization": f"Bearer {CHATGROQ_API_KEY}",
        "Content-Type": "application/json",
    }
    payload = {
        "model": "meta-llama/llama-4-maverick-17b-128e-instruct",
        "messages": [{"role": "user", "content": prompt}],
        "max_tokens": 50,  # Keep replies short (prompt asks for 10-15 words).
        "temperature": 0.1,
    }

    try:
        # FIX: add a timeout so a stalled API call cannot hang the request
        # forever, and raise on HTTP error statuses so failures are reported
        # as such instead of surfacing as a KeyError while parsing the body.
        api_response = requests.post(
            CHATGROQ_API_URL, headers=headers, json=payload, timeout=30
        )
        api_response.raise_for_status()
        response_data = api_response.json()
        response_text = response_data["choices"][0]["message"]["content"].strip()
    except Exception as e:
        # Best-effort fallback: log and return a canned answer.
        print(f"Error querying ChatGroq: {e}")
        response_text = "Sorry, I couldn't find an answer."

    # Update chat history so the next query sees this exchange as context.
    current_chat_history.append((query, response_text))
    return response_text
317
 
 
 
 
 
 
 
 
 
318
@app.get("/ch/{id}", response_class=HTMLResponse)
async def load_chat(request: Request, id: str):
    """Render the chat UI page, passing the user id through to the template."""
    template_context = {"request": request, "user_id": id}
    return templates.TemplateResponse("index.html", template_context)
321
+
322
@app.post("/hist/")
async def save_chat_history(history: dict):
    """Store a chat transcript on the matching Salesforce Lead.

    Expects ``history`` to contain ``userId`` (the Lead id) and ``history``
    (a list of ``{"sender": ..., "message": ...}`` entries). The transcript is
    prefixed with a summarization instruction and written to the Lead's
    Description field.

    Returns the stored text on success, or a JSON error with an appropriate
    HTTP status code on failure.
    """
    user_id = history.get('userId')
    if user_id is None:
        # FIX: returning a (dict, int) tuple makes FastAPI serialize it as a
        # 200 JSON array; use JSONResponse so the client sees a real 400.
        return JSONResponse({"error": "userId is required"}, status_code=400)

    # Flatten the transcript into one string, one quoted line per entry.
    entries = history.get('history') or []
    hist = ''.join([f"'{entry['sender']}: {entry['message']}'\n" for entry in entries])
    hist = "You are a Redfernstech summarize model. Your aim is to use this conversation to identify user interests solely based on that conversation: " + hist

    try:
        sf.Lead.update(user_id, {'Description': hist})
    except Exception as e:
        # FIX: same tuple-vs-status problem as above — return a real 500.
        return JSONResponse({"error": f"Failed to update lead: {str(e)}"}, status_code=500)

    return {"summary": hist, "message": "Chat history saved"}
337
+
338
@app.post("/webhook")
async def receive_form_data(request: Request):
    """Create a Salesforce Lead from webhook form data and return its id.

    Expects a JSON body with ``name``, ``company``, ``phone`` and ``email``.
    """
    form_data = await request.json()
    first_name, last_name = split_name(form_data['name'])
    lead_fields = {
        'FirstName': first_name,
        'LastName': last_name,
        'Description': 'hii',  # Static description
        'Company': form_data['company'],  # Assuming company is available in form_data
        'Phone': form_data['phone'].strip(),  # Phone from form data
        'Email': form_data['email'],  # Email from form data
    }
    created = sf.Lead.create(lead_fields)
    # The Salesforce record id is returned to the frontend for tracking.
    unique_id = created['id']
    print("Received form data:", form_data)
    return JSONResponse({"id": unique_id})
354
 
355
@app.post("/chat/")
async def chat(request: MessageRequest):
    """Answer a user message and log the exchange in the in-memory history."""
    message = request.message
    response = handle_query(message)
    # Record the exchange with a timestamp for later inspection.
    chat_history.append({
        "sender": "User",
        "message": message,
        "response": response,
        "timestamp": datetime.datetime.now().isoformat()
    })
    return {"response": response}
367
+
368
@app.get("/")
def read_root():
    """Root landing endpoint; confirms the API is up."""
    greeting = {"message": "Welcome to the API"}
    return greeting
371
 
372
def split_name(full_name):
    """Split a full name into a ``(first_name, last_name)`` pair.

    A single word becomes the last name with an empty first name
    (Salesforce requires LastName); with two words they map directly;
    with more, everything after the first word is joined into the last name.

    Args:
        full_name: The person's full name; surrounding whitespace is ignored.

    Returns:
        Tuple of (first_name, last_name) strings.
    """
    words = full_name.strip().split()
    if not words:
        # FIX: empty/whitespace-only input previously fell into the final
        # branch and raised IndexError; return empty parts instead.
        return '', ''
    if len(words) == 1:
        return '', words[0]
    return words[0], ' '.join(words[1:])