Lord-Raven committed
Commit 9afecae · 1 Parent(s): d64cd1f

Some cleanup.

Files changed (1):
  app.py +2 -23
app.py CHANGED
@@ -35,7 +35,6 @@ tokenizer_name = "MoritzLaurer/deberta-v3-base-zeroshot-v2.0"
 
 classifier_cpu = pipeline(task="zero-shot-classification", model=model_name, tokenizer=tokenizer_name)
 classifier_gpu = pipeline(task="zero-shot-classification", model=model_name, tokenizer=tokenizer_name, device="cuda:0")
-# classifier = pipeline(task="zero-shot-classification", model=model_name, tokenizer=tokenizer_name)
 
 def classify(data_string, request: gradio.Request):
     if request:
@@ -55,12 +54,12 @@ def classify(data_string, request: gradio.Request):
     try:
         if 'cpu' not in data:
            result = zero_shot_classification_gpu(data)
-            print(f"GPU Classification @ [{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] took {time.time() - start_time}.")
+            print(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - GPU Classification took {time.time() - start_time}.")
     except Exception as e:
         print(f"GPU classification failed: {e}\nFall back to CPU.")
     if not result:
         result = zero_shot_classification_cpu(data)
-        print(f"CPU Classification @ [{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] took {time.time() - start_time}.")
+        print(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} - CPU Classification took {time.time() - start_time}.")
     return json.dumps(result)
 
 def zero_shot_classification_cpu(data):
@@ -73,22 +72,6 @@ def zero_shot_classification_gpu(data):
 def create_sequences(data):
     return [data['sequence'] + '\n' + data['hypothesis_template'].format(label) for label in data['candidate_labels']]
 
-# def few_shot_classification(data):
-#     sequences = create_sequences(data)
-#     print(sequences)
-#     # results = onnx_few_shot_model(sequences)
-#     probs = onnx_few_shot_model.predict_proba(sequences)
-#     scores = [true[0] for true in probs]
-
-#     composite = list(zip(scores, data['candidate_labels']))
-#     composite = sorted(composite, key=lambda x: x[0], reverse=True)
-
-#     labels, scores = zip(*composite)
-
-#     response_dict = {'scores': scores, 'labels': labels}
-#     print(response_dict)
-#     response_string = json.dumps(response_dict)
-#     return response_strin
 gradio_interface = gradio.Interface(
     fn = classify,
     inputs = gradio.Textbox(label="JSON Input"),
@@ -99,7 +82,3 @@ app.mount("/gradio", gradio_interface)
 
 # app = gradio.mount_gradio_app(app, gradio_interface, path="/gradio")
 gradio_interface.launch()
-
-# if __name__ == "__main__":
-#     import uvicorn
-#     uvicorn.run(app, host="0.0.0.0", port=8000)
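For reference, here is a minimal sketch of the JSON string that classify() appears to expect, inferred only from the fields this diff touches (data['sequence'], data['hypothesis_template'], data['candidate_labels'], and the optional 'cpu' key that skips the GPU pipeline). The example values are hypothetical, not part of the repository.

import json

# Hypothetical request for the "JSON Input" textbox wired to classify().
# Field names are inferred from app.py as shown in this diff; values are
# illustrative only.
payload = {
    "sequence": "I booked a table for two at the new Italian place.",
    "hypothesis_template": "This text is about {}.",
    "candidate_labels": ["dining", "travel", "sports"],
    # Including a 'cpu' key would bypass the CUDA pipeline and use classifier_cpu.
    # "cpu": True,
}

data_string = json.dumps(payload)  # classify() receives this JSON string

With the space running, a string like this could be pasted into the Gradio textbox mounted at /gradio; classify() should return the classification result as a JSON string (return json.dumps(result)).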