kevinhug committed
Commit 669119c · 1 Parent(s): 3b88140
Files changed (4)
  1. aiSum.py +37 -0
  2. app.py +15 -0
  3. requirements.txt +1 -0
  4. xgb/__init__.py +0 -0
aiSum.py ADDED
@@ -0,0 +1,37 @@
+ from transformers import PegasusTokenizer, PegasusForConditionalGeneration, TFPegasusForConditionalGeneration
+
+ # Load the model and the tokenizer
+ model_name = "human-centered-summarization/financial-summarization-pegasus"
+ tokenizer = PegasusTokenizer.from_pretrained(model_name)
+ model = PegasusForConditionalGeneration.from_pretrained(model_name)  # To use the TensorFlow model instead,
+ # replace with TFPegasusForConditionalGeneration
+
+
+ # Some text to summarize
+ text_to_summarize = "Customer service was terrible. Called the number for accounts and forced to listen to advertisements from their partners with no escape. When it was finally over it just went to a loop with a number to call for more promotional offers. Called a different number and got transferred from a human back to their answering service-- which hung up on me."
+
+ class Sum():
+     def __init__(self):
+         pass
+
+     @staticmethod
+     def summarize(text_to_summarize):
+         # Tokenize the text
+         # To run in TensorFlow, pass return_tensors="tf" instead
+         input_ids = tokenizer(text_to_summarize, return_tensors="pt").input_ids
+
+         # Generate the summary (beam search here; any other decoding strategy works too)
+         output = model.generate(
+             input_ids,
+             max_length=32,
+             num_beams=5,
+             early_stopping=True
+         )
+
+         # Return the decoded summary
+         # print(tokenizer.decode(output[0], skip_special_tokens=True))
+         return tokenizer.decode(output[0], skip_special_tokens=True)
+ # Example output from the model card: Saudi bank to pay a 3.5% premium to Samba share price. Gulf region’s third-largest lender will have total assets of $220 billion
+
+ if __name__ == "__main__":
+     print(Sum().summarize(text_to_summarize))
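
Editor's note: the comments in aiSum.py point at a TensorFlow alternative. A minimal sketch of that variant, assuming TensorFlow is installed and the checkpoint loads into the TF class (from_pt=True may be needed if only PyTorch weights are published for this model):

# Editor's sketch, not part of this commit: the TensorFlow path described in aiSum.py's comments.
from transformers import PegasusTokenizer, TFPegasusForConditionalGeneration

model_name = "human-centered-summarization/financial-summarization-pegasus"
tokenizer = PegasusTokenizer.from_pretrained(model_name)
# If only PyTorch weights exist for this checkpoint, add from_pt=True (assumption).
tf_model = TFPegasusForConditionalGeneration.from_pretrained(model_name)

def summarize_tf(text):
    # Same generation settings as aiSum.py, but with TensorFlow tensors
    input_ids = tokenizer(text, return_tensors="tf").input_ids
    output = tf_model.generate(input_ids, max_length=32, num_beams=5, early_stopping=True)
    return tokenizer.decode(output[0], skip_special_tokens=True)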
app.py CHANGED
@@ -11,7 +11,12 @@ https://hits.seeyoufarm.com/
  PORTFOLIO OPTIMIZATION
  '''
  from aiOpt import Asset
+ from aiSum import Sum
  import numpy as np
+
+ def summarize(text):
+     return Sum().summarize(text)
+
  def optimize(cost, prob, its):
      s = Asset(np.asfarray(cost.split()),
                np.asfarray(prob.split()))
@@ -220,6 +225,16 @@ With no need for jargon, SSDS delivers tangible value to our fintech operations.
  tuning the distance for use case
  """)

+ with gr.Tab("Generative AI Summarization"):
+     in_sum = gr.Textbox(placeholder="Customer service was terrible. Called the number for accounts and forced to listen to advertisements from their partners with no escape. When it was finally over it just went to a loop with a number to call for more promotional offers. Called a different number and got transferred from a human back to their answering service-- which hung up on me.",
+                         label="Long Text",
+                         info="Summarization"
+                         )
+     out_sum = gr.JSON(label="Summarized Verbatim")
+
+     btn_sum = gr.Button("Find Similar Verbatim")
+     btn_sum.click(fn=summarize, inputs=in_sum, outputs=out_sum)
+
  with gr.Tab("Explainable AI"):
      df=pd.read_csv("./xgb/re.csv")

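Editor's note: the new tab's wiring can be exercised on its own. A minimal standalone sketch (hypothetical demo_sum.py, not part of this commit), assuming gradio is installed:

# Hypothetical demo_sum.py: the same wiring as the new Gradio tab, in isolation.
import gradio as gr
from aiSum import Sum

def summarize(text):
    return Sum().summarize(text)

with gr.Blocks() as demo:
    in_sum = gr.Textbox(label="Long Text", info="Summarization")
    out_sum = gr.JSON(label="Summarized Verbatim")  # renders the returned string as JSON
    btn_sum = gr.Button("Summarize")
    btn_sum.click(fn=summarize, inputs=in_sum, outputs=out_sum)

if __name__ == "__main__":
    demo.launch()

Since summarize returns a plain string, a gr.Textbox output would render it more naturally than gr.JSON; the button label in app.py, "Find Similar Verbatim", also looks like a copy-paste from a similarity tab, so the sketch uses "Summarize".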
requirements.txt CHANGED
@@ -5,3 +5,4 @@ pandas==2.1.3
  #yfinance==0.2.31
  #scikit-learn
  plotly
+ transformers
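
Editor's note: only transformers is added here, but aiSum.py tokenizes with return_tensors="pt", so the torch backend (and gradio for app.py) must be available at runtime, e.g. from the unshown top of requirements.txt or the hosting image (assumption). An illustrative check:

# Illustrative dependency check, not part of the commit: aiSum.py needs torch
# because it builds PyTorch tensors via return_tensors="pt".
import importlib.util

for pkg in ("transformers", "torch", "gradio"):
    found = importlib.util.find_spec(pkg) is not None
    print(f"{pkg}: {'installed' if found else 'missing'}")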
xgb/__init__.py ADDED
(empty file)