amir22010 committed
Commit 3d14d30 · 1 Parent(s): 99872c6

added langchain open ai

Files changed (2)
  1. app.py +9 -5
  2. requirements.txt +2 -1
app.py CHANGED
@@ -7,6 +7,9 @@ import wave
 import uuid
 from nemoguardrails import LLMRails, RailsConfig
 from GoogleTTS import GoogleTTS
+from langchain_openai import ChatOpenAI
+
+os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
 #tts
 #import torchaudio
@@ -51,9 +54,9 @@ def combine_audio_files(audio_files):
     return outfile
 
 #client
-client = Groq(
-    api_key=os.getenv("GROQ_API_KEY"),
-)
+# client = Groq(
+#     api_key=os.getenv("GROQ_API_KEY"),
+# )
 
 llm = Llama.from_pretrained(
     repo_id="amir22010/fine_tuned_product_marketing_email_gemma_2_9b_q4_k_m", #custom fine tuned model
@@ -76,7 +79,7 @@ marketing_email_prompt = """Below is a product and description, please write a m
 ### Marketing Email:
 {}"""
 
-def greet(product,description):
+async def greet(product,description):
     user_reques = marketing_email_prompt.format(
         product, # product
         description, # description
@@ -88,9 +91,10 @@ def greet(product,description):
     #nemo guard
     config = RailsConfig.from_path("guard")
     #config = RailsConfig.from_content(yaml_content=YAML_CONFIG)
+    client = ChatOpenAI(openai_api_key=os.getenv("GROQ_API_KEY"), model_name="llama-3.2-11b-text-preview")
     app = LLMRails(config=config, llm=client)
     options = {"output_vars": ["triggered_input_rail", "triggered_output_rail"]}
-    output = app.generate(messages=messages, options=options)
+    output = await app.generate_async(messages=messages, options=options)
     print(output)
     warning_message = output.output_data["triggered_input_rail"] or output.output_data["triggered_output_rail"]
     if warning_message:
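Below is a minimal standalone sketch (not part of the commit) of how the pieces introduced here fit together: a LangChain ChatOpenAI client is handed to NeMo Guardrails' LLMRails and queried through the async API, mirroring the new generate_async call. The openai_api_base value is an assumption -- the commit's ChatOpenAI call does not set one, and ChatOpenAI defaults to the OpenAI endpoint, so a GROQ_API_KEY would normally be paired with Groq's OpenAI-compatible URL.

# Sketch only; base_url/prompt content are assumptions, model name and "guard"
# config path are taken from the commit.
import os
import asyncio

from langchain_openai import ChatOpenAI
from nemoguardrails import LLMRails, RailsConfig

async def main():
    client = ChatOpenAI(
        openai_api_key=os.getenv("GROQ_API_KEY"),
        openai_api_base="https://api.groq.com/openai/v1",  # assumption: Groq's OpenAI-compatible endpoint
        model_name="llama-3.2-11b-text-preview",            # model named in the commit
    )
    config = RailsConfig.from_path("guard")                  # rails config directory used in app.py
    rails = LLMRails(config=config, llm=client)
    options = {"output_vars": ["triggered_input_rail", "triggered_output_rail"]}
    # generate_async mirrors the commit's `await app.generate_async(...)` call
    output = await rails.generate_async(
        messages=[{"role": "user", "content": "Hello"}],
        options=options,
    )
    print(output.output_data)

asyncio.run(main())

Passing the LangChain chat model via the llm= argument is what lets the rails run input/output checks around the hosted model, which is also why greet becomes async: the guardrails call is awaited inside it.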
requirements.txt CHANGED
@@ -4,4 +4,5 @@ llama-cpp-python
 groq
 transformers
 nemoguardrails==0.9.1.1
-google-tts
+google-tts
+langchain-openai