Spaces:
Sleeping
Sleeping
File size: 1,392 Bytes
fc8c1ae cf2d436 fc8c1ae cf2d436 fc8c1ae cf2d436 fc8c1ae cf2d436 fc8c1ae cf2d436 fc8c1ae cf2d436 fc8c1ae |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 |
import openai
import os
# Configure the OpenAI API key.
# NOTE(security): the original code passed a literal API key as the *name*
# of the environment variable (os.getenv("22ec8442...")), which both leaks
# the secret into source control and makes getenv return None (no env var
# has that name). Read the key from the conventional OPENAI_API_KEY
# environment variable instead — and revoke the leaked key.
openai.api_key = os.getenv("OPENAI_API_KEY")
def generate_use_cases(company_info):
    """Build GenAI use-case suggestions for a company's focus areas.

    Args:
        company_info: Mapping expected to contain a "focus_areas" entry —
            a container of focus-area strings (e.g. "operations"). A
            missing key is treated as no focus areas rather than an error.

    Returns:
        dict with:
            "use_cases": list of suggestion strings, one per matched
                focus area, in a fixed order.
            "references": static list of supporting report names.
    """
    # One (area, prompt) pair per supported focus area; a tuple keeps the
    # output order deterministic and replaces three copy-pasted if-blocks.
    prompts = (
        ("customer experience",
         "Suggest a GenAI-driven chatbot for enhanced customer support."),
        ("operations",
         "Propose using predictive maintenance models to streamline operations."),
        ("supply chain",
         "Describe how real-time analytics could improve supply chain transparency."),
    )
    # Robustness: tolerate a missing "focus_areas" key instead of KeyError.
    focus_areas = company_info.get("focus_areas", [])
    use_cases = [
        get_openai_response(prompt)
        for area, prompt in prompts
        if area in focus_areas
    ]
    references = ["McKinsey AI Report", "Deloitte Industry Insights"]
    return {"use_cases": use_cases, "references": references}
def get_openai_response(prompt, model="gpt-4"):
    """Send *prompt* to the OpenAI chat completion API and return the reply.

    Best-effort helper: any failure (network error, API error, or an
    unexpected response shape) is reported to stdout and a fixed
    placeholder string is returned, so callers never see an exception.
    """
    chat_messages = [{"role": "user", "content": prompt}]
    try:
        # Extraction stays inside the try so a malformed response payload
        # also falls back to the placeholder instead of propagating.
        completion = openai.ChatCompletion.create(
            model=model,
            messages=chat_messages,
        )
        reply = completion['choices'][0]['message']['content']
        return reply
    except Exception as exc:
        print(f"Error generating response from OpenAI: {exc}")
        return "Could not generate use case."
|