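"""Example usage of the DeepInfraClient.

Demonstrates an interactive chat session and a streaming completion,
with optional proxy rotation, AWS API Gateway IP rotation, and
User-Agent rotation enabled on the client.
"""
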
import os
import json
import time
from typing import Dict, List, Any
from rich import print
from rich.console import Console
from rich.table import Table
from rich.panel import Panel
from deepinfra_client import DeepInfraClient
console = Console()


def print_proxy_status(client):
    """Print the proxy and IP rotation status"""
    status = []

    if client.use_proxy_rotation and client.proxy_finder:
        proxy_counts = {k: len(v) for k, v in client.proxy_finder.proxy_dict.items()}
        total_proxies = sum(proxy_counts.values())
        status.append(f"Proxy rotation: [green]Enabled[/green] ({total_proxies} proxies)")

        # Show available proxies per type
        table = Table(title="Available Proxies")
        table.add_column("Type", style="cyan")
        table.add_column("Count", style="green")
        for proxy_type, count in proxy_counts.items():
            if count > 0:
                table.add_row(proxy_type, str(count))
        console.print(table)
    else:
        status.append("Proxy rotation: [red]Disabled[/red]")

    if client.use_ip_rotation and client.ip_rotator:
        status.append(f"IP rotation: [green]Enabled[/green] (AWS API Gateway - {len(client.ip_rotator.gateways)} regions)")
    else:
        status.append("IP rotation: [red]Disabled[/red]")

    if client.use_random_user_agent:
        status.append("User-Agent rotation: [green]Enabled[/green]")
    else:
        status.append("User-Agent rotation: [red]Disabled[/red]")

    console.print(Panel("\n".join(status), title="Client Configuration", border_style="blue"))


def chat_with_model():
    """Demonstrate interactive chat with DeepInfra models"""
    # Initialize the client with all rotation features enabled
    client = DeepInfraClient(
        api_key=os.getenv("DEEPINFRA_API_KEY"),  # Set this environment variable if you have an API key
        use_random_user_agent=True,
        use_ip_rotation=True,
        use_proxy_rotation=True,
        proxy_types=['http', 'socks5'],
        model="meta-llama/Llama-3.3-70B-Instruct-Turbo"  # Use a good default model
    )
    print_proxy_status(client)

    # Show available models
    console.print("\n[bold cyan]Fetching available models...[/bold cyan]")
    try:
        models_response = client.models.list()
        model_table = Table(title="Available Models")
        model_table.add_column("Model", style="green")
        for model in models_response["data"]:
            model_table.add_row(model["id"])
        console.print(model_table)
    except Exception as e:
        console.print(f"[red]Error fetching models: {str(e)}[/red]")

    # Start interactive chat
    console.print("\n[bold green]Starting interactive chat (type 'quit' to exit)[/bold green]")
    console.print("[yellow]Note: Every 3 messages, the client will rotate IP and proxy[/yellow]\n")

    messages = [{"role": "system", "content": "You are a helpful assistant."}]
    message_count = 0

    while True:
        user_input = input("\nYou: ")
        if user_input.lower() in ["quit", "exit", "bye"]:
            break

        messages.append({"role": "user", "content": user_input})

        # Rotate IP and proxy every 3 messages
        message_count += 1
        if message_count % 3 == 0:
            console.print("[yellow]Rotating IP and proxy...[/yellow]")
            client.refresh_proxies()
            client.refresh_session()

        # Make the API call
        console.print("\n[cyan]Waiting for response...[/cyan]")
        start_time = time.time()
        try:
            response = client.chat.create(
                messages=messages,
                temperature=0.7,
                max_tokens=1024
            )
            elapsed = time.time() - start_time

            assistant_message = response["choices"][0]["message"]["content"]
            # Add the assistant's message to our history
            messages.append({"role": "assistant", "content": assistant_message})

            console.print(f"\n[bold green]Assistant[/bold green] [dim]({elapsed:.2f}s)[/dim]:")
            console.print(assistant_message)
        except Exception as e:
            console.print(f"[bold red]Error: {str(e)}[/bold red]")
            # Drop the failed user message so the history stays consistent,
            # then refresh the session so the user can resend it
            messages.pop()
            console.print("[yellow]Refreshing session; please resend your message.[/yellow]")
            client.refresh_session()


def stream_example():
    """Demonstrate streaming responses"""
    client = DeepInfraClient(
        use_random_user_agent=True,
        use_ip_rotation=True,
        use_proxy_rotation=True
    )
    print_proxy_status(client)

    prompt = "Write a short story about a robot that learns to feel emotions."
    console.print(f"\n[bold cyan]Prompt:[/bold cyan] {prompt}")
    console.print("\n[bold green]Streaming response:[/bold green]")

    try:
        response_stream = client.completions.create(
            prompt=prompt,
            temperature=0.8,
            max_tokens=1024,
            stream=True
        )

        full_response = ""
        for chunk in response_stream:
            # Print and accumulate each streamed content fragment as it arrives
            if 'choices' in chunk and len(chunk['choices']) > 0:
                delta = chunk['choices'][0].get('delta', {})
                if 'content' in delta:
                    content = delta['content']
                    print(content, end='', flush=True)
                    full_response += content
        print("\n")
    except Exception as e:
        console.print(f"\n[bold red]Error: {str(e)}[/bold red]")


if __name__ == "__main__":
    console.print(Panel.fit(
        "[bold green]DeepInfra Client Example[/bold green]\n"
        "This example demonstrates the enhanced client with proxy and IP rotation",
        border_style="yellow"
    ))

    while True:
        console.print("\n[bold cyan]Choose an option:[/bold cyan]")
        console.print("1. Interactive Chat")
        console.print("2. Streaming Example")
        console.print("3. Exit")

        choice = input("\nEnter your choice (1-3): ")

        if choice == "1":
            chat_with_model()
        elif choice == "2":
            stream_example()
        elif choice == "3":
            console.print("[yellow]Exiting...[/yellow]")
            break
        else:
            console.print("[red]Invalid choice. Please try again.[/red]")