Integration Guide
Python Integration
Use ParrotRouter with Python using the official OpenAI SDK
Installation
Install via pip:
pip install openai
Or with Poetry:
poetry add openai
Or with Conda:
conda install -c conda-forge openai
ParrotRouter is fully compatible with the official OpenAI Python SDK v1.0+. No special SDK required!
Quick Start
Basic Setup
from openai import OpenAI

# Point the standard OpenAI client at the ParrotRouter endpoint.
client = OpenAI(
    base_url="https://api.parrotrouter.com/v1",
    api_key="your-api-key-here",  # Get from dashboard
)

# Send a minimal chat completion request.
response = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello! How are you?"},
    ],
)

print(response.choices[0].message.content)
Common Use Cases
Streaming Responses
# Stream responses for better UX
stream = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "Write a haiku about Python"}],
    stream=True,
)

# Emit each token as soon as it arrives instead of waiting for the full reply.
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta is not None:
        print(delta, end="")
Using Different Models
# GPT-4 for complex tasks
response = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "Explain quantum computing"}],
)

# Claude for analysis
response = client.chat.completions.create(
    model="claude-3-opus-20240229",
    messages=[{"role": "user", "content": "Analyze this business plan..."}],
)

# Llama for open source
response = client.chat.completions.create(
    model="llama-3-70b-instruct",
    messages=[{"role": "user", "content": "Generate code for a REST API"}],
)
Function Calling
# Describe the callable tool to the model using JSON Schema.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string"},
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }
]

response = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "What's the weather in NYC?"}],
    tools=tools,
    tool_choice="auto",
)

# Check if the model wants to call a function
tool_calls = response.choices[0].message.tool_calls
if tool_calls:
    print(tool_calls[0].function)
Structured Outputs
import json

# Request JSON that conforms to a schema instead of free-form text.
response = client.chat.completions.create(
    model="gpt-4-turbo-preview",
    messages=[
        {"role": "user", "content": "List 3 Python web frameworks"}
    ],
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": "frameworks",
            "schema": {
                "type": "object",
                "properties": {
                    "frameworks": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "name": {"type": "string"},
                                "description": {"type": "string"},
                                "popularity": {"type": "string"},
                            },
                        },
                    },
                },
                "required": ["frameworks"],
            },
        },
    },
)

# The reply content is a JSON document that matches the schema above.
data = json.loads(response.choices[0].message.content)
for framework in data["frameworks"]:
    print(f"{framework['name']}: {framework['description']}")
Advanced Features
Async Support
import asyncio
from openai import AsyncOpenAI

# One async client can serve many concurrent coroutines.
async_client = AsyncOpenAI(
    base_url="https://api.parrotrouter.com/v1",
    api_key="your-api-key",
)


async def get_response(prompt):
    """Ask the model a single question and return the text of its reply."""
    response = await async_client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": prompt}],
    )
    return response.choices[0].message.content


async def main():
    """Fire three requests concurrently and print each answer in order."""
    prompts = ["What is Python?", "What is JavaScript?", "What is Rust?"]
    results = await asyncio.gather(*(get_response(p) for p in prompts))
    for result in results:
        print(result)


asyncio.run(main())
Error Handling
from openai import OpenAI, RateLimitError, APIError
import time

client = OpenAI(
    base_url="https://api.parrotrouter.com/v1",
    api_key="your-api-key",
)


def make_request_with_retry(messages, max_retries=3):
    """Send a chat completion, retrying rate-limit errors with exponential backoff."""
    for attempt in range(max_retries):
        try:
            return client.chat.completions.create(
                model="gpt-4",
                messages=messages,
            )
        except RateLimitError:
            # Out of attempts: surface the rate-limit error to the caller.
            if attempt == max_retries - 1:
                raise
            wait_time = 2 ** attempt  # Exponential backoff: 1s, 2s, 4s, ...
            print(f"Rate limit hit. Waiting {wait_time}s...")
            time.sleep(wait_time)
        except APIError as e:
            print(f"API error: {e}")
            raise
        except Exception as e:
            print(f"Unexpected error: {e}")
            raise


# Use with retry logic
response = make_request_with_retry([
    {"role": "user", "content": "Hello!"}
])
Token Counting
import tiktoken


def count_tokens(messages, model="gpt-4"):
    """Estimate the prompt-token count of a chat request before sending it.

    Args:
        messages: List of chat message dicts ({"role": ..., "content": ...}).
        model: Model name, used to select the matching tiktoken encoding.

    Returns:
        Approximate number of tokens the request will consume.
    """
    encoding = tiktoken.encoding_for_model(model)
    tokens = 0
    for message in messages:
        # Every message carries ~4 tokens of chat framing:
        # <im_start>{role/name}\n{content}<im_end>\n
        # (The stray "{content}" line here was a comment fragment that had
        # leaked into the code and raised NameError at runtime.)
        tokens += 4
        for key, value in message.items():
            tokens += len(encoding.encode(value))
    tokens += 2  # Every reply is primed with <im_start>assistant
    return tokens


# Check before sending
messages = [
    {"role": "user", "content": "Write a long story..."}
]
token_count = count_tokens(messages)
print(f"This request will use ~{token_count} tokens")
if token_count > 4000:
    print("Warning: This might be expensive!")
Best Practices
Environment Variables
Never hardcode API keys in your code
import os
from openai import OpenAI

# Read the key from the environment so it never lives in source control.
client = OpenAI(
    base_url="https://api.parrotrouter.com/v1",
    api_key=os.getenv("PARROTROUTER_API_KEY"),
)
Connection Pooling
Reuse client instances for better performance
# Create once, use many times
client = OpenAI(
    base_url="https://api.parrotrouter.com/v1",
    api_key=os.getenv("PARROTROUTER_API_KEY"),
    max_retries=3,  # SDK-level automatic retries
    timeout=30.0,   # seconds per request
)


def get_ai_response(prompt):
    """Send a single-turn prompt through the shared client instance."""
    return client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": prompt}],
    )
Full Example Application
chatbot.py
#!/usr/bin/env python3
"""
Simple chatbot using ParrotRouter
"""
import os
import sys
from openai import OpenAI
from typing import List, Dict
class Chatbot:
    """Minimal multi-turn chat session backed by ParrotRouter."""

    def __init__(self, model: str = "gpt-4"):
        self.client = OpenAI(
            base_url="https://api.parrotrouter.com/v1",
            api_key=os.getenv("PARROTROUTER_API_KEY")
        )
        self.model = model
        # Conversation history; index 0 is always the system prompt.
        self.messages: List[Dict[str, str]] = [
            {"role": "system", "content": "You are a helpful assistant."}
        ]

    def add_message(self, role: str, content: str) -> None:
        """Append one message to the conversation history."""
        self.messages.append({"role": role, "content": content})

    def get_response(self, user_input: str) -> str:
        """Send the user's input plus history to the model and return the reply.

        On failure the just-added user message is rolled back so a retry does
        not send it twice, and the error text is returned instead of a reply.
        """
        self.add_message("user", user_input)
        try:
            response = self.client.chat.completions.create(
                model=self.model,
                messages=self.messages,
                temperature=0.7,
                max_tokens=500
            )
        except Exception as e:
            # Bug fix: previously the failed user turn stayed in history,
            # so the next attempt duplicated it. Roll it back.
            self.messages.pop()
            return f"Error: {str(e)}"
        assistant_message = response.choices[0].message.content
        self.add_message("assistant", assistant_message)
        return assistant_message

    def reset(self) -> None:
        """Clear the conversation, keeping only the system message."""
        self.messages = self.messages[:1]
def main():
    """Run an interactive chat loop on stdin/stdout until the user quits."""
    if not os.getenv("PARROTROUTER_API_KEY"):
        print("Please set PARROTROUTER_API_KEY environment variable")
        sys.exit(1)

    bot = Chatbot()
    print("Chatbot ready! Type 'quit' to exit, 'reset' to clear history.\n")

    while True:
        user_input = input("You: ").strip()
        command = user_input.lower()
        if command == 'quit':
            break
        if command == 'reset':
            bot.reset()
            print("Chat history cleared.\n")
            continue
        print("Bot: ", end="", flush=True)
        print(bot.get_response(user_input) + "\n")


if __name__ == "__main__":
    main()