import os
from llama_api_client import LlamaAPIClient
# Read the Helicone and Llama API keys from environment variables
helicone_api_key = os.getenv("HELICONE_API_KEY")
llama_api_key = os.getenv("LLAMA_API_KEY")
# Route requests through the Helicone gateway; the Llama API key authenticates
# with the Llama API, while the Helicone-Auth header authenticates with Helicone
client = LlamaAPIClient(
    api_key=llama_api_key,
    base_url="https://llama.helicone.ai/v1",
    default_headers={
        "Helicone-Auth": f"Bearer {helicone_api_key}"
    }
)
completion = client.chat.completions.create(
    model="Llama-4-Maverick-17B-128E-Instruct-FP8",
    messages=[
        {
            "role": "user",
            "content": "What is the moon made of?",
        }
    ],
)
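# The Llama API SDK nests the generated text under completion_message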
print(completion.completion_message.content.text)