Use this file to discover all available pages before exploring further.
Edgee is an Agent Gateway, the infrastructure layer between your coding agent (Claude Code, Codex, OpenCode, Cursor) and the LLM provider APIs (Anthropic, OpenAI, GLM, others). It applies three things to every request: compression of context, intelligent routing across providers, and observability of token consumption.
// Edgee JavaScript SDK quickstart: send a prompt and inspect compression savings.
import Edgee from 'edgee';

const client = new Edgee("your-api-key");

const response = await client.send({
  model: 'gpt-5.2',
  input: 'What is the capital of France?',
});

console.log(response.text);

// `compression` is present only when Edgee compressed the request context.
if (response.compression) {
  console.log(`Tokens saved: ${response.compression.saved_tokens}`);
}
# Edgee Python SDK quickstart: send a prompt and report compression savings.
from edgee import Edgee

client = Edgee("your-api-key")

response = client.send(
    model="gpt-5.2",
    input="What is the capital of France?",
)
print(response.text)

# `compression` is set only when Edgee compressed the request context.
if response.compression:
    print(f"Tokens saved: {response.compression.saved_tokens}")
package mainimport ( "fmt" "log" "github.com/edgee-ai/go-sdk/edgee")func main() { client, _ := edgee.NewClient("your-api-key") response, err := client.Send("gpt-5.2", "What is the capital of France?") if err != nil { log.Fatal(err) } fmt.Println(response.Text()) if response.Compression != nil { fmt.Printf("Tokens saved: %d\n", response.Compression.SavedTokens) }}
// Edgee Rust SDK quickstart: send a prompt and report compression savings.
// NOTE: snippet body — run it inside an async context (e.g. an async `main`).
use edgee::Edgee;

let client = Edgee::with_api_key("your-api-key");

let response = client
    .send("gpt-5.2", "What is the capital of France?")
    .await
    .unwrap();

println!("{}", response.text().unwrap_or(""));

// `compression` is `Some` only when Edgee compressed the request context.
if let Some(compression) = &response.compression {
    println!("Tokens saved: {}", compression.saved_tokens);
}
// Drop-in OpenAI SDK usage: point baseURL at Edgee, everything else stays the same.
import OpenAI from "openai";

const client = new OpenAI({
  baseURL: "https://api.edgee.ai/v1",
  apiKey: process.env.EDGEE_API_KEY,
});

const completion = await client.chat.completions.create({
  model: "gpt-5.2",
  messages: [{ role: "user", content: "What is the capital of France?" }],
});

console.log(completion.choices[0].message.content);
// Drop-in Anthropic SDK usage: point baseURL at Edgee, everything else stays the same.
import Anthropic from '@anthropic-ai/sdk';

const anthropic = new Anthropic({
  baseURL: 'https://api.edgee.ai',
  apiKey: process.env.EDGEE_API_KEY,
});

const message = await anthropic.messages.create({
  model: 'claude-sonnet-4.5',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'What is the capital of France?' }],
});

console.log(message.content);
# LangChain via Edgee: ChatOpenAI with its base_url pointed at the Edgee gateway.
import os

from langchain_core.messages import HumanMessage
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(
    base_url="https://api.edgee.ai/v1",
    api_key=os.getenv("EDGEE_API_KEY"),
    model="gpt-5.2",
)

response = llm.invoke([HumanMessage(content="What is the capital of France?")])
print(response.content)
# Raw HTTP: Edgee exposes an OpenAI-compatible chat completions endpoint.
curl https://api.edgee.ai/v1/chat/completions \
  -H "Authorization: Bearer $EDGEE_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"model":"gpt-5.2","messages":[{"role":"user","content":"What is the capital of France?"}]}'
Edgee works with any OpenAI- or Anthropic-compatible client: set the base URL to https://api.edgee.ai for Anthropic clients, or https://api.edgee.ai/v1 for OpenAI-compatible clients, as the examples above show.