Quick Start¶
This guide will help you get up and running with the Honeycomb API Python client in minutes.
Your First Request¶
import asyncio

from honeycomb import HoneycombClient


async def main():
    async with HoneycombClient(api_key="your-api-key") as client:
        # List all datasets
        datasets = await client.datasets.list_async()
        for dataset in datasets:
            print(f"Dataset: {dataset.name} ({dataset.slug})")
            print(f"  Columns: {dataset.regular_columns_count}")
            print(f"  Last written: {dataset.last_written_at}")


asyncio.run(main())
For scripts and CLI tools, you can use the synchronous mode:
from honeycomb import HoneycombClient


def main():
    with HoneycombClient(api_key="your-api-key", sync=True) as client:
        datasets = client.datasets.list()
        for dataset in datasets:
            print(f"Dataset: {dataset.name}")
            print(f"  Columns: {dataset.regular_columns_count}")
            print(f"  Last written: {dataset.last_written_at}")


main()
Common Operations¶
Working with Triggers¶
from honeycomb import (
    HoneycombClient,
    TriggerCreate,
    TriggerThreshold,
    TriggerThresholdOp,
    QueryBuilder,
)

async with HoneycombClient(api_key="...") as client:
    # List existing triggers
    triggers = await client.triggers.list_async("my-dataset")
    print(f"Found {len(triggers)} triggers")

    # Create a new trigger using the fluent QueryBuilder
    trigger = await client.triggers.create_async(
        "my-dataset",
        TriggerCreate(
            name="High Error Rate",
            description="Alert when error rate exceeds 5%",
            threshold=TriggerThreshold(
                op=TriggerThresholdOp.GREATER_THAN,
                value=0.05,
            ),
            frequency=300,  # Check every 5 minutes
            query=QueryBuilder()
            .last_30_minutes()
            .avg("error_rate")
            .build_for_trigger(),
        )
    )
    print(f"Created trigger: {trigger.id}")

    # Update the trigger
    updated = await client.triggers.update_async(
        "my-dataset",
        trigger.id,
        TriggerCreate(
            name="High Error Rate (Updated)",
            threshold=TriggerThreshold(
                op=TriggerThresholdOp.GREATER_THAN_OR_EQUAL,
                value=0.10,
            ),
            frequency=300,
            query=QueryBuilder()
            .last_30_minutes()
            .avg("error_rate")
            .build_for_trigger(),
        )
    )

    # Delete the trigger
    await client.triggers.delete_async("my-dataset", trigger.id)
from honeycomb import (
    HoneycombClient,
    TriggerCreate,
    TriggerThreshold,
    TriggerThresholdOp,
    QueryBuilder,
)

with HoneycombClient(api_key="...", sync=True) as client:
    # List existing triggers
    triggers = client.triggers.list("my-dataset")
    print(f"Found {len(triggers)} triggers")

    # Create a new trigger using the fluent QueryBuilder
    trigger = client.triggers.create(
        "my-dataset",
        TriggerCreate(
            name="High Error Rate",
            description="Alert when error rate exceeds 5%",
            threshold=TriggerThreshold(
                op=TriggerThresholdOp.GREATER_THAN,
                value=0.05,
            ),
            frequency=300,  # Check every 5 minutes
            query=QueryBuilder()
            .last_30_minutes()
            .avg("error_rate")
            .build_for_trigger(),
        )
    )
    print(f"Created trigger: {trigger.id}")

    # Update the trigger
    updated = client.triggers.update(
        "my-dataset",
        trigger.id,
        TriggerCreate(
            name="High Error Rate (Updated)",
            threshold=TriggerThreshold(
                op=TriggerThresholdOp.GREATER_THAN_OR_EQUAL,
                value=0.10,
            ),
            frequency=300,
            query=QueryBuilder()
            .last_30_minutes()
            .avg("error_rate")
            .build_for_trigger(),
        )
    )

    # Delete the trigger
    client.triggers.delete("my-dataset", trigger.id)
Running Queries¶
from honeycomb import HoneycombClient, QueryBuilder

async with HoneycombClient(api_key="...") as client:
    # Create a saved query and run it using the fluent QueryBuilder
    query, result = await client.query_results.create_and_run_async(
        QueryBuilder()
        .dataset("my-dataset")
        .last_1_hour()
        .p99("duration_ms")
        .group_by("endpoint"),
        poll_interval=1.0,
        timeout=60.0,
    )
    print(f"Saved as query: {query.id}")

    # Process results
    for row in result.data.rows:
        print(f"Endpoint: {row.get('endpoint')}, P99: {row.get('P99')}")
from honeycomb import HoneycombClient, QueryBuilder

with HoneycombClient(api_key="...", sync=True) as client:
    # Create a saved query and run it using the fluent QueryBuilder
    query, result = client.query_results.create_and_run(
        QueryBuilder()
        .dataset("my-dataset")
        .last_1_hour()
        .p99("duration_ms")
        .group_by("endpoint"),
        poll_interval=1.0,
        timeout=60.0,
    )
    print(f"Saved as query: {query.id}")

    # Process results
    for row in result.data.rows:
        print(f"Endpoint: {row.get('endpoint')}, P99: {row.get('P99')}")
Query Execution Options

See the Queries Guide for two ways to run queries:

- Saved (create + run) - Save the query for reuse, then run it
- Both (create_and_run) - Save AND execute in one call (recommended)
Creating SLOs¶
from honeycomb import HoneycombClient, SLOCreate, SLI

async with HoneycombClient(api_key="...") as client:
    slo = await client.slos.create_async(
        "my-dataset",
        SLOCreate(
            name="API Availability",
            description="99.9% uptime target",
            sli=SLI(alias="api-availability"),
            time_period_days=30,
            target_per_million=999000,  # 99.9%
        )
    )
    print(f"Created SLO: {slo.id}")
from honeycomb import HoneycombClient, SLOCreate, SLI

with HoneycombClient(api_key="...", sync=True) as client:
    slo = client.slos.create(
        "my-dataset",
        SLOCreate(
            name="API Availability",
            description="99.9% uptime target",
            sli=SLI(alias="api-availability"),
            time_period_days=30,
            target_per_million=999000,  # 99.9%
        )
    )
    print(f"Created SLO: {slo.id}")
Error Handling¶
The client provides specific exception types for different error scenarios:
from honeycomb import (
    HoneycombClient,
    HoneycombNotFoundError,
    HoneycombRateLimitError,
    HoneycombAuthError,
)

async with HoneycombClient(api_key="...") as client:
    try:
        trigger = await client.triggers.get_async("dataset", "invalid-id")
    except HoneycombNotFoundError as e:
        print(f"Not found: {e.message}")
        print(f"Request ID: {e.request_id}")
    except HoneycombRateLimitError as e:
        print(f"Rate limited. Retry after {e.retry_after} seconds")
    except HoneycombAuthError:
        print("Invalid API key")
See the Error Handling guide for more details.
CLI Quick Start¶
For quick operations without writing Python, use the CLI:
# Run without installing (using uvx or pipx)
export HONEYCOMB_API_KEY=your_api_key_here
uvx honeycomb-api triggers list
# or
pipx run honeycomb-api triggers list
# Or install and use the short alias
uv tool install honeycomb-api
# or
pipx install honeycomb-api
hny triggers list
hny query run --dataset my-dataset --count --last-30-minutes
See the CLI Reference for full documentation.
Next Steps¶
- Dive deeper: Check out the Usage Guides for detailed examples
- Learn about auth: See Authentication for API key and management key setup
- Advanced features: Explore Retry Configuration and Async vs Sync
- API Reference: Browse the complete API Reference