Pagination
Navigate large datasets efficiently with cursor-based and page-based pagination patterns and best practices
All list endpoints in the GetPaidHQ API support pagination, with cursor-based pagination recommended for large datasets: it provides consistent performance regardless of dataset size and handles real-time data changes gracefully. Page-based pagination is also available and is often simpler for UI use cases.
List response format
List endpoints return a standardized pagination structure:
```json
{
  "items": [
    {
      "id": "cus_1234567890",
      "email": "customer1@example.com",
      "name": "John Doe",
      "created_at": "2024-01-15T10:30:00Z"
    },
    {
      "id": "cus_1234567891",
      "email": "customer2@example.com",
      "name": "Jane Smith",
      "created_at": "2024-01-15T10:25:00Z"
    }
  ],
  "total_count": 1247,
  "page": 1,
  "page_size": 25,
  "has_more": true,
  "next_cursor": "eyJpZCI6ImN1c18xMjM0NTY3ODkxIiwiY3JlYXRlZF9hdCI6IjIwMjQtMDEtMTVUMTA6MjU6MDBaIn0="
}
```
Response fields
- `items`: Array of resources for the current page
- `total_count`: Total number of resources matching the query
- `page`: Current page number (1-indexed)
- `page_size`: Number of items per page (the actual count may be less on the last page)
- `has_more`: Boolean indicating if more pages are available
- `next_cursor`: Opaque cursor token for fetching the next page
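If you are calling the API without an SDK, reading one page and its pagination fields with plain `fetch` looks roughly like this (a minimal sketch; the `GETPAIDHQ_API_KEY` environment variable is our assumption, not part of the API):

```javascript
// Sketch: fetch one page of customers and read the pagination fields.
// Assumes the API key is provided via the GETPAIDHQ_API_KEY environment variable.
async function listCustomersPage(cursor) {
  const url = new URL('https://api.getpaidhq.com/api/customers');
  url.searchParams.set('limit', '25');
  if (cursor) url.searchParams.set('cursor', cursor);

  const response = await fetch(url, {
    headers: { Authorization: `Bearer ${process.env.GETPAIDHQ_API_KEY}` }
  });
  if (!response.ok) throw new Error(`Request failed: ${response.status}`);

  const page = await response.json();
  console.log(`Fetched ${page.items.length} of ${page.total_count} customers`);

  // next_cursor is only meaningful while has_more is true
  return page.has_more ? page.next_cursor : null;
}
```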
Basic pagination
Page-based pagination
Request specific pages using the `page` parameter:
curl "https://api.getpaidhq.com/api/customers?page=1&limit=25" \
-H "Authorization: Bearer pk_live_..."
curl "https://api.getpaidhq.com/api/customers?page=2&limit=25" \
-H "Authorization: Bearer pk_live_..."
Cursor-based pagination
For better performance and consistency, use cursor-based pagination:
```bash
# First page
curl "https://api.getpaidhq.com/api/customers?limit=25" \
  -H "Authorization: Bearer pk_live_..."

# Next page using cursor from previous response
curl "https://api.getpaidhq.com/api/customers?limit=25&cursor=eyJpZCI6ImN1c18xMjM0NTY3ODkxIn0=" \
  -H "Authorization: Bearer pk_live_..."
```
Pagination parameters
Standard parameters
All list endpoints support these pagination parameters:
| Parameter | Type | Default | Description |
|---|---|---|---|
| `page` | integer | 1 | Page number (1-indexed) |
| `limit` | integer | 25 | Items per page (max 100) |
| `cursor` | string | - | Cursor for the next page |
Parameter validation
- `page`: Must be >= 1
- `limit`: Must be between 1 and 100
- `cursor`: Must be a valid cursor token
```bash
# ❌ Invalid: limit too high
curl "https://api.getpaidhq.com/api/customers?limit=500"
# Returns 400 Bad Request

# ✅ Valid: within limits
curl "https://api.getpaidhq.com/api/customers?limit=100"
```
Advanced pagination patterns
Iterating through all pages
```javascript
async function getAllCustomers() {
  const allCustomers = [];
  let page = 1;
  let hasMore = true;

  while (hasMore) {
    const response = await getpaidhq.customers.list({
      page: page,
      limit: 100
    });

    allCustomers.push(...response.items);
    hasMore = response.has_more;
    page++;
  }

  return allCustomers;
}
```
Using cursors for large datasets
```javascript
async function getAllCustomersCursor() {
  const allCustomers = [];
  let cursor = null;

  while (true) {
    const params = { limit: 100 };
    if (cursor) params.cursor = cursor;

    const response = await getpaidhq.customers.list(params);
    allCustomers.push(...response.items);

    if (!response.has_more) break;
    cursor = response.next_cursor;
  }

  return allCustomers;
}
```
Processing pages in parallel
For read-only operations, you can process multiple pages concurrently:
```javascript
async function processCustomersInParallel() {
  // Get first page to determine total pages
  const firstPage = await getpaidhq.customers.list({ limit: 100 });
  const totalPages = Math.ceil(firstPage.total_count / 100);

  // Create promises for remaining pages
  const pagePromises = [];
  for (let page = 2; page <= totalPages; page++) {
    pagePromises.push(
      getpaidhq.customers.list({ page, limit: 100 })
    );
  }

  // Process all pages
  const allPages = await Promise.all(pagePromises);
  const allCustomers = [
    ...firstPage.items,
    ...allPages.flatMap(page => page.items)
  ];

  return allCustomers;
}
```
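One caveat: firing every remaining page at once can trip rate limits on large datasets. A simple variant (a sketch using the same client) caps the number of in-flight requests by working in fixed-size batches:

```javascript
// Sketch: fetch pages in batches of `concurrency` so at most that many
// requests are in flight at a time.
async function processCustomersBatched(concurrency = 5) {
  const firstPage = await getpaidhq.customers.list({ limit: 100 });
  const totalPages = Math.ceil(firstPage.total_count / 100);
  const allCustomers = [...firstPage.items];

  for (let start = 2; start <= totalPages; start += concurrency) {
    const batch = [];
    for (let page = start; page < start + concurrency && page <= totalPages; page++) {
      batch.push(getpaidhq.customers.list({ page, limit: 100 }));
    }
    const pages = await Promise.all(batch);
    allCustomers.push(...pages.flatMap(p => p.items));
  }

  return allCustomers;
}
```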
Filtering with pagination
Combine filters with pagination for targeted data retrieval:
```bash
# Get active customers created in the last month
curl "https://api.getpaidhq.com/api/customers?status=active&created_after=2024-01-01&limit=50" \
  -H "Authorization: Bearer pk_live_..."
```
```javascript
// Find all unpaid invoices for a specific customer
async function getUnpaidInvoices(customerId) {
  const allInvoices = [];
  let page = 1;
  let hasMore = true;

  while (hasMore) {
    const response = await getpaidhq.invoices.list({
      customer_id: customerId,
      status: 'unpaid',
      page: page,
      limit: 100
    });

    allInvoices.push(...response.items);
    hasMore = response.has_more;
    page++;
  }

  return allInvoices;
}
```
Ordering and pagination
Control the order of results with ordering parameters:
```bash
# Get customers ordered by creation date (newest first)
curl "https://api.getpaidhq.com/api/customers?order_by=created_at&order_dir=desc&limit=25" \
  -H "Authorization: Bearer pk_live_..."

# Get subscriptions ordered by amount (highest first)
curl "https://api.getpaidhq.com/api/subscriptions?order_by=amount&order_dir=desc&limit=25" \
  -H "Authorization: Bearer pk_live_..."
```
Common ordering options:
- `created_at`: Order by creation date
- `updated_at`: Order by last modification
- `amount`: Order by monetary amount (where applicable)
- `name`: Order alphabetically

Order directions:

- `asc`: Ascending (default)
- `desc`: Descending
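Ordering also lets you answer "top N" questions without paging through everything: the first page of a descending sort is the answer. A sketch with the same client as above:

```javascript
// Sketch: the largest subscription is the first item of a descending sort by amount.
const response = await getpaidhq.subscriptions.list({
  order_by: 'amount',
  order_dir: 'desc',
  limit: 1
});
const largestSubscription = response.items[0]; // undefined if there are no subscriptions
```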
Performance considerations
Choose appropriate page sizes
- Small pages (10-25): Better for interactive UIs with quick loading
- Medium pages (50-100): Good balance for most applications
- Large pages (100, the maximum): Most efficient for bulk processing
Use cursors for large datasets
Cursor-based pagination is more efficient for large datasets:
```javascript
// ✅ Efficient for large datasets
async function processLargeDataset() {
  let cursor = null;

  while (true) {
    const params = { limit: 100 };
    if (cursor) params.cursor = cursor;

    const response = await getpaidhq.customers.list(params);

    // Process batch
    await processBatch(response.items);

    if (!response.has_more) break;
    cursor = response.next_cursor;
  }
}

// ❌ Inefficient for large datasets
async function processLargeDatasetSlow() {
  let page = 1;

  while (true) {
    const response = await getpaidhq.customers.list({
      page: page,
      limit: 100
    });

    await processBatch(response.items);

    if (!response.has_more) break;
    page++; // Slower as page number increases
  }
}
```
Avoid deep pagination
For very large datasets, avoid requesting pages far from the beginning:
```javascript
// ❌ Slow: Deep pagination
const page1000 = await getpaidhq.customers.list({ page: 1000, limit: 100 });

// ✅ Fast: Use cursors or filters
const recentCustomers = await getpaidhq.customers.list({
  created_after: '2024-01-01',
  limit: 100
});
```
SDK helpers
Official SDKs provide pagination helpers:
Node.js auto-pagination
```javascript
// Automatically iterate through all pages
for await (const customer of getpaidhq.customers.listAutoPaging({ limit: 100 })) {
  console.log('Processing customer:', customer.id);
}

// Collect all items into an array
const allCustomers = await getpaidhq.customers.listAll({
  status: 'active',
  limit: 100
});
```
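Without an SDK, a small async generator over plain `fetch` gives you the same `for await` ergonomics via cursor pagination (a sketch; the API key handling is our assumption):

```javascript
// Sketch: cursor-driven async generator over all customers using plain fetch.
async function* iterateCustomers(params = {}) {
  let cursor = null;

  while (true) {
    const url = new URL('https://api.getpaidhq.com/api/customers');
    url.searchParams.set('limit', '100');
    for (const [key, value] of Object.entries(params)) {
      url.searchParams.set(key, String(value));
    }
    if (cursor) url.searchParams.set('cursor', cursor);

    const response = await fetch(url, {
      headers: { Authorization: `Bearer ${process.env.GETPAIDHQ_API_KEY}` }
    });
    if (!response.ok) throw new Error(`Request failed: ${response.status}`);

    const page = await response.json();
    yield* page.items; // hand items to the caller one at a time

    if (!page.has_more) return;
    cursor = page.next_cursor;
  }
}

// Usage mirrors the SDK helper above:
// for await (const customer of iterateCustomers({ status: 'active' })) { ... }
```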
Python pagination
```python
# Iterate through all pages
for customer in client.customers.list_auto_paging(limit=100):
    print(f"Processing customer: {customer.id}")

# Get all items as a list
all_customers = list(client.customers.list_all(status='active', limit=100))
```
Error handling
Handle pagination errors gracefully:
```javascript
async function robustPagination() {
  let page = 1;
  const maxRetries = 3;

  while (true) {
    let retries = 0;

    while (retries < maxRetries) {
      try {
        const response = await getpaidhq.customers.list({
          page: page,
          limit: 100
        });

        // Process successful response
        await processBatch(response.items);

        if (!response.has_more) return;
        page++;
        break;
      } catch (error) {
        retries++;

        if (error.status === 429) {
          // Rate limited - wait and retry
          await new Promise(resolve => setTimeout(resolve, 60000));
        } else if (error.status >= 500) {
          // Server error - exponential backoff
          const delay = Math.pow(2, retries) * 1000;
          await new Promise(resolve => setTimeout(resolve, delay));
        } else {
          // Other errors - don't retry
          throw error;
        }
      }
    }

    if (retries >= maxRetries) {
      throw new Error('Max retries exceeded for pagination');
    }
  }
}
```
Best practices
- Use appropriate page sizes based on your use case
- Implement error handling for network issues and rate limits
- Consider cursor-based pagination for large datasets
- Cache results when possible to reduce API calls
- Use filters to reduce the total dataset size
- Monitor performance and adjust page sizes as needed
- Implement progress indicators for long-running operations
Common patterns
Real-time data synchronization
```javascript
async function syncCustomers(lastSyncTime) {
  const newCustomers = [];
  let page = 1;

  while (true) {
    const response = await getpaidhq.customers.list({
      updated_after: lastSyncTime,
      order_by: 'updated_at',
      order_dir: 'asc',
      page: page,
      limit: 100
    });

    newCustomers.push(...response.items);

    if (!response.has_more) break;
    page++;
  }

  return newCustomers;
}
```
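Callers usually persist the newest `updated_at` they have processed and pass it back as `lastSyncTime` on the next run. A sketch, with `store` standing in for whatever persistence layer you use (its methods here are hypothetical):

```javascript
// Sketch: run one sync and advance the high-water mark for the next run.
async function runSync(store) {
  const lastSyncTime = (await store.get('customers_synced_at')) ?? '1970-01-01T00:00:00Z';
  const changed = await syncCustomers(lastSyncTime);

  for (const customer of changed) {
    await store.upsertCustomer(customer); // hypothetical persistence call
  }

  if (changed.length > 0) {
    // Results are ordered by updated_at ascending, so the last item is the newest.
    await store.set('customers_synced_at', changed[changed.length - 1].updated_at);
  }

  return changed.length;
}
```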
Batch processing with progress tracking
```javascript
async function batchProcessWithProgress(processingFunction) {
  const firstPage = await getpaidhq.customers.list({ limit: 100 });
  const totalItems = firstPage.total_count;

  let processedItems = 0;
  let page = 1;

  while (true) {
    const response = await getpaidhq.customers.list({
      page: page,
      limit: 100
    });

    await processingFunction(response.items);
    processedItems += response.items.length;

    console.log(`Progress: ${processedItems}/${totalItems} (${Math.round(processedItems / totalItems * 100)}%)`);

    if (!response.has_more) break;
    page++;
  }
}
```