Overview
List endpoints in the Mighty Networks API return paginated results to efficiently handle large datasets. Understanding pagination is essential for building robust integrations that work with all your data.
When you request a list of resources (members, posts, spaces, etc.), the API returns:
- Items - An array of resources for the current page
- Links - URLs for navigating between pages
- Metadata - Information about the pagination state (in some endpoints)
{
  "items": [
    { "id": 1, "name": "First Item" },
    { "id": 2, "name": "Second Item" }
  ],
  "links": {
    "self": "https://api.mn.co/admin/v1/networks/123/members?page=1",
    "next": "https://api.mn.co/admin/v1/networks/123/members?page=2"
  }
}
Control pagination using query parameters:
| Parameter | Description | Default | Maximum |
|-----------|-------------|---------|---------|
| page | Page number to retrieve | 1 | No limit |
| per_page | Number of items per page | 25 | 100 |
Example Request
curl "https://api.mn.co/admin/v1/networks/{network_id}/members?page=2&per_page=50" \
  -H "Authorization: Bearer YOUR_API_TOKEN"
Checking for More Pages
The links object in the response indicates whether there are more pages:
- links.next exists - There are more pages available
- links.next is null or absent - You’ve reached the last page
const response = await fetch(
  `https://api.mn.co/admin/v1/networks/${NETWORK_ID}/members`,
  { headers: { 'Authorization': `Bearer ${API_TOKEN}` } }
);
const data = await response.json();
if (data.links?.next) {
  console.log('More pages available');
  console.log('Next page URL:', data.links.next);
} else {
  console.log('This is the last page');
}
Iterating Through All Pages
async function getAllMembers(networkId, apiToken) {
  let allMembers = [];
  let page = 1;
  let hasMore = true;
  while (hasMore) {
    const response = await fetch(
      `https://api.mn.co/admin/v1/networks/${networkId}/members?page=${page}&per_page=100`,
      { headers: { 'Authorization': `Bearer ${apiToken}` } }
    );
    const data = await response.json();
    allMembers = allMembers.concat(data.items);
    // Check if there's a next page
    hasMore = data.links?.next != null;
    page++;
    // Optional: Add delay to respect rate limits
    if (hasMore) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
  }
  return allMembers;
}
Using the Next Link
Instead of manually incrementing the page number, you can use the next link:
async function getAllPostsUsingLinks(networkId, apiToken) {
  let allPosts = [];
  let url = `https://api.mn.co/admin/v1/networks/${networkId}/posts?per_page=100`;
  while (url) {
    const response = await fetch(url, {
      headers: { 'Authorization': `Bearer ${apiToken}` }
    });
    const data = await response.json();
    allPosts = allPosts.concat(data.items);
    // Use the next link from the response
    url = data.links?.next || null;
    // Rate limiting
    if (url) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
  }
  return allPosts;
}
Advanced Patterns
Paginated Generator Function
For memory efficiency with large datasets:
async function* paginateMembers(networkId, apiToken) {
  let page = 1;
  let hasMore = true;
  while (hasMore) {
    const response = await fetch(
      `https://api.mn.co/admin/v1/networks/${networkId}/members?page=${page}&per_page=100`,
      { headers: { 'Authorization': `Bearer ${apiToken}` } }
    );
    const data = await response.json();
    // Yield each member individually
    for (const member of data.items) {
      yield member;
    }
    hasMore = data.links?.next != null;
    page++;
    // Rate limiting
    if (hasMore) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
  }
}
// Usage
for await (const member of paginateMembers(NETWORK_ID, API_TOKEN)) {
  console.log(member.email);
  // Process each member without loading all into memory
}
Parallel Page Fetching
When you know the page range in advance, you can fetch multiple pages concurrently (use with caution regarding rate limits):
async function getPageRange(networkId, startPage, endPage, apiToken) {
  const promises = [];
  for (let page = startPage; page <= endPage; page++) {
    promises.push(
      fetch(
        `https://api.mn.co/admin/v1/networks/${networkId}/members?page=${page}&per_page=100`,
        { headers: { 'Authorization': `Bearer ${apiToken}` } }
      ).then(r => r.json())
    );
  }
  const results = await Promise.all(promises);
  return results.flatMap(r => r.items);
}
Parallel pagination can quickly exhaust your rate limit. Use sparingly and monitor your rate limit headers.
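If you do parallelize, inspect the rate limit headers on each response and back off before you hit the limit. A minimal sketch, assuming conventional X-RateLimit-Remaining and Retry-After header names (check the API's rate limiting documentation for the actual names):
async function fetchPageChecked(url, apiToken) {
  const response = await fetch(url, {
    headers: { 'Authorization': `Bearer ${apiToken}` }
  });
  if (response.status === 429) {
    // Too many requests: wait for the period the server suggests, then retry
    const retryAfter = Number(response.headers.get('Retry-After') ?? 1);
    await new Promise(resolve => setTimeout(resolve, retryAfter * 1000));
    return fetchPageChecked(url, apiToken);
  }
  // Header name is an assumption based on common conventions
  const remaining = response.headers.get('X-RateLimit-Remaining');
  if (remaining !== null && Number(remaining) < 5) {
    // Close to the limit: pause before the caller issues the next request
    await new Promise(resolve => setTimeout(resolve, 1000));
  }
  return response.json();
}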
Progress Tracking
Show progress when fetching large datasets:
async function getAllMembersWithProgress(networkId, apiToken, onProgress) {
  let allMembers = [];
  let page = 1;
  let hasMore = true;
  let totalFetched = 0;
  while (hasMore) {
    const response = await fetch(
      `https://api.mn.co/admin/v1/networks/${networkId}/members?page=${page}&per_page=100`,
      { headers: { 'Authorization': `Bearer ${apiToken}` } }
    );
    const data = await response.json();
    allMembers = allMembers.concat(data.items);
    totalFetched += data.items.length;
    hasMore = data.links?.next != null;
    // Call progress callback
    if (onProgress) {
      onProgress({
        page,
        currentPageSize: data.items.length,
        totalFetched,
        hasMore
      });
    }
    page++;
    if (hasMore) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
  }
  return allMembers;
}
// Usage
const members = await getAllMembersWithProgress(
  NETWORK_ID,
  API_TOKEN,
  (progress) => {
    console.log(`Fetched page ${progress.page}: ${progress.totalFetched} members so far`);
  }
);
Python Examples
The same patterns work in Python with the requests library:
import time

import requests

def get_all_members(network_id, api_token):
    all_members = []
    page = 1
    has_more = True
    while has_more:
        response = requests.get(
            f"https://api.mn.co/admin/v1/networks/{network_id}/members",
            headers={"Authorization": f"Bearer {api_token}"},
            params={"page": page, "per_page": 100}
        )
        data = response.json()
        all_members.extend(data["items"])
        # links.next may be absent or null on the last page
        has_more = data.get("links", {}).get("next") is not None
        page += 1
        # Rate limiting
        if has_more:
            time.sleep(0.1)
    return all_members
Generator Pattern
def paginate_posts(network_id, api_token):
    page = 1
    has_more = True
    while has_more:
        response = requests.get(
            f"https://api.mn.co/admin/v1/networks/{network_id}/posts",
            headers={"Authorization": f"Bearer {api_token}"},
            params={"page": page, "per_page": 100}
        )
        data = response.json()
        for post in data["items"]:
            yield post
        # links.next may be absent or null on the last page
        has_more = data.get("links", {}).get("next") is not None
        page += 1
        if has_more:
            time.sleep(0.1)

# Usage
for post in paginate_posts(NETWORK_ID, API_TOKEN):
    print(post["id"])
Best Practices
- Use Maximum Page Size - Set per_page=100 to minimize API calls
- Check for Next Page - Use links.next instead of guessing total pages
- Implement Rate Limiting - Add delays between requests to respect rate limits
- Handle Errors - Implement retry logic for failed pagination requests (see the sketch after this list)
- Use Generators - For large datasets, use generator patterns to avoid memory issues
- Cache Results - Store paginated results when appropriate to reduce API calls
- Monitor Progress - Implement progress tracking for long-running pagination
- Avoid Parallel Requests - Unless necessary, paginate sequentially to stay within rate limits
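A minimal retry sketch for the error-handling practice above; the retry count and backoff delays are illustrative defaults, not API requirements:
async function fetchPageWithRetry(url, apiToken, retries = 3) {
  for (let attempt = 0; attempt <= retries; attempt++) {
    try {
      const response = await fetch(url, {
        headers: { 'Authorization': `Bearer ${apiToken}` }
      });
      if (!response.ok) {
        throw new Error(`Request failed with status ${response.status}`);
      }
      return await response.json();
    } catch (error) {
      // Give up after the final attempt
      if (attempt === retries) throw error;
      // Exponential backoff: 1s, 2s, 4s, ...
      await new Promise(resolve => setTimeout(resolve, 1000 * 2 ** attempt));
    }
  }
}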
Common Pitfalls
❌ Hardcoding Total Pages
// Don't do this - you don't know the total pages
for (let page = 1; page <= 10; page++) {
  // Fetch page
}
✅ Check for Next Link
// Do this instead
while (hasMore) {
  const data = await fetchPage(page);
  hasMore = data.links?.next != null;
  page++;
}
❌ Ignoring Rate Limits
// Don't spam requests
while (hasMore) {
  await fetchPage(page);
  page++;
}
✅ Add Delays
// Respect rate limits
while (hasMore) {
  const data = await fetchPage(page);
  hasMore = data.links?.next != null;
  if (hasMore) {
    await new Promise(r => setTimeout(r, 100));
  }
  page++;
}
Next Steps