You can use the following code to fetch more than 5000 records from the Datadog API. The RUM events search endpoint caps how many events a single request can return, so the trick is cursor-based pagination: request a page, read the meta.page.after cursor from the response, and pass it back as page.cursor on the next request until no cursor is returned.
import axios from 'axios';
import * as fs from 'fs';

interface RUMEvent {
  // Define the properties of a RUM event here
}

interface RUMResponse {
  data: RUMEvent[];
  links?: {
    next?: string;
  };
  meta: {
    // "after" is only present while more pages remain
    page?: {
      after?: string;
    };
  };
}
// Fetch all matching RUM events between startTime and endTime by
// following the pagination cursor returned with each page.
const getRUMData = async (
  apiKey: string,
  appKey: string, // Datadog application key (the RUM application ID lives in the query filter below)
  startTime: number,
  endTime: number
): Promise<RUMEvent[]> => {
  let cursor: string | undefined = undefined;
  const query = {
    filter: {
      // Hard-coded to one RUM application and action; adjust the filter to your own data.
      query:
        '@application.id:0070aef4-9021-4b1a-8af8-85682b7d3f68 @action.name:element_timing @action.type:custom @type:action',
      from: startTime,
      to: endTime,
    },
  };
  let rumData: RUMEvent[] = [];
  let hasMoreData = true;

  while (hasMoreData) {
    const response = await axios.post(
      'https://api.datadoghq.com/api/v2/rum/events/search',
      {
        ...query,
        page: {
          // The API caps the page size (1000 per request at the time of writing);
          // the cursor loop is what gets you past that cap.
          limit: 1000,
          cursor,
        },
      },
      {
        headers: {
          'DD-API-KEY': apiKey,
          'DD-APPLICATION-KEY': appKey,
        },
      }
    );
    const responseData: RUMResponse = response.data;

    if (responseData.data.length > 0) {
      rumData = rumData.concat(responseData.data);
      console.log(`current page response count : ${responseData.data.length}`);
      console.log(`total collected : ${rumData.length}`);

      // Stop once the API stops returning a cursor; looping with an
      // undefined cursor would restart from the first page forever.
      cursor = responseData.meta.page?.after;
      if (!cursor) {
        hasMoreData = false;
      }
    } else {
      hasMoreData = false;
    }
  }

  return rumData;
};
// Write one JSON object per line (NDJSON), so large result sets
// stream to disk without building one giant string in memory.
const writeRUMDataToFile = async (
  data: RUMEvent[],
  fileName: string
): Promise<void> => {
  return new Promise((resolve, reject) => {
    const stream = fs.createWriteStream(fileName);
    stream.once('error', reject);
    stream.once('open', () => {
      data.forEach((event) => {
        stream.write(`${JSON.stringify(event)}\n`);
      });
      stream.end();
      resolve();
    });
  });
};
// Example usage
const apiKey = '<YOUR_API_KEY>';
const appKey = '<YOUR_APPLICATION_KEY>';
const startTime = 1676949491156; // epoch milliseconds
const endTime = 1676950331699;

getRUMData(apiKey, appKey, startTime, endTime)
  .then((data) => writeRUMDataToFile(data, 'rum_data.json'))
  .then(() => console.log('Data written to file'))
  .catch((err) => console.error(err));
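One more thing to watch on long exports: Datadog rate-limits its API, so a large time range can start returning HTTP 429 partway through the loop. Below is a minimal retry sketch; the postWithRetry helper, the maxRetries default, and the backoff values are illustrative assumptions, not part of Datadog's SDK.

import axios, { AxiosError } from 'axios';

// Hypothetical helper (assumption): retry a POST a few times on HTTP 429,
// honoring the server's Retry-After header when it is present.
const postWithRetry = async (
  url: string,
  body: unknown,
  headers: Record<string, string>,
  maxRetries = 3 // illustrative default, tune for your workload
): Promise<any> => {
  for (let attempt = 0; ; attempt++) {
    try {
      return await axios.post(url, body, { headers });
    } catch (err) {
      const status = (err as AxiosError).response?.status;
      if (status !== 429 || attempt >= maxRetries) throw err;
      const retryAfterSeconds = Number(
        (err as AxiosError).response?.headers?.['retry-after'] ?? 1
      );
      await new Promise((r) => setTimeout(r, retryAfterSeconds * 1000));
    }
  }
};

Swapping the axios.post call inside getRUMData for postWithRetry keeps the pagination loop unchanged while making it tolerant of throttling.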