Sure! Here's a basic example of how you can use the twitter
module in Node.js to create a CSV with the screen names of everyone you're following, along with the date and time of their most recent tweet:
const Twitter = require('twitter');
const fs = require('fs');

// Authenticated Twitter API client. Replace the placeholder strings with
// real credentials before running.
const client = new Twitter({
  consumer_key: 'YOUR_CONSUMER_KEY',
  consumer_secret: 'YOUR_CONSUMER_SECRET',
  access_token_key: 'YOUR_ACCESS_TOKEN_KEY',
  access_token_secret: 'YOUR_ACCESS_TOKEN_SECRET'
});

/**
 * Fetches the first page (up to 200) of accounts the authenticated user
 * follows and writes them to followers.csv as
 * `screen_name,latest_tweet_time` rows. Errors are logged to stderr.
 */
async function getFollowers() {
  try {
    const { users } = await client.get('friends/list', { count: 200 });
    const rows = users.map((user) => {
      // `status` is absent for accounts with no visible tweets, so fall
      // back to an empty field.
      const latestTweetTime = user.status ? user.status.created_at : '';
      return `${user.screen_name},${latestTweetTime}`;
    });
    const body = ['screen_name,latest_tweet_time', ...rows].join('\n');
    fs.writeFileSync('followers.csv', body);
    console.log('Followers CSV created successfully!');
  } catch (error) {
    console.error(error);
  }
}

getFollowers();
This code uses the twitter
module to authenticate with the Twitter API and retrieve a single page — up to 200 accounts — of the users you're following. The map
function is then used to extract the relevant data from the response: the screen name and the date and time of each user's most recent tweet. Finally, the extracted data is written to a CSV file using the fs
module.
Note that you'll need to replace 'YOUR_CONSUMER_KEY'
, 'YOUR_CONSUMER_SECRET'
, 'YOUR_ACCESS_TOKEN_KEY'
, and 'YOUR_ACCESS_TOKEN_SECRET'
with your own Twitter API credentials.
Sure! Here's an updated version of the code that gets all the users you're following, page-by-page, while respecting Twitter's rate limiting:
const Twitter = require('twitter');
const fs = require('fs');

// Authenticated Twitter API client — replace the placeholders with real
// credentials before running.
const client = new Twitter({
  consumer_key: 'YOUR_CONSUMER_KEY',
  consumer_secret: 'YOUR_CONSUMER_SECRET',
  access_token_key: 'YOUR_ACCESS_TOKEN_KEY',
  access_token_secret: 'YOUR_ACCESS_TOKEN_SECRET'
});

/**
 * Walks every page of accounts the authenticated user follows and writes
 * them to followers.csv as `screen_name,latest_tweet_time` rows.
 */
async function getFollowers() {
  const everyone = [];
  // -1 requests the first page; the API reports a next cursor of "0" once
  // there are no further pages (loose != is deliberate: the cursor starts
  // as the number -1 and becomes the string "0").
  let cursor = -1;
  while (cursor != 0) {
    const page = await client.get('friends/list', { count: 200, cursor });
    everyone.push(...page.users);
    cursor = page.next_cursor_str;
    // Pause briefly between pages. NOTE(review): one second between
    // requests still allows far more than 15 requests per 15 minutes, so
    // this alone does not keep us under Twitter's documented rate limit.
    await new Promise((resolve) => setTimeout(resolve, 1000));
  }
  const lines = everyone.map(
    (user) => `${user.screen_name},${user.status ? user.status.created_at : ''}`
  );
  fs.writeFileSync(
    'followers.csv',
    `screen_name,latest_tweet_time\n${lines.join('\n')}`
  );
  console.log('Followers CSV created successfully!');
}

getFollowers();
In this version of the code, the get
method of the Twitter
client is called in a loop, with the cursor
parameter set to the next_cursor_str
value from the previous response. This allows you to retrieve all the users you're following, in pages of 200 users at a time.
To space requests out, the code waits for one second after each request. Note, however, that Twitter's limit for this endpoint is 15 requests per 15-minute window — roughly one request per minute — so a one-second pause is not actually enough to stay under the limit if you follow many accounts.
My apologies, you're correct! Here's an updated version of the code that makes a maximum of 15 requests per 15 minutes and handles rate limit errors:
const Twitter = require('twitter');
const fs = require('fs');

// Authenticated Twitter API client — replace the placeholders with real
// credentials before running.
const client = new Twitter({
  consumer_key: 'YOUR_CONSUMER_KEY',
  consumer_secret: 'YOUR_CONSUMER_SECRET',
  access_token_key: 'YOUR_ACCESS_TOKEN_KEY',
  access_token_secret: 'YOUR_ACCESS_TOKEN_SECRET'
});

// The `twitter` module rejects with an array of { code, message } objects for
// API-level errors, but can also reject with a plain Error (e.g. a network
// failure). Guard the shape before indexing so a network error is not masked
// by a TypeError inside the catch block. Code 88 is "Rate limit exceeded".
function isRateLimitError(error) {
  return Array.isArray(error) && error.length > 0 && error[0].code === 88;
}

/**
 * Fetches every account the authenticated user follows (paging through
 * `friends/list` 200 at a time), spacing requests one minute apart to stay
 * within Twitter's 15-requests-per-15-minutes limit, and writes the result
 * to followers.csv as `screen_name,latest_tweet_time` rows.
 *
 * On a rate-limit error (code 88) the current page is retried after a
 * 15-minute pause; any other error is rethrown to the caller.
 */
async function getFollowers() {
  let cursor = -1;
  let followers = [];
  // The API signals the last page with a next cursor of "0" (loose != is
  // deliberate: cursor starts as the number -1 and becomes a string).
  while (cursor != 0) {
    try {
      const response = await client.get('friends/list', { count: 200, cursor });
      followers = followers.concat(response.users);
      cursor = response.next_cursor_str;
      // Twitter allows 15 requests per 15-minute window, i.e. one per minute.
      // Only pause when another page remains — no need to wait after the last.
      if (cursor != 0) {
        await new Promise(resolve => setTimeout(resolve, 60 * 1000));
      }
    } catch (error) {
      if (isRateLimitError(error)) {
        console.log('Rate limit reached, waiting 15 minutes before retrying...');
        await new Promise(resolve => setTimeout(resolve, 15 * 60 * 1000));
      } else {
        throw error;
      }
    }
  }
  const data = followers.map(user => {
    return {
      screen_name: user.screen_name,
      // `status` is missing for accounts with no visible tweets.
      latest_tweet_time: user.status ? user.status.created_at : ''
    };
  });
  const csv = data
    .map(row => `${row.screen_name},${row.latest_tweet_time}`)
    .join('\n');
  fs.writeFileSync('followers.csv', `screen_name,latest_tweet_time\n${csv}`);
  console.log('Followers CSV created successfully!');
}

// Surface unexpected (non-rate-limit) failures instead of leaving an
// unhandled promise rejection.
getFollowers().catch(error => console.error(error));
In this version of the code, the rate limit errors are caught and handled by checking the error code. If the error code is 88, which indicates a rate limit error, the code waits for 15 minutes before retrying the request. This way, the code will make no more than 15 requests in any given 15-minute window, and you won't run into rate limit issues.