diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..3f7037b --- /dev/null +++ b/.env.example @@ -0,0 +1,5 @@ +TWITTER_USERNAME=myaccount +TWITTER_PASSWORD=MyPassword!!! +TWITTER_EMAIL=myemail@gmail.com +TWITTER_COOKIES= # Check the README for how to set this-- important if you don't want your account to get flagged +PROXY_URL= # HTTP(s) proxy for requests (optional) \ No newline at end of file diff --git a/README.md b/README.md index 7f4a76e..55b2798 100644 --- a/README.md +++ b/README.md @@ -15,5 +15,18 @@ TWITTER_USERNAME= # Account username TWITTER_PASSWORD= # Account password TWITTER_EMAIL= # Account email TWITTER_COOKIES= # JSON-serialized array of cookies of an authenticated session -PROXY_URL= # HTTP(s) proxy for requests (optional) +PROXY_URL= # HTTP(s) proxy for requests (necessary for browsers) +``` + +#### Getting Twitter Cookies +It is important that you use Twitter cookies so that you don't send a new login request to Twitter every time you want to do something. + +In your application, you will probably want to have a check for cookies. If you don't have cookies, log in with user auth credentials. Then, cache the cookies for future use. 
+```ts + const scraper = await getScraper({ authMethod: 'password' }); + + scraper.getCookies().then((cookies) => { + console.log(cookies); + // Remove 'Cookies' and save the cookies as a JSON array + }); ``` \ No newline at end of file diff --git a/src/auth.test.ts b/src/auth.test.ts index 437de6a..263eebc 100644 --- a/src/auth.test.ts +++ b/src/auth.test.ts @@ -6,6 +6,11 @@ testLogin( 'scraper can log in', async () => { const scraper = await getScraper({ authMethod: 'password' }); + + scraper.getCookies().then((cookies) => { + console.log(cookies); + }); + await expect(scraper.isLoggedIn()).resolves.toBeTruthy(); }, 15000, diff --git a/src/scraper.test.ts b/src/scraper.test.ts index 3c8aa86..d37da7b 100644 --- a/src/scraper.test.ts +++ b/src/scraper.test.ts @@ -1,18 +1,5 @@ import { Scraper } from './scraper'; -test('scraper uses request transform when provided', async () => { - const scraper = new Scraper({ - transform: { - // Should throw "TypeError: Only absolute URLs are supported" - request: () => [''], - }, - }); - - await expect(scraper.getLatestTweet('twitter')).rejects.toThrowError( - TypeError, - ); -}); - test('scraper uses response transform when provided', async () => { const scraper = new Scraper({ transform: { @@ -33,5 +20,5 @@ test('scraper uses response transform when provided', async () => { }, }); - await expect(scraper.getLatestTweet('twitter')).rejects.toThrowError(); + await expect(scraper.getLatestTweet('twitter')).rejects.toThrow(); }); diff --git a/src/tweets.test.ts b/src/tweets.test.ts index ee360ef..89872dd 100644 --- a/src/tweets.test.ts +++ b/src/tweets.test.ts @@ -312,15 +312,6 @@ test('scraper can get tweet thread', async () => { expect(tweet?.thread.length).toStrictEqual(7); }); -test('scraper can get liked tweets', async () => { - const scraper = await getScraper(); - const liked = scraper.getLikedTweets('elonmusk', 10); - const tweet = await liked.next(); - expect(tweet.value).not.toBeUndefined(); - 
expect(tweet.done).toBeFalsy(); - expect(tweet.value?.id).not.toBeUndefined(); -}); - test('sendTweet successfully sends a tweet', async () => { const scraper = await getScraper(); const draftText = 'This is a test tweet';