# main.py
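"""Scrape posts from a Facebook group and mirror them into Airtable.

Pipeline: log in with Selenium, collect the group's unique post URLs,
create one Airtable record per URL, then backfill the description field
for any records that are still missing one.
"""
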
import logging
import sys

from dotenv import load_dotenv
from pyairtable.formulas import match

import airtable
import fb
import scraper

# Load credentials from a local .env file; the fb and airtable helpers are
# assumed to read them from the environment.
load_dotenv()

URL = "https://facebook.com"
GROUP_ID = "320292845738195"  # Facebook group to scrape

# Set up logging to both a file and stdout.
fhandler = logging.FileHandler(filename="tmp.log")
shandler = logging.StreamHandler(stream=sys.stdout)
handlers = [fhandler, shandler]

logging.basicConfig(
    level=logging.INFO,
    format="[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s",
    handlers=handlers,
)
logger = logging.getLogger(__name__)
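
# A formatted record looks like:
#   [2024-01-01 12:00:00,000] {main.py:42} INFO - main: ...
# (asctime uses logging's default "%Y-%m-%d %H:%M:%S,mmm" form; the values
# shown here are illustrative.)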


def main():
    s = scraper.Scraper()
    root_window_id = s.initDriver()  # handle to the original browser window
    at = airtable.MyAirtable()

    fb.login(s)
    post_urls = fb.scrapeGroup(s, GROUP_ID)  # list of unique post URLs
    # Queue one record per post; the uid is the group id plus the final
    # path segment of the post URL.
    records = []
    for url in post_urls:
        d = {"uid": f"{GROUP_ID}/{url.split('/')[-1]}", "url": url}
        records.append(d)

    records_created = at.table.batch_create(records)
    logger.debug(f"main: {len(records_created)=} | {records_created=}")
    logger.info(
        f"main: added {len(records_created)} out of {len(records)} queued to be inserted"
    )
    # Fetch records that still have an empty description and scrape one for each.
    unprocessed_records = at.getMatchingRecords(
        match_formula=match({"description": ""}), fields_to_return=["uid", "url"]
    )
    unprocessed_urls = [x["fields"]["url"] for x in unprocessed_records]
    logger.info(f"main: got {len(unprocessed_records)} unprocessed records in airtable")

    post_descriptions = fb.extractPostDescriptions(s, unprocessed_urls)  # url -> description
    logger.debug(f"main: {post_descriptions=}")
    # Attach each scraped description to its record, falling back to a
    # placeholder when no description could be extracted for that URL.
    records_to_upsert = []
    for r in unprocessed_records:
        records_to_upsert.append(
            {
                "id": r["id"],
                "fields": {
                    "description": post_descriptions.get(
                        r["fields"]["url"], "No description available"
                    )
                },
            }
        )
    updated_records = at.table.batch_update(records_to_upsert)
    logger.info(
        f"main: {len(updated_records)} records out of {len(unprocessed_records)} queued were updated"
    )


if __name__ == "__main__":
    main()
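

# ---------------------------------------------------------------------------
# For reference, a minimal sketch of the MyAirtable wrapper this script
# assumes. It is hypothetical (the real airtable module is not shown here),
# but it is consistent with the calls above (at.table.batch_create,
# at.table.batch_update, at.getMatchingRecords) and with pyairtable's
# public API:
#
#   import os
#   from pyairtable import Api
#
#   class MyAirtable:
#       def __init__(self):
#           # AIRTABLE_API_KEY, AIRTABLE_BASE_ID, and the "posts" table name
#           # are assumed; substitute the real values.
#           api = Api(os.environ["AIRTABLE_API_KEY"])
#           self.table = api.table(os.environ["AIRTABLE_BASE_ID"], "posts")
#
#       def getMatchingRecords(self, match_formula, fields_to_return):
#           # table.all() returns records shaped {"id": ..., "fields": {...}},
#           # matching how main() indexes into them.
#           return self.table.all(formula=match_formula, fields=fields_to_return)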