Add requirements.txt
parent 92863bd82c
commit 80fa94b0f0
2 changed files with 25 additions and 2 deletions
@@ -1,12 +1,12 @@
 import json
-from facebook_scraper import get_posts
+import facebook_scraper


 FILE = 'facebook_data.json'


 def scrape_data():
     post_list = []
-    for post in get_posts('amicale.deseleves', pages=3):
+    for post in facebook_scraper.get_posts('amicale.deseleves', pages=3):
         print(post)
         cleaned_post = {
             "post_id": post["post_id"],
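For context, the hunk above only changes the import style: get_posts is now called through the facebook_scraper module. Below is a minimal sketch of how the whole scrape_data() might read with that change, assuming the cleaned posts are collected and written to FILE as JSON; the fields beyond post_id and the serialization step are assumptions, since the diff only shows the opening of the dict.

import json

import facebook_scraper

FILE = 'facebook_data.json'


def scrape_data():
    post_list = []
    # Qualified call matches the new module-level import in the hunk above.
    for post in facebook_scraper.get_posts('amicale.deseleves', pages=3):
        print(post)
        cleaned_post = {
            "post_id": post["post_id"],
            # The fields below are illustrative assumptions; only post_id is visible in the diff.
            "text": post.get("text"),
            "time": str(post.get("time")),  # datetime is not JSON-serializable as-is
        }
        post_list.append(cleaned_post)
    # Persisting to FILE is an assumption; only the constant appears in the diff.
    with open(FILE, 'w', encoding='utf-8') as f:
        json.dump(post_list, f, ensure_ascii=False, indent=2)


if __name__ == '__main__':
    scrape_data()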
23  requirements.txt  Normal file
@@ -0,0 +1,23 @@
+appdirs==1.4.4
+beautifulsoup4==4.9.1
+bs4==0.0.1
+certifi==2020.6.20
+chardet==3.0.4
+cssselect==1.1.0
+facebook-scraper==0.2.9
+fake-useragent==0.1.11
+html2text==2020.1.16
+idna==2.10
+lxml==4.5.2
+parse==1.16.0
+pyee==7.0.2
+pyppeteer==0.2.2
+pyquery==1.4.1
+requests==2.24.0
+requests-html==0.10.0
+six==1.15.0
+soupsieve==2.0.1
+tqdm==4.48.2
+urllib3==1.25.10
+w3lib==1.22.0
+websockets==8.1
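The list pins facebook-scraper 0.2.9 together with what appear to be its transitive dependencies (requests-html, pyppeteer, lxml, beautifulsoup4, and so on); it can be installed in one step with pip install -r requirements.txt, ideally inside a fresh virtual environment.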