Compare commits

10 commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | b4c1e0fcaf |  |
|  | 0cf69f9ff9 |  |
|  | f85335adad |  |
|  | 4f8c054663 |  |
|  | 33fd3a4668 |  |
|  | 091bed7fa5 |  |
|  | e1ff78118d |  |
|  | c971d90595 |  |
|  | e9fbf09a35 |  |
|  | 15641f0c7b |  |
5 changed files with 214 additions and 45 deletions

.gitignore (vendored): 1 changed line
@@ -1,5 +1,6 @@
 /facebook/token
 /washinsa/washinsa_data.json
+/washinsa/tripode_b_data.json
 /facebook/facebook_data.json
 /dashboard/dashboard_data.json
 /menu/menu_data.json

README.md: 89 changed lines
@@ -1,3 +1,88 @@
-# Server for the Amicale's application
+# Server for the Amicale's application (Campus)
 
-Server-side part of the Amicale's application, released under the GPLv3 license.
+Server-side part of [the Amicale's application](https://git.etud.insa-toulouse.fr/vergnet/application-amicale), released under the GPLv3 license.
+
+The server is written in Python 3.6, using [venv](https://docs.python.org/3/tutorial/venv.html).
+
+## Structure
+
+For compatibility reasons, two versions are online on the server: one in `public_html` and another in `public_html/v2`. The first version should be ignored and will eventually be removed. v2 is the version currently in use.
+
+## Installation
+
+First, clone this repository into the desired directory and move into it:
+
+```shell
+git clone https://git.etud.insa-toulouse.fr/vergnet/application-amicale-serveur.git
+cd application-amicale-serveur
+```
+
+Then create the venv:
+```shell
+python3 -m venv .venv
+```
+
+Finally, install the dependencies:
+
+```shell
+pip install -r requirements.txt
+```
+
+## Updating dependencies
+
+Open `requirements.txt` and write in the new version of the library to use.
+Then load the venv in your terminal:
+
+```shell
+source .venv/bin/activate
+```
+
+This command makes the shell use the Python installed in the venv instead of the system one.
+All that is left is to install the new versions referenced in `requirements.txt`:
+
+```shell
+pip install -r requirements.txt
+```
+
+## Deploying updates to the server
+
+The server is synchronized with git, so simply connect to the web space, move into the v2 directory and pull the latest changes:
+
+```shell
+ssh amicale_app@etud.insa-toulouse.fr
+cd public_html/v2
+git pull
+```
+
+If you changed library versions in `requirements.txt`, remember to update them on the server with the following command:
+
+```shell
+pip install -r requirements.txt
+```
+
+## 'BREAKING' updates
+
+If an update breaks compatibility with the current version of the application, the old version of the server software has to be kept around until everyone has updated the application (several months).
+
+To do so, create a new directory for the new version in `public_html`. For example, to replace version 2 (installed in `public_html/v2`), install the new version in `public_html/v3`.
+
+Reinstall everything in that directory as follows:
+
+```shell
+ssh amicale_app@etud.insa-toulouse.fr
+cd public_html
+git clone https://git.etud.insa-toulouse.fr/vergnet/application-amicale-serveur.git v<NUMERO_DE_VERSION>
+cd v<NUMERO_DE_VERSION>
+```
+
+Then create the venv:
+```shell
+python3 -m venv .venv
+```
+
+Finally, install the dependencies:
+
+```shell
+pip install -r requirements.txt
+```
+
+Then point the application at this new version.

Facebook scraper (Python)

@@ -1,20 +1,21 @@
 import json
 import facebook_scraper
 import enum
 
 FILE = 'facebook_data.json'
 
 PAGES = ["amicale.deseleves", "campus.insat"]
 
 def scrape_data(page):
     post_list = []
-    for post in facebook_scraper.get_posts(page, pages=3):
+    for post in facebook_scraper.get_posts(page, pages=4):
+        print(post)
         cleaned_post = {
             "id": post["post_id"],
             "message": post["post_text"],
             "url": post["post_url"],
             "image": post["image"],
             "images": post["images"],
             "video": post["video"],
             "link": post["link"],
             "time": post["time"].timestamp(),

@@ -27,6 +28,7 @@ def scrape_data(page):
 def get_all_data():
     data = {}
     for page in PAGES:
+        print(" -> " + page)
         data[page] = scrape_data(page)
     return data
 
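The rest of the scraper is outside these hunks, but they show the flow: `scrape_data` flattens each post into a plain dict and `get_all_data` collects one list per page. A minimal sketch of how that result could be persisted to `FILE`, assuming the elided lines do nothing more than write the JSON (the actual write code is not part of this diff):

```python
# Hypothetical persistence step, not shown in the diff above: dump the
# {page: [cleaned_post, ...]} mapping from get_all_data() into FILE.
import json

def dump_all_data():
    with open(FILE, 'w', encoding='utf-8') as f:
        json.dump(get_all_data(), f)
```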

requirements.txt

@@ -4,7 +4,7 @@ bs4==0.0.1
 certifi==2020.6.20
 chardet==3.0.4
 cssselect==1.1.0
-facebook-scraper==0.2.9
+facebook-scraper==0.2.34
 fake-useragent==0.1.11
 html2text==2020.1.16
 idna==2.10

Washinsa parser (Python)

@@ -1,15 +1,19 @@
 # Parser made with BeautifulSoup4
 # https://www.crummy.com/software/BeautifulSoup/bs4/doc
+from json import JSONDecodeError
+
 from bs4 import BeautifulSoup
 import urllib.request
 from enum import Enum
 import re
 import json
 from datetime import datetime
+
+from typing.io import TextIO
 
 '''
 PAGE STRUCTURE
-as of june 2020
+as of july 2021
 
 A table with a row (tr html tag) for each machine
 Each machine row is composed of 6 columns
@@ -19,11 +23,20 @@ Each machine row is composed of 6 columns
 - 4 - Program (Name of the program or empty)
 - 5 - Start time (The start time in format HH:MM or empty)
 - 6 - End time (The end time in format HH:MM or empty)
 
+Custom message (errors displayed on the website)
+Must use the non-raw url to see it.
+In the <font> under the <div> of id msg-permanent
+example message: Perturbations operateur, laverie non connectee a internet depuis le 12/07/2021 a 19h45
 '''
 
-DUMP_FILE = "washinsa_data.json"
-WASHINSA_URL = "https://www.proxiwash.com/weblaverie/component/weblaverie/?view=instancesfiche&format=raw&s=cf4f39"
+DUMP_FILE_INSA = "washinsa_data.json"
+DUMP_FILE_TRIPODE_B = "tripode_b_data.json"
+WASHINSA_RAW_URL = "https://www.proxiwash.com/weblaverie/component/weblaverie/?view=instancesfiche&format=raw&s="
+WASHINSA_URL = "https://www.proxiwash.com/weblaverie/ma-laverie-2?s="
 DRYER_STRING = "SECHE LINGE"
+# 10 min
+CUSTOM_MESSAGE_INTERVAL = 10 * 60 * 1000
 
+
 class State(Enum):
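The new docstring lines pin down where the custom message lives: in a `<font>` tag under the `<div>` with id `msg-permanent`, visible only on the non-raw page. A small sketch of that lookup on assumed HTML (the real page wraps more markup around it):

```python
# BeautifulSoup lookup matching the structure described above;
# the sample HTML is an assumption for illustration only.
from bs4 import BeautifulSoup

sample = ('<div id="msg-permanent"><font>Perturbations operateur, '
          'laverie non connectee a internet</font></div>')
soup = BeautifulSoup(sample, 'html.parser')
msg = soup.find(id="msg-permanent")
print(msg.font.string if msg else None)
# -> Perturbations operateur, laverie non connectee a internet
```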
@@ -48,19 +61,64 @@ STATE_CONVERSION_TABLE = {
 TIME_RE = re.compile("^\d\d:\d\d$")
 
 
-def download_page():
+def get_json(code: str, file: TextIO):
+    file_json = {
+        "info": {},
+        "dryers": [],
+        "washers": []
+    }
+    try:
+        file_json = json.load(file)
+    except JSONDecodeError as e:
+        print("Error reading file " + file.name)
+        print(e)
+
+    if not ("info" in file_json):
+        file_json["info"] = {}
+
+    info = file_json["info"]
+    if not ("last_checked" in info) or info[
+            "last_checked"] < datetime.now().timestamp() * 1000 - CUSTOM_MESSAGE_INTERVAL:
+        print("Updating proxiwash message")
+        info["message"] = get_message(code)
+        info["last_checked"] = datetime.now().timestamp() * 1000
+    parsed_data = get_machines(code)
+    file_json["dryers"] = parsed_data["dryers"]
+    file_json["washers"] = parsed_data["washers"]
+    return file_json
+
+
+def get_machines(code: str):
+    soup = BeautifulSoup(download_page(code), 'html.parser')
+    rows = get_rows(soup)
+    return get_parsed_data(rows)
+
+
+def get_message(code: str):
+    soup = BeautifulSoup(download_page(code, False), 'html.parser')
+    msg = soup.find(id="msg-permanent")
+    if msg:
+        return soup.find(id="msg-permanent").font.string
+    return None
+
+
+def download_page(code: str, raw=True):
     """
     Downloads the page from proxiwash website
     """
+    url = WASHINSA_RAW_URL + code
+    if not raw:
+        url = WASHINSA_URL + code
+
     try:
-        with urllib.request.urlopen(WASHINSA_URL) as response:
+        with urllib.request.urlopen(url) as response:
             return response.read().decode()
     except:
-        print("Error processing following url: " + WASHINSA_URL)
+        print("Error processing following url: " + url)
         return ""
 
 
-def get_rows(soup):
+def get_rows(soup: BeautifulSoup):
     """
     Gets rows corresponding to machines on the page
     """
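`get_json` only re-scrapes the message when the stored `last_checked` timestamp is older than `CUSTOM_MESSAGE_INTERVAL`. Everything is kept in milliseconds, hence the `* 1000` on `datetime.now().timestamp()`, which returns seconds. A worked instance of that check, with a made-up `last_checked` value:

```python
# Illustration of the refresh condition used in get_json; the
# last_checked value here is invented for the example.
from datetime import datetime

CUSTOM_MESSAGE_INTERVAL = 10 * 60 * 1000  # 10 min, in milliseconds
now_ms = datetime.now().timestamp() * 1000
last_checked = now_ms - 11 * 60 * 1000    # pretend: checked 11 min ago
print(last_checked < now_ms - CUSTOM_MESSAGE_INTERVAL)  # True -> re-fetch
```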
@@ -78,6 +136,13 @@ def is_machine_dryer(row):
     return DRYER_STRING in row.contents[0].text
 
 
+def get_machine_weight(row):
+    """
+    Find the maximum weight supported by the machine.
+    """
+    return int(re.search("LINGE (.*?) KG", row.contents[0].text).group(1))
+
+
 def get_machine_number(row):
     """
     Gets the current machine number.
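`get_machine_weight` relies on the row's first cell containing text of the form `... LINGE <weight> KG ...`, which matches both `SECHE LINGE` (dryer) labels and washer labels. A quick check of the regex on an invented label:

```python
# The same regex as get_machine_weight, run on a made-up label;
# the actual cell text comes from the laundry page.
import re

label = "SECHE LINGE 14 KG"  # hypothetical row text
print(int(re.search("LINGE (.*?) KG", label).group(1)))  # -> 14
```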
@@ -166,53 +231,69 @@ def get_machine_remaining_time(row):
     return time
 
 
+def is_machine_parsed(dryers, washers, number: int):
+    for m in dryers:
+        if m["number"] == number:
+            return True
+    for m in washers:
+        if m["number"] == number:
+            return True
+    return False
+
+
 def get_parsed_data(rows):
     """
-    Gets the parsed data from the web page, farmatting it in a easy to use object
+    Gets the parsed data from the web page, formatting it in an easy-to-use object
     """
     dryers = []
     washers = []
     for row in rows:
-        state = get_machine_state(row)
-        machine = {
-            "number": get_machine_number(row),
-            "state": state.value,
-            "startTime": "",
-            "endTime": "",
-            "donePercent": "",
-            "remainingTime": "",
-            "program": "",
-        }
-        if state == State.RUNNING:
-            machine_times = get_machine_times(row)
-            machine["startTime"] = machine_times[0]
-            machine["endTime"] = machine_times[1]
-            if len(machine_times[0]) == 0:
-                state = State.RUNNING_NOT_STARTED
-                machine["state"] = state.value
-            machine["program"] = get_machine_program(row)
-            machine["donePercent"] = get_machine_done_percent(row)
-            machine["remainingTime"] = get_machine_remaining_time(row)
-
-        if is_machine_dryer(row):
-            dryers.append(machine)
-        else:
-            washers.append(machine)
+        machine_number = get_machine_number(row)
+        if not is_machine_parsed(dryers, washers, machine_number):
+            state = get_machine_state(row)
+            machine = {
+                "number": machine_number,
+                "state": state.value,
+                "maxWeight": get_machine_weight(row),
+                "startTime": "",
+                "endTime": "",
+                "donePercent": "",
+                "remainingTime": "",
+                "program": "",
+            }
+            if state == State.RUNNING:
+                machine_times = get_machine_times(row)
+                machine["startTime"] = machine_times[0]
+                machine["endTime"] = machine_times[1]
+                if len(machine_times[0]) == 0:
+                    state = State.RUNNING_NOT_STARTED
+                    machine["state"] = state.value
+                machine["program"] = get_machine_program(row)
+                machine["donePercent"] = get_machine_done_percent(row)
+                machine["remainingTime"] = get_machine_remaining_time(row)
+
+            if is_machine_dryer(row):
+                dryers.append(machine)
+            else:
+                washers.append(machine)
     return {
        "dryers": dryers,
        "washers": washers
     }
 
 
+def write_json(data, f: TextIO):
+    f.seek(0)
+    f.truncate(0)
+    json.dump(data, f)
+
+
 def main():
-    soup = BeautifulSoup(download_page(), 'html.parser')
-    rows = get_rows(soup)
-    with open(DUMP_FILE, 'w') as f:
-        json.dump(get_parsed_data(rows), f)
+    dump_data = {}
+    with open(DUMP_FILE_INSA, 'r+', encoding='utf-8') as f:
+        write_json(get_json("cf4f39", f), f)
+    with open(DUMP_FILE_TRIPODE_B, 'r+', encoding='utf-8') as f:
+        write_json(get_json("b310b7", f), f)
 
 
 main()
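The dump files are now opened in `'r+'` so `get_json` can read the previous contents (keeping a still-fresh message) before the same handle is rewritten. That is why `write_json` seeks back to the start and truncates: after `json.load` the cursor sits at end-of-file, and without `truncate(0)` a shorter document would leave stale bytes behind. A standalone sketch of the pattern, assuming `example.json` already exists:

```python
# Read-modify-rewrite through one 'r+' handle, as write_json does.
import json

with open("example.json", 'r+', encoding='utf-8') as f:
    data = json.load(f)   # cursor is now at end-of-file
    data["updated"] = True
    f.seek(0)             # rewind to the beginning
    f.truncate(0)         # discard the old contents
    json.dump(data, f)    # write the fresh document
```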