Compare commits


4 commits

Author   SHA1         Message                        Date
docjyJ   e1ff78118d   Edit download_page function    2020-09-03 18:10:59 +02:00
docjyJ   c971d90595   Add Support of maxWeight       2020-09-03 18:07:44 +02:00
docjyJ   e9fbf09a35   Update api                     2020-09-02 16:00:42 +02:00
docjyJ   15641f0c7b   Update Proxiwash               2020-09-02 15:39:15 +02:00
2 changed files with 26 additions and 12 deletions

.gitignore

@@ -1,5 +1,6 @@
 /facebook/token
 /washinsa/washinsa_data.json
+/washinsa/tripode_b_data.json
 /facebook/facebook_data.json
 /dashboard/dashboard_data.json
 /menu/menu_data.json


@@ -21,8 +21,9 @@ Each machine row is composed of 6 columns
  - 6 - End time (The end time in format HH:MM or empty)
 '''
-DUMP_FILE = "washinsa_data.json"
-WASHINSA_URL = "https://www.proxiwash.com/weblaverie/component/weblaverie/?view=instancesfiche&format=raw&s=cf4f39"
+DUMP_FILE_INSA = "washinsa_data.json"
+DUMP_FILE_TRIPODE_B = "tripode_b_data.json"
+WASHINSA_URL = "https://www.proxiwash.com/weblaverie/component/weblaverie/?view=instancesfiche&format=raw&s="
 DRYER_STRING = "SECHE LINGE"
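
The hunk above replaces the single hard-coded laundry URL with a base URL that takes a site code as a suffix, one code per laundry. A quick sketch of the resulting URLs (the codes come from main() at the bottom of this diff; which physical laundry each code maps to is an assumption based on the dump file names):

    # Hypothetical illustration of the new URL scheme
    BASE = "https://www.proxiwash.com/weblaverie/component/weblaverie/?view=instancesfiche&format=raw&s="
    print(BASE + "cf4f39")  # presumably the INSA laundry (dumped to washinsa_data.json)
    print(BASE + "b310b7")  # presumably Tripode B (dumped to tripode_b_data.json)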
@@ -48,15 +49,22 @@ STATE_CONVERSION_TABLE = {
 TIME_RE = re.compile("^\d\d:\d\d$")
 
 
-def download_page():
+def get_json(code):
+    soup = BeautifulSoup(download_page(code), 'html.parser')
+    rows = get_rows(soup)
+    return get_parsed_data(rows)
+
+
+def download_page(code):
     """
     Downloads the page from proxiwash website
     """
+    url = WASHINSA_URL + code
     try:
-        with urllib.request.urlopen(WASHINSA_URL) as response:
+        with urllib.request.urlopen(url) as response:
             return response.read().decode()
     except:
-        print("Error processing following url: " + WASHINSA_URL)
+        print("Error processing following url: " + url)
         return ""
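
With this refactor, get_json(code) bundles the whole pipeline (download, parse with BeautifulSoup, extract rows, build the data structure), so callers only pass a laundry code. A minimal usage sketch, assuming the module's other helpers are loaded:

    # Hypothetical usage of the new entry point
    insa_data = get_json("cf4f39")       # fetch and parse the INSA laundry page
    tripode_b_data = get_json("b310b7")  # same pipeline for the Tripode B laundry

Note that download_page() still swallows all exceptions and returns an empty string, so a failed download makes get_json() silently parse an empty document rather than raise.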
@@ -78,6 +86,13 @@ def is_machine_dryer(row):
     return DRYER_STRING in row.contents[0].text
 
 
+def get_machine_weight(row):
+    """
+    Find the maximum weight supported by the machine.
+    """
+    return int(re.search("LINGE (.*?) KG", row.contents[0].text).group(1))
+
+
 def get_machine_number(row):
     """
     Gets the current machine number.
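
get_machine_weight() relies on the machine label containing the pattern "LINGE <weight> KG" (dryers are identified by "SECHE LINGE"; washer labels presumably contain "LINGE" too, e.g. "LAVE LINGE"). A worked example on a made-up label; the real text comes from row.contents[0].text:

    import re

    label = "SECHE LINGE 14 KG"  # hypothetical label text
    weight = int(re.search("LINGE (.*?) KG", label).group(1))
    print(weight)  # 14

If a label ever lacks the "KG" suffix, re.search() returns None and the .group(1) call raises AttributeError, so the pattern assumes every machine row advertises its weight.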
@@ -177,6 +192,7 @@ def get_parsed_data(rows):
         machine = {
             "number": get_machine_number(row),
             "state": state.value,
+            "maxWeight": get_machine_weight(row),
             "startTime": "",
             "endTime": "",
             "donePercent": "",
@@ -206,13 +222,10 @@ def get_parsed_data(rows):
 def main():
-    soup = BeautifulSoup(download_page(), 'html.parser')
-    rows = get_rows(soup)
-    with open(DUMP_FILE, 'w') as f:
-        json.dump(get_parsed_data(rows), f)
+    with open(DUMP_FILE_INSA, 'w') as f:
+        json.dump(get_json("cf4f39"), f)
+    with open(DUMP_FILE_TRIPODE_B, 'w') as f:
+        json.dump(get_json("b310b7"), f)
 
 
 main()
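
main() now produces one JSON dump per laundry by calling the shared get_json() pipeline with each site code. If more laundries are added later, the code/filename pairs could be folded into a table; a sketch using the constants from this diff:

    # Equivalent loop form (a sketch, not part of the commit)
    SITES = (("cf4f39", DUMP_FILE_INSA), ("b310b7", DUMP_FILE_TRIPODE_B))

    def main():
        for code, dump_file in SITES:
            with open(dump_file, 'w') as f:
                json.dump(get_json(code), f)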