Update Proxiwash

parent 9f2ae77d2a
commit 15641f0c7b

2 changed files with 21 additions and 9 deletions
.gitignore (vendored): 1 change
@@ -1,5 +1,6 @@
 /facebook/token
 /washinsa/washinsa_data.json
+/washinsa/washinsa_data_V2.json
 /facebook/facebook_data.json
 /dashboard/dashboard_data.json
 /menu/menu_data.json
@@ -22,7 +22,8 @@ Each machine row is composed of 6 columns
 '''
 
 DUMP_FILE = "washinsa_data.json"
-WASHINSA_URL = "https://www.proxiwash.com/weblaverie/component/weblaverie/?view=instancesfiche&format=raw&s=cf4f39"
+DUMP_FILE_V2 = "washinsa_data_V2.json"
+WASHINSA_URL = "https://www.proxiwash.com/weblaverie/component/weblaverie/?view=instancesfiche&format=raw&s="
 DRYER_STRING = "SECHE LINGE"
 
 
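Note on the constant split above: the laundromat id is no longer baked into WASHINSA_URL; callers append it per request. A minimal sketch of the intended composition (full_url is an illustrative name, not from the diff):

WASHINSA_URL = ("https://www.proxiwash.com/weblaverie/component/weblaverie/"
                "?view=instancesfiche&format=raw&s=")

# "cf4f39" is the INSA id the old constant hard-coded; "b310b7"
# (Tripode B) is the second site this commit adds in main().
full_url = WASHINSA_URL + "cf4f39"
print(full_url)  # reproduces the pre-refactor URL exactly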
@@ -48,12 +49,18 @@ STATE_CONVERSION_TABLE = {
 TIME_RE = re.compile("^\d\d:\d\d$")
 
 
-def download_page():
+def get_json(code):
+    soup = BeautifulSoup(download_page(code), 'html.parser')
+    rows = get_rows(soup)
+    return get_parsed_data(rows)
+
+
+def download_page(code):
     """
     Downloads the page from proxiwash website
     """
     try:
-        with urllib.request.urlopen(WASHINSA_URL) as response:
+        with urllib.request.urlopen(WASHINSA_URL + code) as response:
             return response.read().decode()
     except:
         print("Error processing following url: " + WASHINSA_URL)
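get_json now wraps the whole scrape for one site, so main() can run it once per laundromat. A self-contained sketch of that pipeline under stated assumptions: get_rows and get_parsed_data exist elsewhere in this file but their bodies are outside the diff, so trivial stand-ins are used here.

import urllib.request
from bs4 import BeautifulSoup

BASE_URL = ("https://www.proxiwash.com/weblaverie/component/weblaverie/"
            "?view=instancesfiche&format=raw&s=")

def download_page(code):
    # Mirrors the diff: the site id is appended to the base URL per call.
    try:
        with urllib.request.urlopen(BASE_URL + code) as response:
            return response.read().decode()
    except Exception:
        # Unlike the original print, which shows only the base URL,
        # including the code identifies which site failed.
        print("Error processing following url: " + BASE_URL + code)
        return ""

def get_rows(soup):
    # Stand-in: the real helper extracts machine rows from the page table.
    return soup.find_all("tr")

def get_parsed_data(rows):
    # Stand-in: the real helper maps rows to the dump's JSON structure.
    return {"machine_count": len(rows)}

def get_json(code):
    soup = BeautifulSoup(download_page(code), "html.parser")
    return get_parsed_data(get_rows(soup))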
@@ -206,13 +213,17 @@ def get_parsed_data(rows):
 
 
 def main():
-    soup = BeautifulSoup(download_page(), 'html.parser')
-    rows = get_rows(soup)
+    insa = get_json("cf4f39")
     with open(DUMP_FILE, 'w') as f:
-        json.dump(get_parsed_data(rows), f)
+        json.dump(insa, f)
+
+    tripodeB = get_json("b310b7")
+    washs = {
+        "insa": insa,
+        "tripodeB": tripodeB,
+    }
+    with open(DUMP_FILE_V2, 'w') as f:
+        json.dump(washs, f)
 
 
 main()
-
-
-
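The V1 dump keeps the old single-site payload while the V2 dump keys each site by name. A hypothetical illustration of washinsa_data_V2.json's top-level shape; the inner per-site structure is whatever get_parsed_data returns and is not visible in this diff:

import json

# Placeholder payloads: the real values come from get_parsed_data().
washs = {
    "insa": {},      # laundromat code cf4f39
    "tripodeB": {},  # laundromat code b310b7
}

with open("washinsa_data_V2.json", "w") as f:
    json.dump(washs, f)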