# uwm/main.py
# NOTE: source recovered from an archived repository (read-only since 2023-01-13).
from sys import argv
from requests import Session
from bs4 import BeautifulSoup
from requests.models import Response
class Universite:
    """Client for a CAS-protected university portal, used to scrape the grades table."""

    def __init__(self, url, pseudo, motDePasse):
        """Store the CAS login URL and the credential form fields.

        url        -- CAS login page URL
        pseudo     -- account username
        motDePasse -- account password
        """
        self.url = url
        self.loginData = {
            "username": pseudo,
            "password": motDePasse,
            "_eventId": "submit",
            "submit": "SE CONNECTER"
        }

    def maSoupe(self, page):
        """Return a BeautifulSoup HTML parser over the response body."""
        return BeautifulSoup(page.content, "html.parser")

    # def ecrirePageHTML(self, texte):  # DEBUG helper: dump a fetched page to disk
    #     with open("page.html", "w") as f:
    #         f.write(texte)

    def recuperationNotes(self):
        """Log in through CAS, navigate to the grades service and return the
        first grades table (its HTML after <thead>) as a cleaned string, or
        None when no table with a <thead> is found.

        Raises TimeoutError when the expected service link is absent from the
        post-login page (failed login or broken site response).
        """
        with Session() as session:
            # Login: fetch the CAS page to harvest the hidden "lt" and
            # "execution" tokens, then POST the credentials.
            reponse = session.get(self.url)
            soup = self.maSoupe(reponse)
            self.loginData["lt"] = soup.find("input", attrs={"name": "lt"})["value"]
            self.loginData["execution"] = soup.find("input", attrs={"name": "execution"})["value"]
            reponse = session.post(self.url, data=self.loginData)

            # Intermediate results page: locate the grades service link.
            soup = self.maSoupe(reponse)
            try:
                url = soup.find("a", attrs={"id": "service-407"})["href"]
            except (TypeError, KeyError):
                # find() returned None or lacked "href": the link is missing.
                raise TimeoutError("Le site a pris trop de temps pour répondre, veuillez réessayer plus tard.")

            # Follow the service redirect manually to capture the target URL.
            reponse = session.get(url, allow_redirects=False)
            url = reponse.headers["Location"]
            reponse = session.get(url)

            # Grades page: rebuild the "detailnotes" URL from the welcome form's action.
            soup = self.maSoupe(reponse)
            url = f"{url}?{soup.find('form', attrs={'enctype': 'application/x-www-form-urlencoded'})['action'].split('?')[1].replace('welcome', 'detailnotes')}"
            reponse = session.get(url)

            # Grades tables: return the first table that contains a <thead>.
            soup = self.maSoupe(reponse)
            for attrs in soup.findAll("table"):
                try:
                    texte = str(attrs).split("thead")[1][2:-2]
                    # Collapse runs of whitespace left by the HTML layout.
                    # NOTE(review): the scraped original read replace(' ', ' ')
                    # (a no-op that loops forever); presumably it targeted
                    # double spaces or U+00A0 — confirm against live output.
                    while "  " in texte:
                        texte = texte.replace("  ", " ")
                    return texte
                except IndexError:
                    # Table without a <thead>: skip it.
                    pass
if __name__ == "__main__":
    # CLI entry point: expects exactly three arguments (URL, username,
    # password).  Use a distinct local name instead of shadowing the
    # imported `argv`.
    args = argv[1:]
    if len(args) == 3:
        print(Universite(*args).recuperationNotes())
    else:
        print("Merci de renseigner l'URL, le pseudo et le mot de passe (avec des \").")