diff options
author | Alban Gruin | 2017-09-25 16:08:01 +0200 |
---|---|---|
committer | Alban Gruin | 2017-09-25 16:08:01 +0200 |
commit | aa7ccf13a8735f162e2ea859ad1d8ebe9f34f657 (patch) | |
tree | 686b7782c87b0848d7965d99031977f22110d9d3 /management/commands/_private.py | |
parent | 3d2fa6b15c58b775bc7e60e148f3a6bf1f2631d0 (diff) |
Utilisation de pylint pour améliorer la qualité du code
Diffstat (limited to 'management/commands/_private.py')
-rw-r--r-- | management/commands/_private.py | 20 |
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/management/commands/_private.py b/management/commands/_private.py index c140f51..2d01e67 100644 --- a/management/commands/_private.py +++ b/management/commands/_private.py @@ -13,15 +13,15 @@ # You should have received a copy of the GNU Affero General Public License # along with celcatsanitizer. If not, see <http://www.gnu.org/licenses/>. +import datetime +import re + from bs4 import BeautifulSoup from django.utils import timezone from edt.models import Group, Room, Course from edt.utils import get_week -import datetime -import re - import requests @@ -31,8 +31,8 @@ class Week: self.start = timezone.make_aware( datetime.datetime.strptime(start, "%d/%m/%Y")) - def get_day(self, id): - return self.start + datetime.timedelta(id) + def get_day(self, day_id): + return self.start + datetime.timedelta(day_id) @property def year(self): @@ -75,7 +75,7 @@ def consolidate_group(group): def consolidate_groups(groups): for group in groups: - if group.parent == None: + if group.parent is None: consolidate_group(group) def delete_courses_in_week(timetable, year, week): @@ -153,7 +153,7 @@ def get_update_date(soup): # (\d+) au moins un nombre # : un deux-points # (\d+) au moins un nombre - datetime_regex = re.compile("(\d+)/(\d+)/(\d+)\s+(\d+):(\d+):(\d+)") + datetime_regex = re.compile(r"(\d+)/(\d+)/(\d+)\s+(\d+):(\d+):(\d+)") search = datetime_regex.search(soup.footer.text) if search is None: return None @@ -171,8 +171,8 @@ def get_weeks(soup): return weeks def get_xml(url): - r = requests.get(url) - r.encoding = "utf8" + req = requests.get(url) + req.encoding = "utf8" - soup = BeautifulSoup(r.content, "html.parser") + soup = BeautifulSoup(req.content, "html.parser") return soup |