path: root/management/commands/_private.py
#    Copyright (C) 2017  Alban Gruin
#
#    celcatsanitizer is free software; you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation; either version 2 of the License, or
#    (at your option) any later version.
#
#    celcatsanitizer is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License along
#    with celcatsanitizer; if not, write to the Free Software Foundation, Inc.,
#    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import datetime

import requests
from bs4 import BeautifulSoup
from django.utils import timezone

from edt.models import Course, Group, Room
from edt.utils import get_week


class Week:
    """A Celcat week: its number and the date of its first day."""

    def __init__(self, number, start):
        self.number = number
        # Celcat gives the start of the week as a dd/mm/yyyy string.
        self.start = timezone.make_aware(
            datetime.datetime.strptime(start, "%d/%m/%Y"))

    def get_day(self, offset):
        """Returns the date of the nth day of the week."""
        return self.start + datetime.timedelta(days=offset)

    @property
    def year(self):
        return self.start.year

def add_time(date, time):
    """Combines a date with the hour and minute of a parsed time."""
    delta = datetime.timedelta(hours=time.hour, minutes=time.minute)
    return date + delta

def delete_courses_in_week(timetable, year, week):
    """Deletes every course of a timetable for the given week."""
    start, end = get_week(year, week)
    Course.objects.filter(begin__gte=start, begin__lt=end,
                          timetable=timetable).delete()

def get_from_db_or_create(cls, **kwargs):
    """Returns the first object matching kwargs, creating it if none exists."""
    obj = cls.objects.filter(**kwargs).first()
    if obj is None:
        obj = cls(**kwargs)
        obj.save()

    return obj

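# get_events expects <event> elements roughly shaped as below.  This sample is
# reconstructed from the tag names read in the function, not from the Celcat
# documentation; the real feed may carry more elements and attributes:
#
#   <event>
#     <rawweeks>...</rawweeks>
#     <day>0</day>
#     <starttime>08:00</starttime>
#     <endtime>10:00</endtime>
#     <category>...</category>
#     <notes>...</notes>
#     <resources>
#       <module><item>...</item></module>
#       <group><item>...</item></group>
#       <room><item>...</item></room>
#     </resources>
#   </event>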
def get_events(timetable, year, week, soup, weeks_in_soup):
    """Yields every course of the given week found in the Celcat XML tree."""
    for event in soup.find_all("event"):
        title = None
        type_ = None
        groups = None
        rooms = None
        notes = None

        # Only keep events that belong to the requested week and year, are
        # assigned to at least one group, and have a start and an end time.
        if weeks_in_soup[event.rawweeks.text].number == week and \
           weeks_in_soup[event.rawweeks.text].year == year and \
           event.resources.group is not None and \
           event.starttime is not None and event.endtime is not None:
            date = weeks_in_soup[event.rawweeks.text].get_day(int(
                event.day.text))

            begin = add_time(date, datetime.datetime.strptime(
                event.starttime.text, "%H:%M"))
            end = add_time(date, datetime.datetime.strptime(
                event.endtime.text, "%H:%M"))

            groups = [get_from_db_or_create(Group, timetable=timetable,
                                            celcat_name=item.text)
                      for item in event.resources.group.find_all("item")]

            if event.notes is not None:
                notes = event.notes.text

            # The module name is used as the title; failing that, the notes
            # are promoted to title, and "Aucune information" ("no
            # information") is the last resort.
            if event.resources.module is not None:
                title = event.resources.module.item.text
            elif notes is not None:
                title = notes
                notes = None
            else:
                title = "Aucune information"

            if event.category is not None:
                type_ = event.category.text

            if event.resources.room is not None:
                rooms = [get_from_db_or_create(Room, name=item.text)
                         for item in event.resources.room.find_all("item")]

            yield title, type_, groups, rooms, notes, begin, end

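# get_weeks reads <span> elements that are assumed to look roughly like:
#
#   <span date="04/09/2017">
#     <title>36</title>
#     <alleventweeks>...</alleventweeks>
#   </span>
#
# where the content of <alleventweeks> matches the <rawweeks> value of the
# events belonging to that week (this shape is inferred from the code above,
# not from the Celcat documentation).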
def get_weeks(soup):
    """Maps each Celcat week identifier to a Week object."""
    weeks = {}
    for span in soup.find_all("span"):
        weeks[span.alleventweeks.text] = Week(int(span.title.text),
                                              span["date"])

    return weeks

def get_xml(url):
    """Downloads the Celcat feed and returns it as a BeautifulSoup tree."""
    r = requests.get(url)
    r.encoding = "utf8"

    # html.parser lowercases tag names; the lowercase attribute access used
    # above (event.rawweeks, span.alleventweeks, ...) relies on that.
    soup = BeautifulSoup(r.text, "html.parser")
    return soup
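
# Illustrative sketch only: how a management command might combine these
# helpers for one week.  The `timetable.url` attribute and the course-saving
# step are assumptions about code living outside this module.
def _example_refresh_week(timetable, year, week):
    soup = get_xml(timetable.url)                  # fetch and parse the feed
    weeks = get_weeks(soup)                        # index the <span> week elements
    delete_courses_in_week(timetable, year, week)  # drop the stale courses
    for title, type_, groups, rooms, notes, begin, end in \
            get_events(timetable, year, week, soup, weeks):
        pass  # build and save a Course from these values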