From 583679b546bf0be30566a98b57ae563d6e107446 Mon Sep 17 00:00:00 2001 From: Jean-Marie Favreau Date: Sun, 2 Mar 2025 16:29:17 +0100 Subject: [PATCH] On applique black pour cleaner le code --- src/agenda_culturel/admin.py | 2 +- src/agenda_culturel/calendar.py | 177 ++- src/agenda_culturel/celery.py | 171 ++- src/agenda_culturel/db_importer.py | 12 +- src/agenda_culturel/filters.py | 226 ++-- src/agenda_culturel/forms.py | 440 +++---- .../amisdutempsdescerises.py | 27 +- .../custom_extractors/arachnee.py | 56 +- .../custom_extractors/billetterie_cf.py | 88 +- .../custom_extractors/lacomedie.py | 21 +- .../import_tasks/custom_extractors/lacoope.py | 3 +- .../custom_extractors/lapucealoreille.py | 5 +- .../custom_extractors/laraymonde.py | 62 +- .../custom_extractors/lefotomat.py | 4 +- .../import_tasks/custom_extractors/lerio.py | 70 +- .../custom_extractors/mille_formes.py | 216 ++-- .../import_tasks/downloader.py | 58 +- src/agenda_culturel/import_tasks/extractor.py | 81 +- .../generic_extractors/apidae_tourisme.py | 93 +- .../generic_extractors/fbevent.py | 95 +- .../generic_extractors/fbevents.py | 55 +- .../generic_extractors/ggcal_link.py | 53 +- .../import_tasks/generic_extractors/ical.py | 18 +- .../generic_extractors/iguana_agenda.py | 78 +- .../generic_extractors/mobilizon.py | 85 +- .../generic_extractors/wordpress_mec.py | 18 +- src/agenda_culturel/import_tasks/importer.py | 41 +- .../import_tasks/twosteps_extractor.py | 49 +- ...1_squashed_0150_alter_event_local_image.py | 4 +- src/agenda_culturel/models.py | 875 +++++++++----- src/agenda_culturel/settings/base.py | 92 +- src/agenda_culturel/settings/dev.py | 2 +- src/agenda_culturel/sitemaps.py | 18 +- src/agenda_culturel/templatetags/cat_extra.py | 89 +- .../templatetags/duplicated_extra.py | 2 - .../templatetags/event_extra.py | 66 +- .../templatetags/locations_extra.py | 26 +- .../templatetags/rimports_extra.py | 34 +- src/agenda_culturel/templatetags/tag_extra.py | 32 +- .../templatetags/utils_extra.py | 66 +- src/agenda_culturel/urls.py | 158 ++- src/agenda_culturel/utils.py | 62 +- src/agenda_culturel/views.py | 1028 +++++++++++------ src/scripts/create_categories.py | 4 +- src/scripts/create_reference_locations.py | 23 +- src/scripts/profiling.py | 9 +- src/scripts/set_pause.py | 3 +- 47 files changed, 3126 insertions(+), 1771 deletions(-) diff --git a/src/agenda_culturel/admin.py b/src/agenda_culturel/admin.py index 8c5d32a..56941ca 100644 --- a/src/agenda_culturel/admin.py +++ b/src/agenda_culturel/admin.py @@ -11,7 +11,7 @@ from .models import ( Place, Message, ReferenceLocation, - Organisation + Organisation, ) from django_better_admin_arrayfield.admin.mixins import DynamicArrayMixin from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget diff --git a/src/agenda_culturel/calendar.py b/src/agenda_culturel/calendar.py index 1804b52..8836983 100644 --- a/src/agenda_culturel/calendar.py +++ b/src/agenda_culturel/calendar.py @@ -8,10 +8,10 @@ from django.http import Http404 from django.db.models import CharField from django.db.models.functions import Lower +import logging CharField.register_lookup(Lower) -import logging logger = logging.getLogger(__name__) @@ -43,7 +43,7 @@ class DayInCalendar: self.events_by_category = {} self.time_intervals = None - self.id = d.strftime('%Y-%m-%d') + self.id = d.strftime("%Y-%m-%d") def is_in_past(self): return self.in_past @@ -90,6 +90,7 @@ class DayInCalendar: def _add_event_internal(self, event): from .models import Category from copy import copy + # copy event 
local_event = copy(event) @@ -115,9 +116,9 @@ class DayInCalendar: def filter_events(self): self.events.sort( - key=lambda e: DayInCalendar.midnight - if e.start_time is None - else e.start_time + key=lambda e: ( + DayInCalendar.midnight if e.start_time is None else e.start_time + ) ) self.today_night = False if self.is_today(): @@ -126,8 +127,10 @@ class DayInCalendar: nday = now.date() ntime = now.time() found = False - for idx,e in enumerate(self.events): - if (nday < e.start_day) or (nday == e.start_day and e.start_time and ntime <= e.start_time): + for idx, e in enumerate(self.events): + if (nday < e.start_day) or ( + nday == e.start_day and e.start_time and ntime <= e.start_time + ): self.events[idx].is_first_after_now = True found = True break @@ -139,17 +142,32 @@ class DayInCalendar: def events_by_category_ordered(self): from .models import Category + if DayInCalendar.cats is None: - DayInCalendar.cats = Category.objects.order_by('position') + DayInCalendar.cats = Category.objects.order_by("position") result = [] for c in DayInCalendar.cats: if c.name in self.events_by_category: result.append((c.name, self.events_by_category[c.name])) return result - def build_time_intervals(self, all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers): - self.time_intervals = [IntervalInDay(self.date, i, n[0], n[1]) for i, n in - enumerate(zip([all_day_name] + interval_names, [all_day_short_name] + interval_short_names))] + def build_time_intervals( + self, + all_day_name, + all_day_short_name, + interval_names, + interval_short_names, + interval_markers, + ): + self.time_intervals = [ + IntervalInDay(self.date, i, n[0], n[1]) + for i, n in enumerate( + zip( + [all_day_name] + interval_names, + [all_day_short_name] + interval_short_names, + ) + ) + ] for e in self.events: if e.start_time is None: @@ -168,20 +186,49 @@ class DayInCalendar: def get_time_intervals(self): if self.time_intervals is None: if self.is_today(): - all_day_name = _('All day today') - interval_names = [_('This morning'), _('This noon'), _('This afternoon'), _('This evening')] + all_day_name = _("All day today") + interval_names = [ + _("This morning"), + _("This noon"), + _("This afternoon"), + _("This evening"), + ] elif self.is_tomorrow(): name = _("Tomorrow") - all_day_name = _('All day tomorrow') - interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name] + all_day_name = _("All day tomorrow") + interval_names = [ + _("%s morning") % name, + _("%s noon") % name, + _("%s afternoon") % name, + _("%s evening") % name, + ] else: name = _date(self.date, "l") - all_day_name = _('All day %s') % name - interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name] - all_day_short_name = _('All day') - interval_short_names = [_('Morning'), _('Noon'), _('Afternoon'), _('Evening')] - interval_markers = [datetime.combine(self.date, time(h, m)) for h, m in [(11, 30), (13, 0), (18, 0)]] - self.build_time_intervals(all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers) + all_day_name = _("All day %s") % name + interval_names = [ + _("%s morning") % name, + _("%s noon") % name, + _("%s afternoon") % name, + _("%s evening") % name, + ] + all_day_short_name = _("All day") + interval_short_names = [ + _("Morning"), + _("Noon"), + _("Afternoon"), + _("Evening"), + ] + interval_markers = [ + datetime.combine(self.date, time(h, m)) + for h, m in [(11, 30), (13, 0), (18, 
0)] + ] + self.build_time_intervals( + all_day_name, + all_day_short_name, + interval_names, + interval_short_names, + interval_markers, + ) return self.time_intervals @@ -192,10 +239,13 @@ class IntervalInDay(DayInCalendar): self.name = name self.short_name = short_name super().__init__(d) - self.id = self.id + '-' + str(id) + self.id = self.id + "-" + str(id) + class CalendarList: - def __init__(self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None): + def __init__( + self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None + ): self.firstdate = firstdate self.lastdate = lastdate self.now = date.today() @@ -231,7 +281,7 @@ class CalendarList: def get_calendar_days(self): if self.calendar_days is None: self.build_internal() - + return self.calendar_days def today_in_calendar(self): @@ -253,29 +303,55 @@ class CalendarList: if self.ignore_dup: qs = qs.exclude(other_versions=self.ignore_dup) - startdatetime = timezone.make_aware(datetime.combine(self.c_firstdate, time.min), timezone.get_default_timezone()) - lastdatetime = timezone.make_aware(datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone()) - qs = qs.filter( - (Q(recurrences__isnull=False) & - (Q(recurrence_dtend__isnull=True) & Q(recurrence_dtstart__isnull=False) & Q(recurrence_dtstart__lte=lastdatetime)) + startdatetime = timezone.make_aware( + datetime.combine(self.c_firstdate, time.min), + timezone.get_default_timezone(), + ) + lastdatetime = timezone.make_aware( + datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone() + ) + qs = ( + qs.filter( + ( + Q(recurrences__isnull=False) + & ( + Q(recurrence_dtend__isnull=True) + & Q(recurrence_dtstart__isnull=False) + & Q(recurrence_dtstart__lte=lastdatetime) + ) + | ( + Q(recurrence_dtend__isnull=False) + & ~( + Q(recurrence_dtstart__gt=lastdatetime) + | Q(recurrence_dtend__lt=startdatetime) + ) + ) + ) | ( - Q(recurrence_dtend__isnull=False) - & ~( - Q(recurrence_dtstart__gt=lastdatetime) - | Q(recurrence_dtend__lt=startdatetime) + Q( + start_day__lte=self.c_lastdate + ) # start before the end of the desired period + & ( + ( + Q(end_day__isnull=True) & Q(start_day__gte=self.c_firstdate) + ) # end after the begining of desired period + | (Q(end_day__isnull=False) & Q(end_day__gte=self.c_firstdate)) ) ) ) - | (Q(start_day__lte=self.c_lastdate) & # start before the end of the desired period - ((Q(end_day__isnull=True) & Q(start_day__gte=self.c_firstdate)) # end after the begining of desired period - | (Q(end_day__isnull=False) & Q(end_day__gte=self.c_firstdate)))) - ).filter( - Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True) - ).order_by("start_time", "title__unaccent__lower") - - qs = qs.select_related("category").select_related("other_versions").select_related("other_versions__representative") + .filter( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + .order_by("start_time", "title__unaccent__lower") + ) + + qs = ( + qs.select_related("category") + .select_related("other_versions") + .select_related("other_versions__representative") + ) self.events = qs firstdate = datetime.fromordinal(self.c_firstdate.toordinal()) @@ -292,12 +368,16 @@ class CalendarList: if e.start_day >= self.firstdate and e.start_day <= self.lastdate: self.calendar_days[e.start_day.__str__()].add_event(e) else: - for d in daterange(max(e.start_day, 
self.firstdate), min(e.end_day, self.lastdate)): + for d in daterange( + max(e.start_day, self.firstdate), min(e.end_day, self.lastdate) + ): self.calendar_days[d.__str__()].add_event(e) else: for e_rec in e.get_recurrences_between(firstdate, lastdate): end = e_rec.start_day if e_rec.end_day is None else e_rec.end_day - for d in daterange(max(e_rec.start_day, self.firstdate), min(end, self.lastdate)): + for d in daterange( + max(e_rec.start_day, self.firstdate), min(end, self.lastdate) + ): self.calendar_days[d.__str__()].add_event(e_rec) def create_calendar_days(self): @@ -337,7 +417,10 @@ class CalendarList: def export_to_ics(self, request): from .models import Event - events = [event for day in self.get_calendar_days().values() for event in day.events] + + events = [ + event for day in self.get_calendar_days().values() for event in day.events + ] return Event.export_to_ics(events, request) @@ -370,7 +453,7 @@ class CalendarWeek(CalendarList): try: first = date.fromisocalendar(self.year, self.week, 1) last = date.fromisocalendar(self.year, self.week, 7) - except: + except Exception: raise Http404() super().__init__(first, last, filter, qs) diff --git a/src/agenda_culturel/celery.py b/src/agenda_culturel/celery.py index acc1bb6..bbd426e 100644 --- a/src/agenda_culturel/celery.py +++ b/src/agenda_culturel/celery.py @@ -11,12 +11,13 @@ from celery.signals import worker_ready from contextlib import contextmanager -from .import_tasks.downloader import * -from .import_tasks.extractor import * -from .import_tasks.importer import * +from .import_tasks.extractor import Extractor +from .import_tasks.importer import URL2Events +from .import_tasks.downloader import SimpleDownloader, ChromiumHeadlessDownloader from .import_tasks.custom_extractors import * from .import_tasks.generic_extractors import * +from django.core.cache import cache # Set the default Django settings module for the 'celery' program. 
APP_ENV = os.getenv("APP_ENV", "dev") @@ -24,7 +25,6 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", f"agenda_culturel.settings.{APP_ app = Celery("agenda_culturel") -from django.core.cache import cache logger = get_task_logger(__name__) @@ -40,6 +40,7 @@ app.autodiscover_tasks() LOCK_EXPIRE = 60 * 10 # Lock expires in 10 minutes + @contextmanager def memcache_chromium_lock(oid): lock_id = "chromium-lock" @@ -58,6 +59,7 @@ def memcache_chromium_lock(oid): # also don't release the lock if we didn't acquire it cache.delete(lock_id) + @contextmanager def free_memecache_chromium_lock(): lock_id = "chromium-lock" @@ -105,7 +107,6 @@ def import_events_from_json(self, json): close_import_task(self.request.id, success, error_message, importer) - class ChromiumTask(Task): _chm = None @@ -116,20 +117,18 @@ class ChromiumTask(Task): return self._chm def restartDownloader(self): - logger.warning('Restart selenium') - if not self._chm is None: + logger.info("Restart selenium") + if self._chm is not None: del self._chm self._chm = ChromiumHeadlessDownloader() - def run_recurrent_import_internal(rimport, downloader, req_id): from agenda_culturel.models import RecurrentImport, BatchImportation from .db_importer import DBImporterEvents logger.info("Run recurrent import: {}".format(req_id)) - # create a batch importation importation = BatchImportation(recurrentImport=rimport, celery_id=req_id) # save batch importation @@ -138,7 +137,6 @@ def run_recurrent_import_internal(rimport, downloader, req_id): # create an importer importer = DBImporterEvents(req_id) - if rimport.processor == RecurrentImport.PROCESSOR.ICAL: extractor = ical.ICALExtractor() elif rimport.processor == RecurrentImport.PROCESSOR.ICALNOBUSY: @@ -192,21 +190,28 @@ def run_recurrent_import_internal(rimport, downloader, req_id): location = rimport.defaultLocation tags = rimport.defaultTags published = rimport.defaultPublished - organisers = [] if rimport.defaultOrganiser is None else [rimport.defaultOrganiser.pk] + organisers = ( + [] if rimport.defaultOrganiser is None else [rimport.defaultOrganiser.pk] + ) try: # get events from website events = u2e.process( url, browsable_url, - default_values={"category": category, "location": location, "tags": tags, "organisers": organisers}, + default_values={ + "category": category, + "location": location, + "tags": tags, + "organisers": organisers, + }, published=published, ) # force location if required if rimport.forceLocation and location: - for i, e in enumerate(events['events']): - events['events'][i]["location"] = location + for i, e in enumerate(events["events"]): + events["events"][i]["location"] = location # convert it to json json_events = json.dumps(events, default=str) @@ -249,10 +254,15 @@ def run_recurrent_import(self, pklist): # only one thread using Chromium can run at a time, # to prevent from errors (including strange Facebook errors) - if rimport.downloader in [RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS, RecurrentImport.DOWNLOADER.CHROMIUMHEADLESSPAUSE]: + if rimport.downloader in [ + RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS, + RecurrentImport.DOWNLOADER.CHROMIUMHEADLESSPAUSE, + ]: with memcache_chromium_lock(self.app.oid) as acquired: if acquired: - valid = run_recurrent_import_internal(rimport, downloader, self.request.id) + valid = run_recurrent_import_internal( + rimport, downloader, self.request.id + ) if not valid: self.restartDownloader() return pklist[1:] if is_list else True @@ -272,10 +282,14 @@ def run_recurrent_import(self, pklist): def 
run_recurrent_imports_from_list(pklist): - - tasks = chain(run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s() for i in range(len(pklist))) + + tasks = chain( + run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s() + for i in range(len(pklist)) + ) tasks.delay() + @app.task(bind=True) def daily_imports(self): from agenda_culturel.models import RecurrentImport @@ -288,29 +302,35 @@ def daily_imports(self): run_recurrent_imports_from_list([imp.pk for imp in imports]) -SCREENSHOT_FILE = settings.MEDIA_ROOT + '/screenshot.png' +SCREENSHOT_FILE = settings.MEDIA_ROOT + "/screenshot.png" + @app.task(bind=True) def screenshot(self): downloader = ChromiumHeadlessDownloader(noimage=False) downloader.screenshot("https://pommesdelune.fr", SCREENSHOT_FILE) + @worker_ready.connect def at_start(sender, **k): logger.info("Worker is ready") - # create screenshot + # create screenshot if not os.path.isfile(SCREENSHOT_FILE): logger.info("Init screenshot file") with sender.app.connection() as conn: - sender.app.send_task('agenda_culturel.celery.screenshot', None, connection=conn) + sender.app.send_task( + "agenda_culturel.celery.screenshot", None, connection=conn + ) else: logger.info("Screenshot file already exists") - # cancel running tasks + # cancel running tasks from agenda_culturel.models import BatchImportation - logger.info("Cancel running importation tasks") - running_tasks = BatchImportation.objects.filter(status=BatchImportation.STATUS.RUNNING).update(status=BatchImportation.STATUS.CANCELED) + logger.info("Cancel running importation tasks") + BatchImportation.objects.filter(status=BatchImportation.STATUS.RUNNING).update( + status=BatchImportation.STATUS.CANCELED + ) @app.task(bind=True) @@ -319,7 +339,9 @@ def run_all_recurrent_imports(self, only_fb=False): logger.info("Run all imports") if only_fb: - imports = RecurrentImport.objects.filter(processor=RecurrentImport.PROCESSOR.FBEVENTS).order_by("pk") + imports = RecurrentImport.objects.filter( + processor=RecurrentImport.PROCESSOR.FBEVENTS + ).order_by("pk") else: imports = RecurrentImport.objects.all().order_by("pk") @@ -334,7 +356,14 @@ def run_all_recurrent_imports_failed(self): imports = RecurrentImport.objects.all().order_by("pk") imports = [(imp.pk, imp.last_import()) for imp in imports] - run_recurrent_imports_from_list([imp[0] for imp in imports if (not imp[1] is None) and imp[1].status == BatchImportation.STATUS.FAILED]) + run_recurrent_imports_from_list( + [ + imp[0] + for imp in imports + if (imp[1] is not None) and imp[1].status == BatchImportation.STATUS.FAILED + ] + ) + @app.task(bind=True) def run_all_recurrent_imports_canceled(self): @@ -344,7 +373,14 @@ def run_all_recurrent_imports_canceled(self): imports = RecurrentImport.objects.all().order_by("pk") imports = [(imp.pk, imp.last_import()) for imp in imports] - run_recurrent_imports_from_list([imp[0] for imp in imports if (not imp[1] is None) and imp[1].status == BatchImportation.STATUS.CANCELED]) + run_recurrent_imports_from_list( + [ + imp[0] + for imp in imports + if (imp[1] is not None) + and imp[1].status == BatchImportation.STATUS.CANCELED + ] + ) @app.task(bind=True) @@ -358,11 +394,21 @@ def weekly_imports(self): run_recurrent_imports_from_list([imp.pk for imp in imports]) + @app.task(base=ChromiumTask, bind=True) -def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id=None, email=None, comments=None): +def import_events_from_url( + self, + urls, + cat=None, + tags=None, + force=False, + user_id=None, + 
email=None, + comments=None, +): from .db_importer import DBImporterEvents - from agenda_culturel.models import RecurrentImport, BatchImportation - from agenda_culturel.models import Event, Category + from agenda_culturel.models import BatchImportation + from agenda_culturel.models import Event if isinstance(urls, list): url = urls[0] @@ -374,9 +420,9 @@ def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id with memcache_chromium_lock(self.app.oid) as acquired: if acquired: - - logger.info("URL import: {}".format(self.request.id) + " force " + str(force)) - + logger.info( + "URL import: {}".format(self.request.id) + " force " + str(force) + ) # clean url url = Extractor.clean_url(url) @@ -390,7 +436,9 @@ def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id importer = DBImporterEvents(self.request.id) # create a batch importation - importation = BatchImportation(url_source=url, celery_id=self.request.id) + importation = BatchImportation( + url_source=url, celery_id=self.request.id + ) # save batch importation importation.save() @@ -409,40 +457,48 @@ def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id values["comments"] = comments # get event - events = u2e.process( - url, published=False, default_values=values - ) + events = u2e.process(url, published=False, default_values=values) if events: # convert it to json json_events = json.dumps(events, default=str) # import events (from json) - success, error_message = importer.import_events(json_events, user_id) + success, error_message = importer.import_events( + json_events, user_id + ) # finally, close task - close_import_task(self.request.id, success, error_message, importer) + close_import_task( + self.request.id, success, error_message, importer + ) else: - close_import_task(self.request.id, False, "Cannot find any event", importer) + close_import_task( + self.request.id, False, "Cannot find any event", importer + ) except Exception as e: logger.error(e) close_import_task(self.request.id, False, e, importer) return urls[1:] if is_list else True - + # if chromium is locked, we wait 30 seconds before retrying raise self.retry(countdown=30) @app.task(base=ChromiumTask, bind=True) -def import_events_from_urls(self, urls_cat_tags, user_id=None, email=None, comments=None): +def import_events_from_urls( + self, urls_cat_tags, user_id=None, email=None, comments=None +): for ucat in urls_cat_tags: if ucat is not None: url = ucat[0] cat = ucat[1] tags = ucat[2] - import_events_from_url.delay(url, cat, tags, user_id=user_id, email=email, comments=comments) + import_events_from_url.delay( + url, cat, tags, user_id=user_id, email=email, comments=comments + ) @app.task(base=ChromiumTask, bind=True) @@ -455,20 +511,35 @@ def update_orphan_pure_import_events(self): srcs = RecurrentImport.objects.all().values_list("source") today = date.today() # get all events in future with a source and not related to a recurrent import - urls = Event.objects.filter(Q(start_day__gte=today)).filter( - (Q(import_sources__isnull=False) & - (Q(modified_date__isnull=True) | - Q(modified_date__lte=F('imported_date')))) - & ~Q(import_sources__overlap=srcs)).values_list("import_sources", flat=True) + urls = ( + Event.objects.filter(Q(start_day__gte=today)) + .filter( + ( + Q(import_sources__isnull=False) + & ( + Q(modified_date__isnull=True) + | Q(modified_date__lte=F("imported_date")) + ) + ) + & ~Q(import_sources__overlap=srcs) + ) + .values_list("import_sources", flat=True) + ) # get urls urls = 
[url_l[0] for url_l in urls if len(url_l) > 0] # run tasks as a chain - tasks = chain(import_events_from_url.s(urls, force=True) if i == 0 else import_events_from_url.s(force=True) for i in range(len(urls))) + tasks = chain( + ( + import_events_from_url.s(urls, force=True) + if i == 0 + else import_events_from_url.s(force=True) + ) + for i in range(len(urls)) + ) tasks.delay() - app.conf.beat_schedule = { "daily_orphans_update": { "task": "agenda_culturel.celery.update_orphan_pure_import_events", diff --git a/src/agenda_culturel/db_importer.py b/src/agenda_culturel/db_importer.py index 6c69b55..405a25d 100644 --- a/src/agenda_culturel/db_importer.py +++ b/src/agenda_culturel/db_importer.py @@ -41,7 +41,7 @@ class DBImporterEvents: try: structure = json.loads(json_structure) - except: + except Exception: return (False, "JSON file is not correctly structured") if len(structure) == 0: @@ -73,7 +73,9 @@ class DBImporterEvents: if not self.load_event(event): return (False, self.error_message) else: - logger.warning("Event in the past, will not be imported: {}".format(event)) + logger.warning( + "Event in the past, will not be imported: {}".format(event) + ) # finally save the loaded events in database self.save_imported() @@ -98,7 +100,9 @@ class DBImporterEvents: def save_imported(self): self.db_event_objects, self.nb_updated, self.nb_removed = Event.import_events( - self.event_objects, remove_missing_from_source=self.url, user_id=self.user_id + self.event_objects, + remove_missing_from_source=self.url, + user_id=self.user_id, ) def is_valid_event_structure(self, event): @@ -116,7 +120,7 @@ class DBImporterEvents: def load_event(self, event): if self.is_valid_event_structure(event): - logger.warning( + logger.info( "Valid event: {} {}".format(event["last_modified"], event["title"]) ) event_obj = Event.from_structure(event, self.url) diff --git a/src/agenda_culturel/filters.py b/src/agenda_culturel/filters.py index 1418814..b8a940b 100644 --- a/src/agenda_culturel/filters.py +++ b/src/agenda_culturel/filters.py @@ -2,43 +2,13 @@ import django_filters from django.utils.translation import gettext_lazy as _ from django import forms from django.contrib.postgres.search import SearchQuery, SearchHeadline -from django.db.models import Count, Q, F +from django.db.models import Q, F from datetime import date, timedelta from urllib.parse import urlparse, parse_qs, urlencode from django.http import QueryDict from django.contrib.gis.measure import D -from django.forms import ( - ModelForm, - ValidationError, - TextInput, - Form, - URLField, - MultipleHiddenInput, - Textarea, - CharField, - ChoiceField, - RadioSelect, - MultipleChoiceField, - BooleanField, - HiddenInput, - ModelChoiceField, -) - -from .forms import ( - URLSubmissionForm, - EventForm, - BatchImportationForm, - FixDuplicates, - SelectEventInList, - MergeDuplicates, - RecurrentImportForm, - CategorisationRuleImportForm, - CategorisationForm, - EventAddPlaceForm, - PlaceForm, -) from .models import ( ReferenceLocation, @@ -47,7 +17,7 @@ from .models import ( Event, Category, Message, - DuplicatedEvents + DuplicatedEvents, ) @@ -63,8 +33,10 @@ class EventFilter(django_filters.FilterSet): label="À proximité de", method="no_filter", empty_label=_("Select a location"), - to_field_name='slug', - queryset=ReferenceLocation.objects.filter(main__gt=0).order_by("-main", "name__unaccent") + to_field_name="slug", + queryset=ReferenceLocation.objects.filter(main__gt=0).order_by( + "-main", "name__unaccent" + ), ) radius = django_filters.ChoiceFilter( @@ 
-72,7 +44,7 @@ class EventFilter(django_filters.FilterSet): method="no_filter", choices=[(x, str(x) + " km") for x in DISTANCE_CHOICES], null_label=None, - empty_label=None + empty_label=None, ) exclude_tags = django_filters.MultipleChoiceFilter( @@ -114,14 +86,20 @@ class EventFilter(django_filters.FilterSet): # urls were using pk, now we moved to slug if len(args) > 0 and "position" in args[0] and args[0]["position"].isdigit(): args[0]._mutable = True - el = ReferenceLocation.objects.filter(pk=int(args[0]["position"])).values("slug").first() + el = ( + ReferenceLocation.objects.filter(pk=int(args[0]["position"])) + .values("slug") + .first() + ) args[0]["position"] = None if el is None else el["slug"] args[0]._mutable = False super().__init__(*args, **kwargs) if not kwargs["request"].user.is_authenticated: self.form.fields.pop("status") - self.form.fields["exclude_tags"].choices = Tag.get_tag_groups(exclude=True, nb_suggestions=0) + self.form.fields["exclude_tags"].choices = Tag.get_tag_groups( + exclude=True, nb_suggestions=0 + ) self.form.fields["tags"].choices = Tag.get_tag_groups(include=True) def has_category_parameters(self): @@ -135,10 +113,10 @@ class EventFilter(django_filters.FilterSet): else: parsed_url = urlparse(url) params = parse_qs(parsed_url.query) - if len(params['category']) == 0: + if len(params["category"]) == 0: return url else: - cat_id = params['category'][0] + cat_id = params["category"][0] del params["category"] url = parsed_url._replace(query=urlencode(params, doseq=True)).geturl() if cat_id.isdigit(): @@ -150,7 +128,6 @@ class EventFilter(django_filters.FilterSet): else: return url - def filter_recurrences(self, queryset, name, value): # construct the full lookup expression lookup = "__".join([name, "isnull"]) @@ -162,7 +139,10 @@ class EventFilter(django_filters.FilterSet): @property def qs(self): parent = super().qs - if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None: + if ( + self.get_cleaned_data("position") is None + or self.get_cleaned_data("radius") is None + ): return parent d = self.get_cleaned_data("radius") p = self.get_cleaned_data("position") @@ -177,7 +157,9 @@ class EventFilter(django_filters.FilterSet): p = p.location - return parent.exclude(exact_location=False).filter(exact_location__location__distance_lt=(p, D(km=d))) + return parent.exclude(exact_location=False).filter( + exact_location__location__distance_lt=(p, D(km=d)) + ) def has_location(self): d = self.get_cleaned_data("radius") @@ -229,9 +211,13 @@ class EventFilter(django_filters.FilterSet): def get_radius(self): return self.get_cleaned_data("radius") - def to_str(self, prefix=''): + def to_str(self, prefix=""): self.form.full_clean() - result = ' '.join([t for t in self.get_tags()] + ["~" + t for t in self.get_exclude_tags()] + [str(self.get_position()), str(self.get_radius())]) + result = " ".join( + [t for t in self.get_tags()] + + ["~" + t for t in self.get_exclude_tags()] + + [str(self.get_position()), str(self.get_radius())] + ) if len(result) > 0: result = prefix + result return result @@ -259,20 +245,20 @@ class EventFilter(django_filters.FilterSet): if self.request.user.is_authenticated: if ( len(self.get_cleaned_data("status")) != 1 - or - self.get_cleaned_data("status")[0] != Event.STATUS.PUBLISHED + or self.get_cleaned_data("status")[0] != Event.STATUS.PUBLISHED ): return True else: - if ( - len(self.get_cleaned_data("status")) != 0 - ): + if len(self.get_cleaned_data("status")) != 0: return True return ( 
len(self.get_cleaned_data("tags")) != 0 or len(self.get_cleaned_data("exclude_tags")) != 0 or len(self.get_cleaned_data("recurrences")) != 0 - or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None)) + or ( + (self.get_cleaned_data("position") is not None) + and (self.get_cleaned_data("radius") is not None) + ) ) def is_active(self, only_categories=False): @@ -284,16 +270,21 @@ class EventFilter(django_filters.FilterSet): or len(self.get_cleaned_data("tags")) != 0 or len(self.get_cleaned_data("exclude_tags")) != 0 or len(self.get_cleaned_data("recurrences")) != 0 - or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None)) + or ( + (self.get_cleaned_data("position") is not None) + and (self.get_cleaned_data("radius") is not None) + ) ) def is_selected_tag(self, tag): - return "tags" in self.form.cleaned_data and tag in self.form.cleaned_data["tags"] + return ( + "tags" in self.form.cleaned_data and tag in self.form.cleaned_data["tags"] + ) def get_url_add_tag(self, tag): full_path = self.request.get_full_path() - result = full_path + ('&' if '?' in full_path else '?') + 'tags=' + str(tag) + result = full_path + ("&" if "?" in full_path else "?") + "tags=" + str(tag) return result @@ -302,25 +293,43 @@ class EventFilter(django_filters.FilterSet): def set_default_values(request): if request.user.is_authenticated: - if request.GET.get('status', None) is None: + if request.GET.get("status", None) is None: tempdict = request.GET.copy() - tempdict['status'] = 'published' + tempdict["status"] = "published" request.GET = tempdict return request return request def get_position_radius(self): - if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None: + if ( + self.get_cleaned_data("position") is None + or self.get_cleaned_data("radius") is None + ): return "" else: - return str(self.get_cleaned_data("position")) + ' (' + str(self.get_cleaned_data("radius")) + ' km)' + return ( + str(self.get_cleaned_data("position")) + + " (" + + str(self.get_cleaned_data("radius")) + + " km)" + ) def is_filtered_by_position_radius(self): - return not self.get_cleaned_data("position") is None and not self.get_cleaned_data("radius") is None + return ( + self.get_cleaned_data("position") is not None + and self.get_cleaned_data("radius") is not None + ) def get_url_add_suggested_position(self, location): result = self.request.get_full_path() - return result + ('&' if '?' in result else '?') + 'position=' + str(location.slug) + "&radius=" + str(location.suggested_distance) + return ( + result + + ("&" if "?" 
in result else "?") + + "position=" + + str(location.slug) + + "&radius=" + + str(location.suggested_distance) + ) class EventFilterAdmin(django_filters.FilterSet): @@ -330,21 +339,24 @@ class EventFilterAdmin(django_filters.FilterSet): representative = django_filters.MultipleChoiceFilter( label=_("Representative version"), - choices=[(True, _("Yes")), (False, _("No"))], + choices=[(True, _("Yes")), (False, _("No"))], method="filter_by_representative", - widget=forms.CheckboxSelectMultiple) + widget=forms.CheckboxSelectMultiple, + ) pure_import = django_filters.MultipleChoiceFilter( label=_("Pure import"), - choices=[(True, _("Yes")), (False, _("No"))], + choices=[(True, _("Yes")), (False, _("No"))], method="filter_by_pure_import", - widget=forms.CheckboxSelectMultiple) + widget=forms.CheckboxSelectMultiple, + ) in_recurrent_import = django_filters.MultipleChoiceFilter( label=_("In recurrent import"), - choices=[(True, _("Yes")), (False, _("No"))], + choices=[(True, _("Yes")), (False, _("No"))], method="filter_by_in_recurrent_import", - widget=forms.CheckboxSelectMultiple) + widget=forms.CheckboxSelectMultiple, + ) o = django_filters.ChoiceFilter( label=_("Sort by"), @@ -353,20 +365,26 @@ class EventFilterAdmin(django_filters.FilterSet): ("modified_date", _("last modified first")), ("imported_date", _("last imported first")), ("created_date", _("last created first")), - ], - method="sort_on_date") + ], + method="sort_on_date", + ) import_sources = django_filters.ModelChoiceFilter( label=_("Imported from"), method="filter_by_source", - queryset=RecurrentImport.objects.all().order_by("name__unaccent") + queryset=RecurrentImport.objects.all().order_by("name__unaccent"), ) def sort_on_date(self, queryset, name, value): print(name, value) - if value in ['created_date', 'imported_date', 'modified_date', 'moderated_date']: - notnull = value + '__isnull' - return queryset.filter(**{notnull: False}).order_by('-' + value) + if value in [ + "created_date", + "imported_date", + "modified_date", + "moderated_date", + ]: + notnull = value + "__isnull" + return queryset.filter(**{notnull: False}).order_by("-" + value) else: return queryset @@ -380,7 +398,7 @@ class EventFilterAdmin(django_filters.FilterSet): else: srcs = RecurrentImport.objects.all().values_list("source") q = Q(import_sources__overlap=srcs) - if value[0] == 'True': + if value[0] == "True": print(q) return queryset.filter(q) else: @@ -390,29 +408,29 @@ class EventFilterAdmin(django_filters.FilterSet): if value is None or len(value) != 1: return queryset else: - q = (Q(import_sources__isnull=False) & - (Q(modified_date__isnull=True) | - Q(modified_date__lte=F('imported_date')))) - if value[0] == 'True': + q = Q(import_sources__isnull=False) & ( + Q(modified_date__isnull=True) | Q(modified_date__lte=F("imported_date")) + ) + if value[0] == "True": print(q) return queryset.filter(q) else: return queryset.exclude(q) - def filter_by_representative(self, queryset, name, value): if value is None or len(value) != 1: return queryset else: - q = (Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True)) - if value[0] == 'True': + q = ( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + if value[0] == "True": return queryset.filter(q) else: return queryset.exclude(q) - class Meta: model = Event fields = ["status"] @@ -441,13 +459,17 @@ class MessagesFilterAdmin(django_filters.FilterSet): fields = 
["closed", "spam", "message_type"] def is_contact_messages(self): - return "message_type" in self.form.cleaned_data and "contact_form" in self.form.cleaned_data["message_type"] + return ( + "message_type" in self.form.cleaned_data + and "contact_form" in self.form.cleaned_data["message_type"] + ) class SimpleSearchEventFilter(django_filters.FilterSet): - q = django_filters.CharFilter(method="custom_filter", + q = django_filters.CharFilter( + method="custom_filter", label=_("Search"), - widget=forms.TextInput(attrs={"type": "search"}) + widget=forms.TextInput(attrs={"type": "search"}), ) status = django_filters.MultipleChoiceFilter( @@ -458,23 +480,27 @@ class SimpleSearchEventFilter(django_filters.FilterSet): ) past = django_filters.ChoiceFilter( - label=_("In the past"), - choices=[(False, _("No")), (True, _("Yes"))], - null_label=None, - empty_label=None, - method="in_past", - widget=forms.Select) + label=_("In the past"), + choices=[(False, _("No")), (True, _("Yes"))], + null_label=None, + empty_label=None, + method="in_past", + widget=forms.Select, + ) def in_past(self, queryset, name, value): if value and value == "True": now = date.today() - qs = queryset.filter(start_day__lt=now).order_by("-start_day", "-start_time") + qs = queryset.filter(start_day__lt=now).order_by( + "-start_day", "-start_time" + ) else: start = date.today() + timedelta(days=-2) - qs = queryset.filter(start_day__gte=start).order_by("start_day", "start_time") + qs = queryset.filter(start_day__gte=start).order_by( + "start_day", "start_time" + ) return qs - def custom_filter(self, queryset, name, value): search_query = SearchQuery(value, config="french") qs = queryset.filter( @@ -508,7 +534,6 @@ class SimpleSearchEventFilter(django_filters.FilterSet): self.form.fields.pop("status") - class SearchEventFilter(django_filters.FilterSet): tags = django_filters.CharFilter(lookup_expr="icontains") title = django_filters.CharFilter(method="hl_filter_contains") @@ -564,8 +589,8 @@ class SearchEventFilter(django_filters.FilterSet): class DuplicatedEventsFilter(django_filters.FilterSet): fixed = django_filters.BooleanFilter( - label="Résolu", - field_name='representative', method="fixed_qs") + label="Résolu", field_name="representative", method="fixed_qs" + ) class Meta: model = DuplicatedEvents @@ -587,11 +612,9 @@ class DuplicatedEventsFilter(django_filters.FilterSet): if self.form.cleaned_data["fixed"]: return "fixed=true" else: - return "fixed=false" + return "fixed=false" else: return "" - - def fixed_qs(self, queryset, name, value): return DuplicatedEvents.not_fixed_qs(queryset, value) @@ -602,10 +625,9 @@ class RecurrentImportFilter(django_filters.FilterSet): name = django_filters.ModelMultipleChoiceFilter( label="Filtrer par nom", field_name="name", - queryset=RecurrentImport.objects.all().order_by("name__unaccent") + queryset=RecurrentImport.objects.all().order_by("name__unaccent"), ) class Meta: model = RecurrentImport fields = ["name"] - diff --git a/src/agenda_culturel/forms.py b/src/agenda_culturel/forms.py index b152d91..c7acd99 100644 --- a/src/agenda_culturel/forms.py +++ b/src/agenda_culturel/forms.py @@ -13,13 +13,12 @@ from django.forms import ( BooleanField, HiddenInput, ModelChoiceField, - EmailField + EmailField, ) from django.forms import formset_factory from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget -from .utils import PlaceGuesser from .models import ( Event, RecurrentImport, @@ -27,7 +26,7 @@ from .models import ( Place, Category, Tag, - Message + Message, ) from 
django.conf import settings
from django.core.files import File

@@ -44,13 +43,16 @@
 import logging

 logger = logging.getLogger(__name__)

+
 class GroupFormMixin:
-    template_name = 'agenda_culturel/forms/div_group.html'
+    template_name = "agenda_culturel/forms/div_group.html"

     class FieldGroup:
-        def __init__(self, id, label, display_label=False, maskable=False, default_masked=True):
+        def __init__(
+            self, id, label, display_label=False, maskable=False, default_masked=True
+        ):
             self.id = id
             self.label = label
             self.display_label = display_label
@@ -58,7 +60,7 @@ class GroupFormMixin:
             self.default_masked = default_masked

         def toggle_field_name(self):
-            return 'group_' + self.id
+            return "group_" + self.id

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -67,17 +69,32 @@ class GroupFormMixin:
     def add_group(self, *args, **kwargs):
         self.groups.append(GroupFormMixin.FieldGroup(*args, **kwargs))
         if self.groups[-1].maskable:
-            self.fields[self.groups[-1].toggle_field_name()] = BooleanField(required=False)
+            self.fields[self.groups[-1].toggle_field_name()] = BooleanField(
+                required=False
+            )
             self.fields[self.groups[-1].toggle_field_name()].toggle_group = True

     def get_fields_in_group(self, g):
-        return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and hasattr(f.field, "group_id") and f.field.group_id == g.id]
+        return [
+            f
+            for f in self.visible_fields()
+            if not hasattr(f.field, "toggle_group")
+            and hasattr(f.field, "group_id")
+            and f.field.group_id == g.id
+        ]

     def get_no_group_fields(self):
-        return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and (not hasattr(f.field, "group_id") or f.field.group_id == None)]
+        return [
+            f
+            for f in self.visible_fields()
+            if not hasattr(f.field, "toggle_group")
+            and (not hasattr(f.field, "group_id") or f.field.group_id is None)
+        ]

     def fields_by_group(self):
-        return [(g, self.get_fields_in_group(g)) for g in self.groups] + [(GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())]
+        return [(g, self.get_fields_in_group(g)) for g in self.groups] + [
+            (GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())
+        ]

     def clean(self):
         result = super().clean()
@@ -86,19 +103,26 @@ class GroupFormMixin:
         data = dict(self.data)
         # for each masked group, we remove data
         for g in self.groups:
-            if g.maskable and not g.toggle_field_name() in data:
+            if g.maskable and g.toggle_field_name() not in data:
                 fields = self.get_fields_in_group(g)
                 for f in fields:
                     self.cleaned_data[f.name] = None
-
+
         return result

+
 class TagForm(ModelForm):
-    required_css_class = 'required'
+    required_css_class = "required"

     class Meta:
         model = Tag
-        fields = ["name", "description", "in_included_suggestions", "in_excluded_suggestions", "principal"]
+        fields = [
+            "name",
+            "description",
+            "in_included_suggestions",
+            "in_excluded_suggestions",
+            "principal",
+        ]

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -107,45 +131,45 @@ class TagForm(ModelForm):


 class TagRenameForm(Form):
-    required_css_class = 'required'
+    required_css_class = "required"

-    name = CharField(
-        label=_('Name of new tag'),
-        required=True
-    )
+    name = CharField(label=_("Name of new tag"), required=True)

     force = BooleanField(
-        label=_('Force renaming despite the existence of events already using the chosen tag.'),
+        label=_(
+            "Force renaming despite the existence of events already using the chosen tag."
+        ),
     )

     def __init__(self, *args, **kwargs):
         force = kwargs.pop("force", False)
         name = kwargs.pop("name", None)
         super().__init__(*args, **kwargs)
-        if not (force or (not len(args) == 0 and 'force' in args[0])):
+        if not (force or (not len(args) == 0 and "force" in args[0])):
             del self.fields["force"]
-        if not name is None and self.fields["name"].initial is None:
+        if name is not None and self.fields["name"].initial is None:
             self.fields["name"].initial = name

-
     def is_force(self):
-        return "force" in self.fields and self.cleaned_data["force"] == True
+        return "force" in self.fields and self.cleaned_data["force"] is True


 class SimpleContactForm(GroupFormMixin, Form):
-    email = EmailField(
+    email = EmailField(
         label=_("Your email"),
         help_text=_("Your email address"),
         max_length=254,
-        required=False
+        required=False,
     )
     comments = CharField(
         label=_("Comments"),
-        help_text=_("Your message for the moderation team (comments, clarifications, requests...)"),
+        help_text=_(
+            "Your message for the moderation team (comments, clarifications, requests...)"
+        ),
         widget=Textarea,
         max_length=2048,
-        required=False
+        required=False,
     )

     def __init__(self, *args, **kwargs):
@@ -153,21 +177,23 @@
         super().__init__(*args, **kwargs)

         if not is_authenticated:
-            self.add_group('communication',
-                _('Receive notification of publication or leave a message for moderation'),
-                maskable=True,
-                default_masked=True)
-            self.fields["email"].group_id = 'communication'
-            self.fields["comments"].group_id = 'communication'
+            self.add_group(
+                "communication",
+                _(
+                    "Receive notification of publication or leave a message for moderation"
+                ),
+                maskable=True,
+                default_masked=True,
+            )
+            self.fields["email"].group_id = "communication"
+            self.fields["comments"].group_id = "communication"
         else:
             del self.fields["email"]
             del self.fields["comments"]

-
-
 class URLSubmissionForm(GroupFormMixin, Form):
-    required_css_class = 'required'
+    required_css_class = "required"

     url = URLField(max_length=512)
     category = ModelChoiceField(
@@ -177,28 +203,27 @@
         required=False,
     )
     tags = MultipleChoiceField(
-        label=_("Tags"),
-        initial=None,
-        choices=[],
-        required=False
+        label=_("Tags"), initial=None, choices=[], required=False
     )

     def __init__(self, *args, **kwargs):
-        is_authenticated = kwargs.pop("is_authenticated", False)
+        kwargs.pop("is_authenticated", False)
         super().__init__(*args, **kwargs)
         self.fields["tags"].choices = Tag.get_tag_groups(all=True)

-        self.add_group('event', _('Event'))
-        self.fields["url"].group_id = 'event'
-        self.fields["category"].group_id = 'event'
-        self.fields["tags"].group_id = 'event'
+        self.add_group("event", _("Event"))
+        self.fields["url"].group_id = "event"
+        self.fields["category"].group_id = "event"
+        self.fields["tags"].group_id = "event"


 class URLSubmissionFormWithContact(SimpleContactForm, URLSubmissionForm):
     pass

+
 URLSubmissionFormSet = formset_factory(URLSubmissionForm, extra=9, min_num=1)

+
 class DynamicArrayWidgetURLs(DynamicArrayWidget):
     template_name = "agenda_culturel/widgets/widget-urls.html"

@@ -208,13 +233,10 @@ class DynamicArrayWidgetTags(DynamicArrayWidget):


 class RecurrentImportForm(ModelForm):
-    required_css_class = 'required'
+    required_css_class = "required"

     defaultTags = MultipleChoiceField(
-        label=_("Tags"),
-        initial=None,
-        choices=[],
-        required=False
+        label=_("Tags"), initial=None, choices=[], required=False
     )

     class Meta:
@@ -227,7 +249,7 @@ class CategorisationRuleImportForm(ModelForm):
-    required_css_class = 'required'
+    required_css_class = "required"

     class Meta:
         model = CategorisationRule
@@ -235,20 +257,16 @@ class EventForm(GroupFormMixin, ModelForm):
-    required_css_class = 'required'
+    required_css_class = "required"

     old_local_image = CharField(widget=HiddenInput(), required=False)
     simple_cloning = CharField(widget=HiddenInput(), required=False)
     cloning = CharField(widget=HiddenInput(), required=False)
     tags = MultipleChoiceField(
-        label=_("Tags"),
-        initial=None,
-        choices=[],
-        required=False
+        label=_("Tags"), initial=None, choices=[], required=False
     )

-
     class Meta:
         model = Event
         exclude = [
@@ -260,7 +278,7 @@ class EventForm(GroupFormMixin, ModelForm):
             "moderated_by_user",
             "modified_by_user",
             "created_by_user",
-            "imported_by_user"
+            "imported_by_user",
         ]
         widgets = {
             "start_day": TextInput(
@@ -292,62 +310,68 @@ class EventForm(GroupFormMixin, ModelForm):
         if not is_authenticated:
             del self.fields["status"]
             del self.fields["organisers"]
-        self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
-        self.fields['category'].empty_label = None
-        self.fields['category'].initial = Category.get_default_category()
-        self.fields['tags'].choices = Tag.get_tag_groups(all=True)
+        self.fields["category"].queryset = self.fields["category"].queryset.order_by(
+            "name"
+        )
+        self.fields["category"].empty_label = None
+        self.fields["category"].initial = Category.get_default_category()
+        self.fields["tags"].choices = Tag.get_tag_groups(all=True)

         # set groups
-        self.add_group('main', _('Main fields'))
-        self.fields['title'].group_id = 'main'
+        self.add_group("main", _("Main fields"))
+        self.fields["title"].group_id = "main"

-        self.add_group('start', _('Start of event'))
-        self.fields['start_day'].group_id = 'start'
-        self.fields['start_time'].group_id = 'start'
+        self.add_group("start", _("Start of event"))
+        self.fields["start_day"].group_id = "start"
+        self.fields["start_time"].group_id = "start"

-        self.add_group('end', _('End of event'))
-        self.fields['end_day'].group_id = 'end'
-        self.fields['end_time'].group_id = 'end'
+        self.add_group("end", _("End of event"))
+        self.fields["end_day"].group_id = "end"
+        self.fields["end_time"].group_id = "end"

-        self.add_group('recurrences',
-            _('This is a recurring event'),
-            maskable=True,
-            default_masked=not (self.instance and
-                self.instance.recurrences and
-                self.instance.recurrences.rrules and
-                len(self.instance.recurrences.rrules) > 0))
+        self.add_group(
+            "recurrences",
+            _("This is a recurring event"),
+            maskable=True,
+            default_masked=not (
+                self.instance
+                and self.instance.recurrences
+                and self.instance.recurrences.rrules
+                and len(self.instance.recurrences.rrules) > 0
+            ),
+        )

-        self.fields['recurrences'].group_id = 'recurrences'
+        self.fields["recurrences"].group_id = "recurrences"

-        self.add_group('details', _('Details'))
-        self.fields['description'].group_id = 'details'
+        self.add_group("details", _("Details"))
+        self.fields["description"].group_id = "details"
         if is_authenticated:
-            self.fields['organisers'].group_id = 'details'
+            self.fields["organisers"].group_id = "details"

-        self.add_group('location', _('Location'))
-        self.fields['location'].group_id = 'location'
-        self.fields['exact_location'].group_id = 'location'
+        self.add_group("location", _("Location"))
+        self.fields["location"].group_id = "location"
+        self.fields["exact_location"].group_id = "location"

-        self.add_group('illustration', _('Illustration'))
-        self.fields['local_image'].group_id = 'illustration'
-        self.fields['image_alt'].group_id = 'illustration'
+        self.add_group("illustration", _("Illustration"))
+        self.fields["local_image"].group_id = "illustration"
+        self.fields["image_alt"].group_id = "illustration"

-        self.add_group('urls', _('URLs'))
-        self.fields["reference_urls"].group_id = 'urls'
+        self.add_group("urls", _("URLs"))
+        self.fields["reference_urls"].group_id = "urls"

         if is_authenticated:
-            self.add_group('meta-admin', _('Meta information'))
-            self.fields['category'].group_id = 'meta-admin'
-            self.fields['tags'].group_id = 'meta-admin'
-            self.fields['status'].group_id = 'meta-admin'
+            self.add_group("meta-admin", _("Meta information"))
+            self.fields["category"].group_id = "meta-admin"
+            self.fields["tags"].group_id = "meta-admin"
+            self.fields["status"].group_id = "meta-admin"
         else:
-            self.add_group('meta', _('Meta information'))
-            self.fields['category'].group_id = 'meta'
-            self.fields['tags'].group_id = 'meta'
+            self.add_group("meta", _("Meta information"))
+            self.fields["category"].group_id = "meta"
+            self.fields["tags"].group_id = "meta"

     def is_clone_from_url(self):
         return self.cloning
-
+
     def is_simple_clone_from_url(self):
         return self.simple_cloning
@@ -381,58 +405,61 @@ class EventForm(GroupFormMixin, ModelForm):
         super().clean()

         # when cloning an existing event, we need to copy the local image
-        if ((not 'local_image' in self.cleaned_data) or (self.cleaned_data['local_image'] is None)) and \
-            not self.cleaned_data['old_local_image'] is None and \
-            self.cleaned_data['old_local_image'] != "":
-            basename = self.cleaned_data['old_local_image']
+        if (
+            (
+                ("local_image" not in self.cleaned_data)
+                or (self.cleaned_data["local_image"] is None)
+            )
+            and self.cleaned_data["old_local_image"] is not None
+            and self.cleaned_data["old_local_image"] != ""
+        ):
+            basename = self.cleaned_data["old_local_image"]
             old = settings.MEDIA_ROOT + "/" + basename
             if os.path.isfile(old):
-                self.cleaned_data['local_image'] = File(name=basename, file=open(old, "rb"))
+                self.cleaned_data["local_image"] = File(
+                    name=basename, file=open(old, "rb")
+                )


 class EventFormWithContact(SimpleContactForm, EventForm):
     pass

+
 class MultipleChoiceFieldAcceptAll(MultipleChoiceField):
     def validate(self, value):
         pass


 class EventModerateForm(ModelForm):
-    required_css_class = 'required'
+    required_css_class = "required"

     tags = MultipleChoiceField(
-        label=_("Tags"),
-        help_text=_('Select tags from existing ones.'),
-        required=False
+        label=_("Tags"), help_text=_("Select tags from existing ones."), required=False
     )

     new_tags = MultipleChoiceFieldAcceptAll(
         label=_("New tags"),
-        help_text=_('Create new labels (sparingly). Note: by starting your tag with the characters “TW:”, you''ll create a “trigger warning” tag, and the associated events will be announced as such.'),
+        help_text=_(
+            "Create new labels (sparingly). Note: by starting your tag with the characters “TW:”, you"
+            "ll create a “trigger warning” tag, and the associated events will be announced as such."
+        ),
         widget=DynamicArrayWidget(),
-        required=False
+        required=False,
     )

     class Meta:
         model = Event
-        fields = [
-            "status",
-            "category",
-            "organisers",
-            "exact_location",
-            "tags"
-        ]
-        widgets = {
-            "status": RadioSelect
-        }
+        fields = ["status", "category", "organisers", "exact_location", "tags"]
+        widgets = {"status": RadioSelect}

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
-        self.fields['category'].empty_label = None
-        self.fields['category'].initial = Category.get_default_category()
-        self.fields['tags'].choices = Tag.get_tag_groups(all=True)
+        self.fields["category"].queryset = self.fields["category"].queryset.order_by(
+            "name"
+        )
+        self.fields["category"].empty_label = None
+        self.fields["category"].initial = Category.get_default_category()
+        self.fields["tags"].choices = Tag.get_tag_groups(all=True)

     def clean_new_tags(self):
         return list(set(self.cleaned_data.get("new_tags")))
@@ -440,17 +467,17 @@
     def clean(self):
         super().clean()

-        if self.cleaned_data['tags'] is None:
-            self.cleaned_data['tags'] = []
+        if self.cleaned_data["tags"] is None:
+            self.cleaned_data["tags"] = []

-        if not self.cleaned_data.get('new_tags') is None:
-            self.cleaned_data['tags'] += self.cleaned_data.get('new_tags')
-
-        self.cleaned_data['tags'] = list(set(self.cleaned_data['tags']))
+        if self.cleaned_data.get("new_tags") is not None:
+            self.cleaned_data["tags"] += self.cleaned_data.get("new_tags")
+
+        self.cleaned_data["tags"] = list(set(self.cleaned_data["tags"]))


 class BatchImportationForm(Form):
-    required_css_class = 'required'
+    required_css_class = "required"

     json = CharField(
         label="JSON",
@@ -461,14 +488,14 @@


 class FixDuplicates(Form):
-    required_css_class = 'required'
+    required_css_class = "required"

     action = ChoiceField()

     def __init__(self, *args, **kwargs):
         edup = kwargs.pop("edup", None)
         events = edup.get_duplicated()
-        nb_events = len(events)
+        len(events)
         super().__init__(*args, **kwargs)

         choices = []
@@ -485,7 +512,7 @@
             choices += [
                 (
                     "Select-" + str(e.pk),
-                    _("Select {} as representative version.").format(auc[i] + msg)
+                    _("Select {} as representative version.").format(auc[i] + msg),
                 )
             ]
@@ -494,11 +521,12 @@
             choices += [
                 (
                     "Update-" + str(e.pk),
-                    _("Update {} using some fields from other versions (interactive mode).").format(auc[i])
+                    _(
+                        "Update {} using some fields from other versions (interactive mode)."
+ ).format(auc[i]), ) ] - extra = "" if edup.has_local_version(): extra = _(" Warning: a version is already locally modified.") @@ -511,9 +539,7 @@ class FixDuplicates(Form): for i, e in enumerate(events): if e.status != Event.STATUS.TRASH: choices += [ - ( - "Remove-" + str(e.pk), - _("Make {} independent.").format(auc[i])) + ("Remove-" + str(e.pk), _("Make {} independent.").format(auc[i])) ] choices += [("NotDuplicates", _("Make all versions independent."))] @@ -533,7 +559,11 @@ class FixDuplicates(Form): return self.cleaned_data["action"].startswith("Remove") def get_selected_event_code(self): - if self.is_action_select() or self.is_action_remove() or self.is_action_update(): + if ( + self.is_action_select() + or self.is_action_remove() + or self.is_action_update() + ): return int(self.cleaned_data["action"].split("-")[-1]) else: return None @@ -547,21 +577,27 @@ class FixDuplicates(Form): class SelectEventInList(Form): - required_css_class = 'required' + required_css_class = "required" - event = ChoiceField(label=_('Event')) + event = ChoiceField(label=_("Event")) def __init__(self, *args, **kwargs): events = kwargs.pop("events", None) super().__init__(*args, **kwargs) self.fields["event"].choices = [ - (e.pk, (e.start_time.strftime('%H:%M') + " : " if e.start_time else "") + e.title + ((", " + e.location) if e.location else "")) for e in events + ( + e.pk, + (e.start_time.strftime("%H:%M") + " : " if e.start_time else "") + + e.title + + ((", " + e.location) if e.location else ""), + ) + for e in events ] class MergeDuplicates(Form): - required_css_class = 'required' + required_css_class = "required" checkboxes_fields = ["reference_urls", "description", "tags"] @@ -569,20 +605,23 @@ class MergeDuplicates(Form): self.duplicates = kwargs.pop("duplicates", None) self.event = kwargs.pop("event", None) self.events = list(self.duplicates.get_duplicated()) - nb_events = len(self.events) + len(self.events) super().__init__(*args, **kwargs) - if self.event: choices = [ - ("event_" + str(e.pk), _("Value of version {}").format(e.pk)) if e != self.event else - ("event_" + str(e.pk), _("Value of the selected version")) + ( + ("event_" + str(e.pk), _("Value of version {}").format(e.pk)) + if e != self.event + else ("event_" + str(e.pk), _("Value of the selected version")) + ) for e in self.events ] initial = "event_" + str(self.event.pk) else: choices = [ - ("event_" + str(e.pk), _("Value of version {}").format(e.pk)) for e in self.events + ("event_" + str(e.pk), _("Value of version {}").format(e.pk)) + for e in self.events ] initial = choices[0][0] for f in self.duplicates.get_items_comparison(): @@ -605,29 +644,41 @@ class MergeDuplicates(Form): result += ( '
<th><ul><li>' + e.title + "</li>" ) - for step in e.chronology_dates(): + for step in e.chronology_dates(): if step["data"] == "created_date": - result += '<li>Création le ' + localize(step["timestamp"]) + ' par ' + str(step["user"]) + '</li>' + result += ( "<li>Création le " + localize(step["timestamp"]) + " par " + str(step["user"]) + "</li>" ) if step["data"] == "modified_date": - result += '<li>Dernière modification le ' + localize(step["timestamp"]) - if e.modified_by_user: - result += ' par ' + e.modified_by_user.username + result += "<li>Dernière modification le " + localize( step["timestamp"] ) + if e.modified_by_user: + result += " par " + e.modified_by_user.username else: - result += ' par import récurrent' - result += '</li>' - + result += " par import récurrent" + result += "</li>" if step["data"] == "moderated_date": - result += '<li>Dernière modération le ' + localize(step["timestamp"]) + result += "<li>Dernière modération le " + localize( step["timestamp"] ) if e.moderated_by_user: - result += ' par ' + e.moderated_by_user.username + result += " par " + e.moderated_by_user.username - result += '</li>' + result += "</li>" if step["data"] == "imported_date": - result += '<li>Dernière importation le ' + localize(step["timestamp"]) + result += "<li>Dernière importation le " + localize( step["timestamp"] ) if e.imported_by_user: - result += ' par ' + e.imported_by_user.username + result += " par " + e.imported_by_user.username else: - result += ' par import récurrent' - result += '</li>' + result += " par import récurrent" + result += "</li>" result += "</ul>" result += "</th>" @@ -655,7 +706,9 @@ class MergeDuplicates(Form): else: checked = self.fields[key].initial - for i, (v, radio, ev) in enumerate(zip(e["values"], self.fields[e["key"]].choices, self.events)): + for i, (v, radio, ev) in enumerate( + zip(e["values"], self.fields[e["key"]].choices, self.events) + ): result += self.comparison_item(key, i, v, radio, ev, checked) result += "</tr>" @@ -677,20 +730,16 @@ class MergeDuplicates(Form): result += " checked" result += ' value="' + value + '"' result += ">" - result += ( '
<label>' + int_to_abc(i) + "</label>") + result += '<label>' + int_to_abc(i) + "</label>" result += "<br/>" if key == "image": result += str(field_to_html(ev.local_image, "local_image")) + "<br/>" result += "<br/>Lien d'import : " - result += (str(field_to_html(v, key)) + "
    ") + result += str(field_to_html(v, key)) + "" result += "" return result - def get_selected_events(self, key): value = self.cleaned_data.get(key) if key not in self.fields: @@ -704,7 +753,7 @@ class MergeDuplicates(Form): if e.pk == s: result.append(e) break - return result + return result else: selected = int(value.split("_")[-1]) for e in self.duplicates.get_duplicated(): @@ -715,7 +764,7 @@ class MergeDuplicates(Form): class CategorisationForm(Form): - required_css_class = 'required' + required_css_class = "required" def __init__(self, *args, **kwargs): if "events" in kwargs: @@ -747,7 +796,7 @@ class CategorisationForm(Form): class EventAddPlaceForm(Form): - required_css_class = 'required' + required_css_class = "required" place = ModelChoiceField( label=_("Place"), @@ -786,7 +835,7 @@ class EventAddPlaceForm(Form): class PlaceForm(GroupFormMixin, ModelForm): - required_css_class = 'required' + required_css_class = "required" apply_to_all = BooleanField( initial=True, @@ -804,26 +853,26 @@ class PlaceForm(GroupFormMixin, ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.add_group('header', _('Header')) - self.fields['name'].group_id = 'header' - - - self.add_group('address', _('Address')) - self.fields['address'].group_id = 'address' - self.fields['postcode'].group_id = 'address' - self.fields['city'].group_id = 'address' - self.fields['location'].group_id = 'address' + self.add_group("header", _("Header")) + self.fields["name"].group_id = "header" - self.add_group('meta', _('Meta')) - self.fields['aliases'].group_id = 'meta' + self.add_group("address", _("Address")) + self.fields["address"].group_id = "address" + self.fields["postcode"].group_id = "address" + self.fields["city"].group_id = "address" + self.fields["location"].group_id = "address" - self.add_group('information', _('Information')) - self.fields['description'].group_id = 'information' + self.add_group("meta", _("Meta")) + self.fields["aliases"].group_id = "meta" + + self.add_group("information", _("Information")) + self.fields["description"].group_id = "information" def as_grid(self): - result = ('
    ' + result = ( + '
    ' + super().as_p() - + '''
    + + """

    Cliquez pour ajuster la position GPS

    Verrouiller la position @@ -836,29 +885,32 @@ class PlaceForm(GroupFormMixin, ModelForm): field.removeAttribute("readonly"); } -
    ''') - +
    """ + ) + return mark_safe(result) def apply(self): return self.cleaned_data.get("apply_to_all") + class MessageForm(ModelForm): class Meta: model = Message fields = ["subject", "name", "email", "message", "related_event"] - widgets = {"related_event": HiddenInput(), "user": HiddenInput() } + widgets = {"related_event": HiddenInput(), "user": HiddenInput()} def __init__(self, *args, **kwargs): self.event = kwargs.pop("event", False) self.internal = kwargs.pop("internal", False) super().__init__(*args, **kwargs) - self.fields['related_event'].required = False + self.fields["related_event"].required = False if self.internal: self.fields.pop("name") self.fields.pop("email") + class MessageEventForm(ModelForm): class Meta: @@ -867,4 +919,4 @@ class MessageEventForm(ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.fields["message"].label = _("Add a comment") \ No newline at end of file + self.fields["message"].label = _("Add a comment") diff --git a/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py b/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py index e5c26e4..8b6014c 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py @@ -1,9 +1,10 @@ -from ..extractor import * +from ..extractor import Extractor import json from bs4 import BeautifulSoup -from urllib.parse import urlparse, unquote +from urllib.parse import urlparse import pytz import html +from datetime import datetime # A class dedicated to get events from les amis du temps des cerises @@ -13,8 +14,7 @@ class CExtractor(Extractor): def __init__(self): super().__init__() self.data = b'------toto\r\nContent-Disposition: form-data; name="p"\r\n\r\nfutur\r\n------toto--\r\n' - self.content_type = 'multipart/form-data; boundary=----toto' - + self.content_type = "multipart/form-data; boundary=----toto" def extract( self, content, url, url_human=None, default_values=None, published=False @@ -30,28 +30,28 @@ class CExtractor(Extractor): events = json.loads(content) for e in events: tags = [] - start_day = e["ev_date"].split(' ')[0] + start_day = e["ev_date"].split(" ")[0] start_time = e["ev_time"] title = html.unescape(e["ev_titre"]) - if "ev_sstitre" in e and e["ev_sstitre"] != '': - title = title + ' - ' + html.unescape(e["ev_sstitre"]) + if "ev_sstitre" in e and e["ev_sstitre"] != "": + title = title + " - " + html.unescape(e["ev_sstitre"]) soup = BeautifulSoup(e["ev_info"], "html.parser") description = soup.text location = html.unescape(e["li_nom"]) if "li_nom" in e else None - if "ev_canceled" in e and e["ev_canceled"] != '0': + if "ev_canceled" in e and e["ev_canceled"] != "0": tags += ["annulé"] image = None - if "ev_img" in e and e["ev_img"] != '': + if "ev_img" in e and e["ev_img"] != "": image = images_basename + e["ev_img"] - + naive_dt = datetime.strptime(e["ev_date"], "%Y-%m-%d %H:%M:%S") from_dt = from_timezone.localize(naive_dt) dt = to_timezone.normalize(from_dt) ts = int(datetime.timestamp(dt)) * 1000 - + event_url = root_url + "#" + str(ts) self.add_event( @@ -67,6 +67,7 @@ class CExtractor(Extractor): url_human=event_url, start_time=start_time, published=published, - image=image ) - + image=image, + ) + return self.get_structure() diff --git a/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py b/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py index cf17be7..695b14b 100644 --- 
a/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py @@ -1,5 +1,9 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractorNoPause +from ..extractor import Extractor from bs4 import BeautifulSoup +import re +from datetime import datetime, timedelta, date + # A class dedicated to get events from Arachnée Concert # URL: https://www.arachnee-concerts.com/agenda-des-concerts/ @@ -18,27 +22,31 @@ class CExtractor(TwoStepsExtractorNoPause): default_values=None, published=False, only_future=True, - ignore_404=True + ignore_404=True, ): match = re.match(r".*\&theatres=([^&]*)&.*", url) if match: self.theater = match[1] - return super().extract(content, url, url_human, default_values, published, only_future, ignore_404) + return super().extract( + content, url, url_human, default_values, published, only_future, ignore_404 + ) def build_event_url_list(self, content, infuture_days=180): - + soup = BeautifulSoup(content, "html.parser") containers = soup.select("ul.event_container>li") if containers: for c in containers: d = Extractor.parse_french_date(c.select_one(".date").text) - l = c.select_one(".event_auditory").text - if (self.theater is None or (l.startswith(self.theater))) and d < datetime.date.today() + timedelta(days=infuture_days): + la = c.select_one(".event_auditory").text + if ( + self.theater is None or (la.startswith(self.theater)) + ) and d < date.today() + timedelta(days=infuture_days): t = Extractor.parse_french_time(c.select_one(".time").text) e_url = c.select_one(".info a")["href"] - if not e_url in self.possible_dates: + if e_url not in self.possible_dates: self.possible_dates[e_url] = [] self.possible_dates[e_url].append((str(d) + " " + str(t))) self.add_event_url(e_url) @@ -53,10 +61,18 @@ class CExtractor(TwoStepsExtractorNoPause): ): soup = BeautifulSoup(event_content, "html.parser") - title = ", ".join([x.text for x in [soup.select_one(y) for y in [".page_title", ".artiste-subtitle"]] if x]) - + title = ", ".join( + [ + x.text + for x in [ + soup.select_one(y) for y in [".page_title", ".artiste-subtitle"] + ] + if x + ] + ) + image = soup.select_one(".entry-image .image_wrapper img") - if not image is None: + if image is not None: image = image["src"] descs = soup.select(".entry-content p") @@ -74,10 +90,22 @@ class CExtractor(TwoStepsExtractorNoPause): elif first_cat in ["theatre", "humour / one man show"]: category = "Spectacles" tags.append("🎭 théâtre") - elif first_cat in ["chanson francaise", "musique du monde", "pop / rock", "rap", "rnb", "raggae", "variete"]: + elif first_cat in [ + "chanson francaise", + "musique du monde", + "pop / rock", + "rap", + "rnb", + "raggae", + "variete", + ]: category = "Fêtes & Concerts" tags.append("🎵 concert") - elif first_cat in ["comedie musicale", "humour / one man show", "spectacle equestre"]: + elif first_cat in [ + "comedie musicale", + "humour / one man show", + "spectacle equestre", + ]: category = "Spectacles" elif first_cat in ["spectacle pour enfant"]: tags = ["🎈 jeune public"] @@ -87,12 +115,12 @@ class CExtractor(TwoStepsExtractorNoPause): dates = soup.select("#event_ticket_content>ul>li") for d in dates: - dt = datetime.datetime.fromisoformat(d.select_one(".date")["content"]) + dt = datetime.fromisoformat(d.select_one(".date")["content"]) date = dt.date() time = dt.time() if str(date) + " " + str(time) in self.possible_dates[event_url]: location = d.select_one(".event_auditory").text - + 
self.add_event_with_props( default_values, event_url, diff --git a/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py b/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py index 24e78fb..452a1c3 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py @@ -1,6 +1,10 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractor +from ..extractor import Extractor from bs4 import BeautifulSoup -from datetime import timedelta +import re +from datetime import datetime, timedelta +from urllib.parse import urlparse + # A class dedicated to get events from La Cour des 3 Coquins and Graines de spectacle # URL: https://billetterie-c3c.clermont-ferrand.fr// @@ -14,15 +18,26 @@ class CExtractor(TwoStepsExtractor): default_values=None, published=False, only_future=True, - ignore_404=True): + ignore_404=True, + ): self.root_address = "https://" + urlparse(url).netloc + "/" - return super().extract(content, url, url_human, default_values, published, only_future, ignore_404) + return super().extract( + content, url, url_human, default_values, published, only_future, ignore_404 + ) def category_agenda(self, category): if not category: return None - mapping = {"Théâtre": "Spectacles", "Concert": "Fêtes & Concerts", "Projection": "Cinéma"} - mapping_tag = {"Théâtre": "🎭 théâtre", "Concert": "🎵 concert", "Projection": None} + mapping = { + "Théâtre": "Spectacles", + "Concert": "Fêtes & Concerts", + "Projection": "Cinéma", + } + mapping_tag = { + "Théâtre": "🎭 théâtre", + "Concert": "🎵 concert", + "Projection": None, + } if category in mapping: return mapping[category], mapping_tag[category] else: @@ -58,14 +73,14 @@ class CExtractor(TwoStepsExtractor): image = image["src"] else: image = None - + description = soup.select_one(".presentation").get_text() duration = soup.select_one("#criteres .DUREE-V .valeur-critere li") - if not duration is None: + if duration is not None: duration = Extractor.parse_french_time(duration.text) location = soup.select_one("#criteres .LIEU-V .valeur-critere li") - if not location is None: + if location is not None: location = location.text categories = [] @@ -84,47 +99,74 @@ class CExtractor(TwoStepsExtractor): # TODO: parser les dates, récupérer les heures () dates = [o.get("value") for o in soup.select("select.datedleb_resa option")] - - patternCodeSite = re.compile(r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL) - patternCodeObject = re.compile(r'.*gsw_vars\["CODEPRESTATION"\] = "(.*?)";.*', flags=re.DOTALL) - patternCodeMoteur = re.compile(r'.*Resa.init_moteur_resa\(\'([0-9]+)\'\);.*', flags=re.DOTALL) - scripts = soup.find_all('script') + + patternCodeSite = re.compile( + r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL + ) + patternCodeObject = re.compile( + r'.*gsw_vars\["CODEPRESTATION"\] = "(.*?)";.*', flags=re.DOTALL + ) + patternCodeMoteur = re.compile( + r".*Resa.init_moteur_resa\(\'([0-9]+)\'\);.*", flags=re.DOTALL + ) + scripts = soup.find_all("script") codeSite = "" idObject = "" moteur = "" for script in scripts: - if(patternCodeSite.match(str(script.string))): + if patternCodeSite.match(str(script.string)): data = patternCodeSite.match(script.string) codeSite = data.groups()[0] - if(patternCodeObject.match(str(script.string))): + if patternCodeObject.match(str(script.string)): data = patternCodeObject.match(script.string) idObject = data.groups()[0] - 
if(patternCodeMoteur.match(str(script.string))): + if patternCodeMoteur.match(str(script.string)): data = patternCodeMoteur.match(script.string) moteur = data.groups()[0] - pause = self.downloader.pause self.downloader.pause = False # get exact schedule need two supplementary requests datetimes = [] - if codeSite != "" and idObject != "" and moteur != "": + if codeSite != "" and idObject != "" and moteur != "": for date in dates: # the first page is required such that the server knows the selected date - page1 = self.downloader.get_content(self.root_address + "/booking?action=searchAjax&cid=" + moteur + "&afficheDirectDispo=" + date + "&type_prestataire=V&cle_fiche=PRESTATION-V-" + codeSite + "-" + idObject + "&datedeb=" + date) + self.downloader.get_content( + self.root_address + + "/booking?action=searchAjax&cid=" + + moteur + + "&afficheDirectDispo=" + + date + + "&type_prestataire=V&cle_fiche=PRESTATION-V-" + + codeSite + + "-" + + idObject + + "&datedeb=" + + date + ) # then we get the form with hours - page2 = self.downloader.get_content(self.root_address + "/booking?action=detailTarifsPrestationAjax&prestation=V-" + codeSite + "-" + idObject) + page2 = self.downloader.get_content( + self.root_address + + "/booking?action=detailTarifsPrestationAjax&prestation=V-" + + codeSite + + "-" + + idObject + ) soup2 = BeautifulSoup(page2, "html.parser") times = [o.text for o in soup2.select("#quart_en_cours_spec option")] for t in times: startdate = Extractor.parse_french_date(date) starttime = Extractor.parse_french_time(t) - start = datetime.datetime.combine(startdate, starttime) + start = datetime.combine(startdate, starttime) enddate = None endtime = None if duration is not None: - end = start + timedelta(hours=duration.hour, minutes=duration.minute, seconds=duration.second) + end = start + timedelta( + hours=duration.hour, + minutes=duration.minute, + seconds=duration.second, + ) enddate = end.date() endtime = end.time() datetimes.append((startdate, starttime, enddate, endtime)) diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py b/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py index ab6747e..12719a5 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py @@ -1,6 +1,8 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractor import json5 from bs4 import BeautifulSoup +from datetime import datetime, date + # A class dedicated to get events from La Coopérative de Mai: # URL: https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes @@ -10,7 +12,17 @@ class CExtractor(TwoStepsExtractor): url_referer = "https://lacomediedeclermont.com/saison24-25/" def is_to_import_from_url(self, url): - if any(keyword in url for keyword in ["podcast", "on-debriefe", "popcorn", "rencontreautour","rencontre-autour","les-cles-du-spectacle"]): + if any( + keyword in url + for keyword in [ + "podcast", + "on-debriefe", + "popcorn", + "rencontreautour", + "rencontre-autour", + "les-cles-du-spectacle", + ] + ): return False else: return True @@ -40,11 +52,11 @@ class CExtractor(TwoStepsExtractor): url = self.url.split("?")[0] for d in list(set(dates)): - if not self.only_future or self.now <= datetime.date.fromisoformat(d): + if not self.only_future or self.now <= date.fromisoformat(d): events = self.downloader.get_content( url, post={"action": "load_evenements_jour", "jour": d}, - 
referer="https://lacomediedeclermont.com/saison24-25/" + referer="https://lacomediedeclermont.com/saison24-25/", ) if events: events = json5.loads(events) @@ -102,7 +114,6 @@ class CExtractor(TwoStepsExtractor): else: image = None - description = soup.select("#descspec") if description and len(description) > 0: description = description[0].get_text().replace("Lire plus...", "") diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py b/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py index 71f672d..dd13557 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py @@ -1,9 +1,10 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractor from ..generic_extractors.ggcal_link import GGCalendar import re import json5 from bs4 import BeautifulSoup + # A class dedicated to get events from La Coopérative de Mai: # URL: https://www.lacoope.org/concerts-calendrier/ class CExtractor(TwoStepsExtractor): diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py b/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py index c9bb1db..6655548 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py @@ -1,7 +1,8 @@ -from ..twosteps_extractor import * -import re +from ..twosteps_extractor import TwoStepsExtractor +from ..extractor import Extractor from bs4 import BeautifulSoup + # A class dedicated to get events from La puce à l'oreille # URL: https://www.lapucealoreille63.fr/ class CExtractor(TwoStepsExtractor): diff --git a/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py b/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py index ceb62df..710eb16 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py @@ -1,6 +1,6 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractorNoPause from bs4 import BeautifulSoup -from datetime import datetime + # A class dedicated to get events from Raymond Bar # URL: https://www.raymondbar.net/ @@ -10,15 +10,13 @@ class CExtractor(TwoStepsExtractorNoPause): super().__init__() def build_event_url_list(self, content, infuture_days=180): - + soup = BeautifulSoup(content, "html.parser") links = soup.select(".showsList .showMore") if links: - for l in links: - print(l["href"]) - self.add_event_url(l["href"]) - + for lk in links: + self.add_event_url(lk["href"]) def add_event_from_content( self, @@ -29,19 +27,19 @@ class CExtractor(TwoStepsExtractorNoPause): published=False, ): soup = BeautifulSoup(event_content, "html.parser") - + title = soup.select_one(".showDesc h4 a.summary").text start_day = soup.select_one(".showDate .value-title") start_time = None - if not start_day is None: + if start_day is not None: start_day = start_day["title"] - if not start_day is None: + if start_day is not None: start_day = start_day.split("T")[0] - - description = soup.select_one('.showDetails.description').text - image = soup.select('.showDetails.description img') - if not image is None: + + description = soup.select_one(".showDetails.description").text + image = soup.select(".showDetails.description img") + if image is not None: image_alt = image[-1]["alt"] image = image[-1]["src"] @@ -49,21 +47,21 @@ class CExtractor(TwoStepsExtractorNoPause): title += " - 
Attention: l'heure n'a pu être extraite" self.add_event_with_props( - default_values, - event_url, - title, - None, - start_day, - None, - description, - [], - recurrences=None, - uuids=[event_url], - url_human=event_url, - start_time=start_time, - end_day=None, - end_time=None, - published=published, - image=image, - image_alt=image_alt - ) \ No newline at end of file + default_values, + event_url, + title, + None, + start_day, + None, + description, + [], + recurrences=None, + uuids=[event_url], + url_human=event_url, + start_time=start_time, + end_day=None, + end_time=None, + published=published, + image=image, + image_alt=image_alt, + ) diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py b/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py index c385662..4d910b4 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py @@ -1,6 +1,8 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractor +from ..extractor import Extractor from bs4 import BeautifulSoup + # A class dedicated to get events from Le Fotomat' # URL: https://www.lefotomat.com/ class CExtractor(TwoStepsExtractor): diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lerio.py b/src/agenda_culturel/import_tasks/custom_extractors/lerio.py index 9420dea..3fb9c2f 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/lerio.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/lerio.py @@ -1,7 +1,9 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractorNoPause +from ..extractor import Extractor from bs4 import BeautifulSoup from datetime import datetime + # A class dedicated to get events from Cinéma Le Rio (Clermont-Ferrand) # URL: https://www.cinemalerio.com/evenements/ class CExtractor(TwoStepsExtractorNoPause): @@ -12,13 +14,13 @@ class CExtractor(TwoStepsExtractorNoPause): self.theater = None def build_event_url_list(self, content, infuture_days=180): - + soup = BeautifulSoup(content, "html.parser") links = soup.select("td.seance_link a") if links: - for l in links: - self.add_event_url(l["href"]) + for lk in links: + self.add_event_url(lk["href"]) def to_text_select_one(soup, filter): e = soup.select_one(filter) @@ -37,7 +39,7 @@ class CExtractor(TwoStepsExtractorNoPause): ): soup = BeautifulSoup(event_content, "html.parser") - + title = soup.select_one("h1").text alerte_date = CExtractor.to_text_select_one(soup, ".alerte_date") @@ -45,9 +47,9 @@ class CExtractor(TwoStepsExtractorNoPause): return dh = alerte_date.split("à") # if date is not found, we skip - if len(dh) != 2: + if len(dh) != 2: return - + date = Extractor.parse_french_date(dh[0], default_year=datetime.now().year) time = Extractor.parse_french_time(dh[1]) @@ -56,35 +58,43 @@ class CExtractor(TwoStepsExtractorNoPause): special = CExtractor.to_text_select_one(soup, ".alerte_text") # it's not a specific event: we skip it - special_lines = None if special is None else special.split('\n') - if special is None or len(special_lines) == 0 or \ - (len(special_lines) == 1 and special_lines[0].strip().startswith('En partenariat')): + special_lines = None if special is None else special.split("\n") + if ( + special is None + or len(special_lines) == 0 + or ( + len(special_lines) == 1 + and special_lines[0].strip().startswith("En partenariat") + ) + ): return - description = "\n\n".join([x for x in [synopsis, special_titre, special] if not x is None]) + 
description = "\n\n".join( + [x for x in [synopsis, special_titre, special] if x is not None] + ) image = soup.select_one(".col1 img") image_alt = None - if not image is None: + if image is not None: image_alt = image["alt"] image = image["src"] self.add_event_with_props( - default_values, - event_url, - title, - None, - date, - None, - description, - [], - recurrences=None, - uuids=[event_url], - url_human=event_url, - start_time=time, - end_day=None, - end_time=None, - published=published, - image=image, - image_alt=image_alt - ) \ No newline at end of file + default_values, + event_url, + title, + None, + date, + None, + description, + [], + recurrences=None, + uuids=[event_url], + url_human=event_url, + start_time=time, + end_day=None, + end_time=None, + published=published, + image=image, + image_alt=image_alt, + ) diff --git a/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py b/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py index 038cee8..e7fce3d 100644 --- a/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py +++ b/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py @@ -1,6 +1,9 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractorNoPause +from ..extractor import Extractor from bs4 import BeautifulSoup -from datetime import datetime, date +from datetime import date +from urllib.parse import urlparse + # A class dedicated to get events from Mille formes # URL: https://www.milleformes.fr/programme @@ -14,50 +17,54 @@ class CExtractor(TwoStepsExtractorNoPause): default_values=None, published=False, only_future=True, - ignore_404=True): + ignore_404=True, + ): self.root_address = "https://" + urlparse(url).netloc + "/" self.today = date.today() - return super().extract(content, url, url_human, default_values, published, only_future, ignore_404) - + return super().extract( + content, url, url_human, default_values, published, only_future, ignore_404 + ) def parse_category(self, cat): cat = cat.replace("\n", "").strip() if "exposition" in cat or "dispositif artistique interactif" in cat: - result = 'Visites & Expositions' + result = "Visites & Expositions" elif "atelier" in cat: - result = 'Animations & Ateliers' + result = "Animations & Ateliers" elif cat in ["buffet"]: - result = 'Rendez-vous locaux' + result = "Rendez-vous locaux" elif "ciné" in cat: - result = 'Cinéma' + result = "Cinéma" elif "concert" in cat: - result = 'Fêtes & Concerts' + result = "Fêtes & Concerts" elif "rencontre" in cat: - result = 'Rencontres & Débats' + result = "Rencontres & Débats" elif "spectacle" in cat: - result = 'Spectacles' + result = "Spectacles" else: - result = 'Sans catégorie' + result = "Sans catégorie" return result # this method is not perfect, but dates and hours are not structured def parse_dates(self, date): - dl = date.replace(' à ', '\n').split('\n') + dl = date.replace(" à ", "\n").split("\n") result = [] for d in dl: # only lines with a digit if sum(c.isdigit() for c in d) != 0: # split subparts - for d2 in d.replace(' et ', ', ').split(', '): + for d2 in d.replace(" et ", ", ").split(", "): d2 = d2.strip() - dd = Extractor.parse_french_date(d2, default_year_by_proximity=self.today) + dd = Extractor.parse_french_date( + d2, default_year_by_proximity=self.today + ) if dd is None: hh = Extractor.parse_french_time(d2) for i, r in enumerate(result): result[i][1].append(hh) - else: + else: result.append([dd, []]) if "De" in date and " à " in date: @@ -67,12 +74,11 @@ class 
CExtractor(TwoStepsExtractorNoPause): return result def build_event_url_list(self, content, infuture_days=180): - - soup = BeautifulSoup(content, "html.parser") - links = soup.select('.cell a.evenement') - for l in links: - self.add_event_url(self.root_address + l["href"]) + soup = BeautifulSoup(content, "html.parser") + links = soup.select(".cell a.evenement") + for lk in links: + self.add_event_url(self.root_address + lk["href"]) def add_event_from_content( self, @@ -83,39 +89,44 @@ class CExtractor(TwoStepsExtractorNoPause): published=False, ): soup = BeautifulSoup(event_content, "html.parser") - title = soup.select_one('h1').text.replace("\n", "").strip().title() + title = soup.select_one("h1").text.replace("\n", "").strip().title() - image = soup.select_one('.slide img') + image = soup.select_one(".slide img") if image is None: - image_alt = '' + image_alt = "" else: image_alt = image["alt"] image = self.root_address + image["src"] - - soustitre = soup.select_one('.sous-titre') - if not soustitre is None: + + soustitre = soup.select_one(".sous-titre") + if soustitre is not None: soustitre = soustitre.text.strip() - description = soup.select_one('.texte-full').text.strip() - infos = soup.select_one('.champ .infos') - if not infos is None: + description = soup.select_one(".texte-full").text.strip() + infos = soup.select_one(".champ .infos") + if infos is not None: infos = infos.text - location = soup.select_one('.champ .taxo.espace').text.strip() + location = soup.select_one(".champ .taxo.espace").text.strip() - age = soup.select_one('.champ.taxo-age').text - category = self.parse_category(soup.select_one('.champ.categorie').text) + soup.select_one(".champ.taxo-age").text + category = self.parse_category(soup.select_one(".champ.categorie").text) + date = soup.select_one(".champ.date-libre").text - date = soup.select_one('.champ.date-libre').text + description = "\n\n".join( + [x for x in [soustitre, description, date, infos] if x is not None] + ) - description = '\n\n'.join([x for x in [soustitre, description, date, infos] if not x is None]) - - if " au " in date or date.startswith("Du") or date.lower().strip() == "en continu" or date.startswith("Les"): + if ( + " au " in date + or date.startswith("Du") + or date.lower().strip() == "en continu" + or date.startswith("Les") + ): return - + dates = self.parse_dates(date) - end_day = None for d in dates: if len(d) >= 2: @@ -124,70 +135,81 @@ class CExtractor(TwoStepsExtractorNoPause): if len(d) == 3 and len(d[1]) == 2: start_time = d[1][0] end_time = d[1][1] - uuid = event_url + "?date=" + str(start_day) + "&hour=" + str(start_time) + uuid = ( + event_url + + "?date=" + + str(start_day) + + "&hour=" + + str(start_time) + ) self.add_event_with_props( - default_values, - event_url, - title, - category, - start_day, - location, - description, - [], - recurrences=None, - uuids=[uuid], - url_human=event_url, - start_time=start_time, - end_day=start_day, - end_time=end_time, - published=published, - image=image, - image_alt=image_alt - ) + default_values, + event_url, + title, + category, + start_day, + location, + description, + [], + recurrences=None, + uuids=[uuid], + url_human=event_url, + start_time=start_time, + end_day=start_day, + end_time=end_time, + published=published, + image=image, + image_alt=image_alt, + ) else: end_time = None if len(d[1]) == 0: start_time = None uuid = event_url + "?date=" + str(start_day) self.add_event_with_props( - default_values, - event_url, - title, - category, - start_day, - location, - description, - [], 
- recurrences=None, - uuids=[uuid], - url_human=event_url, - start_time=start_time, - end_day=start_day, - end_time=end_time, - published=published, - image=image, - image_alt=image_alt - ) + default_values, + event_url, + title, + category, + start_day, + location, + description, + [], + recurrences=None, + uuids=[uuid], + url_human=event_url, + start_time=start_time, + end_day=start_day, + end_time=end_time, + published=published, + image=image, + image_alt=image_alt, + ) for t in d[1]: start_time = t - uuid = event_url + "?date=" + str(start_day) + "&hour=" + str(start_time) + uuid = ( + event_url + + "?date=" + + str(start_day) + + "&hour=" + + str(start_time) + ) self.add_event_with_props( - default_values, - event_url, - title, - category, - start_day, - location, - description, - [], - recurrences=None, - uuids=[uuid], - url_human=event_url, - start_time=start_time, - end_day=start_day, - end_time=end_time, - published=published, - image=image, - image_alt=image_alt - ) - + default_values, + event_url, + title, + category, + start_day, + location, + description, + [], + recurrences=None, + uuids=[uuid], + url_human=event_url, + start_time=start_time, + end_day=start_day, + end_time=end_time, + published=published, + image=image, + image_alt=image_alt, + ) diff --git a/src/agenda_culturel/import_tasks/downloader.py b/src/agenda_culturel/import_tasks/downloader.py index 65b29ea..6d8c17f 100644 --- a/src/agenda_culturel/import_tasks/downloader.py +++ b/src/agenda_culturel/import_tasks/downloader.py @@ -5,10 +5,17 @@ import os from selenium import webdriver from selenium.webdriver.chrome.service import Service from selenium.webdriver.chrome.options import Options -from selenium.common.exceptions import * +from selenium.common.exceptions import ( + StaleElementReferenceException, + NoSuchElementException, + TimeoutException, + WebDriverException, + SessionNotCreatedException, +) from abc import ABC, abstractmethod import time + class Downloader(ABC): def __init__(self): self.support_2nd_extract = False @@ -17,13 +24,17 @@ class Downloader(ABC): def download(self, url, post=None): pass - def get_content(self, url, cache=None, referer=None, post=None, content_type=None, data=None): + def get_content( + self, url, cache=None, referer=None, post=None, content_type=None, data=None + ): if cache and os.path.exists(cache): print("Loading cache ({})".format(cache)) with open(cache) as f: content = "\n".join(f.readlines()) else: - content = self.download(url, referer=referer, post=post, content_type=content_type, data=data) + content = self.download( + url, referer=referer, post=post, content_type=content_type, data=data + ) if cache: print("Saving cache ({})".format(cache)) @@ -64,7 +75,7 @@ class SimpleDownloader(Downloader): except Exception as e: print(e) - raise Exception("Error during download: " + str(e)[:64] + '...') + raise Exception("Error during download: " + str(e)[:64] + "...") class ChromiumHeadlessDownloader(Downloader): @@ -88,10 +99,11 @@ class ChromiumHeadlessDownloader(Downloader): if noimage: self.options.add_experimental_option( - "prefs", { + "prefs", + { # block image loading "profile.managed_default_content_settings.images": 2, - } + }, ) self.service = Service("/usr/bin/chromedriver") @@ -107,21 +119,25 @@ class ChromiumHeadlessDownloader(Downloader): if self.pause: time.sleep(2) self.driver.save_screenshot(path_image) - except: - print(f">> Exception: {URL}") + except Exception: + print(f">> Exception: {url}") return False - + return True def download(self, url, 
referer=None, post=None, content_type=None, data=None): if post: raise Exception("POST method with Chromium headless not yet implemented") if referer: - raise Exception("Referer parameter with Chromium headless not yet implemented") + raise Exception( + "Referer parameter with Chromium headless not yet implemented" + ) if data: raise Exception("Data content with Chromium headless not yet implemented") if content_type: - raise Exception("Content-type parameter with Chromium headless not yet implemented") + raise Exception( + "Content-type parameter with Chromium headless not yet implemented" + ) print("Download {}".format(url)) try: @@ -130,27 +146,25 @@ class ChromiumHeadlessDownloader(Downloader): time.sleep(2) doc = self.driver.page_source - except StaleElementReferenceException as e: print(f">> {type(e).__name__}: {e.args}") - raise Exception("Error during download: " + str(e)[:64] + '...') + raise Exception("Error during download: " + str(e)[:64] + "...") except NoSuchElementException as e: print(f">> {type(e).__name__}: {e.args}") - raise Exception("Error during download: " + str(e)[:64] + '...') + raise Exception("Error during download: " + str(e)[:64] + "...") except TimeoutException as e: print(f">> {type(e).__name__}: {e.args}") - raise Exception("Error during download: " + str(e)[:64] + '...') + raise Exception("Error during download: " + str(e)[:64] + "...") except WebDriverException as e: print(f">> {type(e).__name__}: {e.args}") - raise Exception("Error during download: " + str(e)[:64] + '...') + raise Exception("Error during download: " + str(e)[:64] + "...") except SessionNotCreatedException as e: print(f">> {type(e).__name__}: {e.args}") - raise Exception("Error during download: " + str(e)[:64] + '...') + raise Exception("Error during download: " + str(e)[:64] + "...") except Exception as e: - print(f">> {type(e).__name__} line {e.__traceback__.tb_lineno} of {__file__}: {e.args}") - raise Exception("Error during download: " + str(e)[:64] + '...') - except: - print(f">> General Exception: {URL}") - raise Exception("Error during download: " + str(e)[:64] + '...') + print( + f">> {type(e).__name__} line {e.__traceback__.tb_lineno} of {__file__}: {e.args}" + ) + raise Exception("Error during download: " + str(e)[:64] + "...") return doc diff --git a/src/agenda_culturel/import_tasks/extractor.py b/src/agenda_culturel/import_tasks/extractor.py index 3a71410..173fce5 100644 --- a/src/agenda_culturel/import_tasks/extractor.py +++ b/src/agenda_culturel/import_tasks/extractor.py @@ -4,7 +4,7 @@ from datetime import datetime, time, date, timedelta import re import unicodedata from django.utils import timezone -import logging +from django.utils.translation import gettext_lazy as _ class Extractor(ABC): @@ -13,7 +13,7 @@ class Extractor(ABC): NO_START_DATE = 2 NOT_FOUND = 3 - url_referer=None + url_referer = None def __init__(self): self.header = {} @@ -25,12 +25,12 @@ class Extractor(ABC): # avoid the importer to use the downloader on the url # (used for extractors that are self-sufficient) self.no_downloader = False - + # parameters used by the downloader to get the content self.referer = "" self.data = None self.content_type = None - + def prepare_2nd_extract(self): pass @@ -106,16 +106,18 @@ class Extractor(ABC): return None try: day = int(day) - if not year is None: + if year is not None: year = int(year) - except: + except Exception: return None if day >= 32: return None # by proximity - if year is None and not default_year_by_proximity is None: - dates = 
[date(default_year_by_proximity.year + x, month, day) for x in [-1, 0, 1]] + if year is None and default_year_by_proximity is not None: + dates = [ + date(default_year_by_proximity.year + x, month, day) for x in [-1, 0, 1] + ] dates = [(abs((d - default_year_by_proximity).days), d) for d in dates] d = min(dates, key=lambda x: x[0]) return d[1] @@ -162,7 +164,7 @@ class Extractor(ABC): h = int(h) m = int(m) s = int(s) - except: + except Exception: return None if h >= 24 or m >= 60 or s >= 60: return None @@ -177,10 +179,6 @@ class Extractor(ABC): def set_downloader(self, downloader): self.downloader = downloader - @abstractmethod - def clean_url(url): - return url - def is_known_url(url): return False @@ -210,14 +208,14 @@ class Extractor(ABC): published=False, image=None, image_alt=None, - not_found=False + not_found=False, ): - comments = '' + comments = "" warnings = [] if title is None: print("WARNING: cannot publish an event without name") published = False - title = _('Unknown title') + title = _("Unknown title") warnings.append(Extractor.Warning.NO_TITLE) if start_day is None: print("WARNING: cannot publish an event without start day") @@ -233,10 +231,18 @@ class Extractor(ABC): event = { "title": title, - "category": category if category else self.default_value_if_exists(default_values, "category"), + "category": ( + category + if category + else self.default_value_if_exists(default_values, "category") + ), "start_day": start_day, "uuids": uuids, - "location": location if location else self.default_value_if_exists(default_values, "location"), + "location": ( + location + if location + else self.default_value_if_exists(default_values, "location") + ), "organisers": self.default_value_if_exists(default_values, "organisers"), "description": description, "tags": tags + tags_default, @@ -250,7 +256,7 @@ class Extractor(ABC): if event["comments"] is None: event["comments"] = comments else: - event["comments"] += '\n' + comments + event["comments"] += "\n" + comments # TODO: pourquoi url_human et non reference_url if url_human is not None: @@ -295,12 +301,23 @@ class Extractor(ABC): def get_default_extractors(single_event=False): from .generic_extractors.ical import ICALExtractor from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor - from .generic_extractors.ggcal_link import CExtractor as GoogleCalendarLinkEventExtractor + from .generic_extractors.ggcal_link import ( + CExtractor as GoogleCalendarLinkEventExtractor, + ) if single_event: - return [FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()] + return [ + FacebookEventExtractor(), + GoogleCalendarLinkEventExtractor(), + EventNotFoundExtractor(), + ] else: - return [ICALExtractor(), FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()] + return [ + ICALExtractor(), + FacebookEventExtractor(), + GoogleCalendarLinkEventExtractor(), + EventNotFoundExtractor(), + ] # A class that only produce a not found event @@ -312,14 +329,22 @@ class EventNotFoundExtractor(Extractor): self.set_header(url) self.clear_events() - self.add_event(default_values, "événement sans titre depuis " + url, - None, timezone.now().date(), None, - "l'import a échoué, la saisie doit se faire manuellement à partir de l'url source " + url, - [], [url], published=False, url_human=url, not_found=True) + self.add_event( + default_values, + "événement sans titre depuis " + url, + None, + timezone.now().date(), + None, + "l'import a échoué, la saisie doit se faire manuellement à 
partir de l'url source " + + url, + [], + [url], + published=False, + url_human=url, + not_found=True, + ) return self.get_structure() - def clean_url(url): return url - diff --git a/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py b/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py index ec5adf1..a761eac 100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py @@ -1,27 +1,33 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractorNoPause +from ..extractor import Extractor from bs4 import BeautifulSoup -from datetime import datetime + # A class dedicated to get events from apidae-tourisme widgets class CExtractor(TwoStepsExtractorNoPause): - def build_event_url_list(self, content, infuture_days=180): - + # Get line starting with wrapper.querySelector(".results_agenda").innerHTML = " # split using "=" and keep the end # strip it, and remove the first character (") and the two last ones (";) # remove the escapes and parse the contained html for line in content.split("\n"): - if line.startswith('wrapper.querySelector(".results_agenda").innerHTML = "'): - html = ('"'.join(line.split('"')[3:])).replace('\\"', '"').replace('\\n', "\n").replace('\\/', '/') + if line.startswith( + 'wrapper.querySelector(".results_agenda").innerHTML = "' + ): + html = ( + ('"'.join(line.split('"')[3:])) + .replace('\\"', '"') + .replace("\\n", "\n") + .replace("\\/", "/") + ) soup = BeautifulSoup(html, "html.parser") - links = soup.select('a.widgit_result') - for l in links: - self.add_event_url(l["data-w-href"]) + links = soup.select("a.widgit_result") + for lk in links: + self.add_event_url(lk["data-w-href"]) break - def add_event_from_content( self, event_content, @@ -38,17 +44,22 @@ class CExtractor(TwoStepsExtractorNoPause): # check for content for line in event_content.split("\n"): - if line.startswith('detailsWrapper.innerHTML ='): - html = ('"'.join(line.split('"')[1:])).replace('\\"', '"').replace('\\n', "\n").replace('\\/', '/') + if line.startswith("detailsWrapper.innerHTML ="): + html = ( + ('"'.join(line.split('"')[1:])) + .replace('\\"', '"') + .replace("\\n", "\n") + .replace("\\/", "/") + ) soup = BeautifulSoup(html, "html.parser") - title = soup.select_one('h2.widgit_title').text.strip() - image = soup.select_one('img') + title = soup.select_one("h2.widgit_title").text.strip() + image = soup.select_one("img") image_alt = image["alt"] image = image["src"] - description = soup.select('div.desc') - description = '\n'.join([d.text for d in description]) - openings = soup.select_one('.openings .mts').text.strip().split("\n")[0] + description = soup.select("div.desc") + description = "\n".join([d.text for d in description]) + openings = soup.select_one(".openings .mts").text.strip().split("\n")[0] start_time = None end_time = None if "tous les" in openings: @@ -61,43 +72,43 @@ class CExtractor(TwoStepsExtractorNoPause): start_time = Extractor.parse_french_time(hours[0]) if len(hours) > 1: end_time = Extractor.parse_french_time(hours[1]) - + contact = soup.select_one(".contact") sa = False location = [] for c in contact.children: - if c.name == 'h2' and c.text.strip() == "Adresse": + if c.name == "h2" and c.text.strip() == "Adresse": sa = True else: - if c.name == 'h2' and sa: + if c.name == "h2" and sa: break - if c.name == 'p' and sa: + if c.name == "p" and sa: e = c.text.strip() if e != "": location.append(e) - location = ', 
'.join(location) + location = ", ".join(location) - websites = soup.select("a.website") + soup.select("a.website") event_url = url_human + "#" + ref self.add_event_with_props( - default_values, - event_url, - title, - None, - start_day, - location, - description, - [], - recurrences=None, - uuids=[event_url], - url_human=event_url, - start_time=start_time, - end_day=start_day, - end_time=end_time, - published=published, - image=image, - image_alt=image_alt - ) + default_values, + event_url, + title, + None, + start_day, + location, + description, + [], + recurrences=None, + uuids=[event_url], + url_human=event_url, + start_time=start_time, + end_day=start_day, + end_time=end_time, + published=published, + image=image, + image_alt=image_alt, + ) return diff --git a/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py b/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py index cddbc76..4016094 100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py @@ -3,15 +3,16 @@ from bs4 import BeautifulSoup from urllib.parse import urlparse import time as t from django.utils.translation import gettext_lazy as _ +import re - -from ..extractor import * +from ..extractor import Extractor import json import logging logger = logging.getLogger(__name__) + class SimpleFacebookEvent: def __init__(self, data): self.elements = {} @@ -20,9 +21,8 @@ class SimpleFacebookEvent: self.elements[key] = data[key] if key in data else None if "parent_event" in data: - self.parent = SimpleFacebookEvent( - data["parent_event"] - ) + self.parent = SimpleFacebookEvent(data["parent_event"]) + class FacebookEvent: name = "event" @@ -48,17 +48,17 @@ class FacebookEvent: # each pair in the associated list is a key of our model and a path within FB data to # get the corresponding field rules = { - "event_description": [("description", ["text"])], + "event_description": [("description", ["text"])], "cover_media_renderer": [ ("image_alt", ["cover_photo", "photo", "accessibility_caption"]), ("image", ["cover_photo", "photo", "full_image", "uri"]), ("image", ["cover_media", 0, "full_image", "uri"]), ("image_alt", ["cover_media", 0, "accessibility_caption"]), - ], - "event_creator": - [("event_creator_name", ["name"]), - ("event_creator_url", ["url"]), - ], + ], + "event_creator": [ + ("event_creator_name", ["name"]), + ("event_creator_url", ["url"]), + ], "event_place": [("event_place_name", ["name"])], } @@ -82,9 +82,7 @@ class FacebookEvent: def get_element_date(self, key): v = self.get_element(key) - return ( - datetime.fromtimestamp(v).date() if v is not None and v != 0 else None - ) + return datetime.fromtimestamp(v).date() if v is not None and v != 0 else None def get_element_time(self, key): v = self.get_element(key) @@ -111,7 +109,11 @@ class FacebookEvent: error = False c = event[k] for ki in rule: - if c is not None and ki in c or (isinstance(c, list) and ki < len(c)): + if ( + c is not None + and ki in c + or (isinstance(c, list) and ki < len(c)) + ): c = c[ki] else: error = True @@ -127,9 +129,7 @@ class FacebookEvent: ) def get_neighbor_events(self, data): - self.neighbor_events = [ - SimpleFacebookEvent(d) for d in data - ] + self.neighbor_events = [SimpleFacebookEvent(d) for d in data] def __str__(self): return ( @@ -148,9 +148,7 @@ class FacebookEvent: id = self.elements["id"] for ne in self.neighbor_events: if ne.elements["id"] == id: - self.elements["end_timestamp"] = ne.elements[ - "end_timestamp" - ] + 
self.elements["end_timestamp"] = ne.elements["end_timestamp"] if ( "end_timestamp" not in self.elements @@ -185,9 +183,7 @@ class FacebookEvent: ) elif isinstance(array, list): for e in array: - event = FacebookEvent.find_event_fragment_in_array( - e, event, False - ) + event = FacebookEvent.find_event_fragment_in_array(e, event, False) if event is not None and first: event.consolidate_current_event() @@ -213,7 +209,6 @@ class FacebookEvent: def get_parent_id(self): return self.get_element("parent_if_exists_or_self")["id"] - def build_events(self, url): if self.neighbor_events is None or len(self.neighbor_events) == 0: @@ -231,13 +226,14 @@ class FacebookEvent: result.append(clone.build_event(url_base + nb_e.elements["id"] + "/")) return result + class CExtractor(Extractor): def __init__(self): super().__init__() self.has_2nd_method = True - def prepare_2nd_extract_dler(downloader): + def prepare_2nd_extract_dler(downloader): if downloader.support_2nd_extract: from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait @@ -245,30 +241,54 @@ class CExtractor(Extractor): path = './/div[not(@aria-hidden)]/div[@aria-label="Allow all cookies"]' try: - element = WebDriverWait(downloader.driver, 15).until(EC.visibility_of_element_located((By.XPATH, path))) + WebDriverWait(downloader.driver, 15).until( + EC.visibility_of_element_located((By.XPATH, path)) + ) except Exception as e: - raise Exception(_("Error while waiting for the cookie button to be visible: " + e.__class__.__name__ + ' ' + str(e))) + raise Exception( + _( + "Error while waiting for the cookie button to be visible: " + + e.__class__.__name__ + + " " + + str(e) + ) + ) try: button = downloader.driver.find_element(By.XPATH, path) except Exception as e: - raise Exception(_("Error while getting the cookie button to be visible: " + e.__class__.__name__ + ' ' + str(e))) + raise Exception( + _( + "Error while getting the cookie button to be visible: " + + e.__class__.__name__ + + " " + + str(e) + ) + ) try: button.click() except Exception as e: - raise Exception(_("Error while clicking on the cookie button to be visible: " + e.__class__.__name__ + ' ' + str(e))) + raise Exception( + _( + "Error while clicking on the cookie button to be visible: " + + e.__class__.__name__ + + " " + + str(e) + ) + ) t.sleep(5) def prepare_2nd_extract(self): CExtractor.prepare_2nd_extract_dler(self.downloader) - def clean_url(url): if CExtractor.is_known_url(url, False): u = urlparse(url) result = "https://www.facebook.com" + u.path # remove name in the url - match = re.match(r"(.*/events)/s/([a-zA-Z-][a-zA-Z-0-9-]+)/([0-9/]*)", result) + match = re.match( + r"(.*/events)/s/([a-zA-Z-][a-zA-Z-0-9-]+)/([0-9/]*)", result + ) if match: result = match[1] + "/" + match[3] @@ -279,7 +299,6 @@ class CExtractor(Extractor): else: return url - def is_known_url(url, include_links=True): u = urlparse(url) url_list = ["facebook.com", "www.facebook.com", "m.facebook.com"] @@ -298,14 +317,12 @@ class CExtractor(Extractor): for json_script in soup.find_all("script", type="application/json"): json_txt = json_script.get_text() json_struct = json.loads(json_txt) - fevent = FacebookEvent.find_event_fragment_in_array( - json_struct, fevent - ) + fevent = FacebookEvent.find_event_fragment_in_array(json_struct, fevent) if fevent is not None: self.set_header(url) for event in fevent.build_events(url): - logger.warning("published: " + str(published)) + logger.info("published: " + str(published)) event["published"] = published if default_values 
and "category" in default_values: @@ -314,8 +331,6 @@ class CExtractor(Extractor): return self.get_structure() else: logger.warning("cannot find any event in page") - raise Exception( - _("Cannot get Facebook event from {}").format(url) - ) + raise Exception(_("Cannot get Facebook event from {}").format(url)) return None diff --git a/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py b/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py index f806060..ebeb46b 100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py @@ -1,6 +1,5 @@ -from ..twosteps_extractor import * -from .fbevent import FacebookEvent -import json5 +from ..twosteps_extractor import TwoStepsExtractor +from .fbevent import FacebookEvent, CExtractor as FacebookEventExtractor from bs4 import BeautifulSoup import json import os @@ -22,10 +21,15 @@ class CExtractor(TwoStepsExtractor): self.has_2nd_method_in_list = True def find_event_id_fragment_in_array(self, array): - found = False if isinstance(array, dict): - if "__typename" in array and array["__typename"] == "Event" and "id" in array: - self.add_event_url("https://www.facebook.com/events/" + array["id"] + "/") + if ( + "__typename" in array + and array["__typename"] == "Event" + and "id" in array + ): + self.add_event_url( + "https://www.facebook.com/events/" + array["id"] + "/" + ) self.found = True else: for k in array: @@ -36,7 +40,6 @@ class CExtractor(TwoStepsExtractor): for e in array: self.find_event_id_fragment_in_array(e) - def find_in_js(self, soup): for json_script in soup.find_all("script", type="application/json"): @@ -44,11 +47,9 @@ class CExtractor(TwoStepsExtractor): json_struct = json.loads(json_txt) self.find_event_id_fragment_in_array(json_struct) - def prepare_2nd_extract_in_list(self): FacebookEventExtractor.prepare_2nd_extract_dler(self.downloader) - def build_event_url_list(self, content): soup = BeautifulSoup(content, "html.parser") @@ -57,23 +58,27 @@ class CExtractor(TwoStepsExtractor): self.found = False links = soup.find_all("a") for link in links: - href = link.get('href') - if not href is None and href.startswith('https://www.facebook.com/events/'): - self.add_event_url(href.split('?')[0]) + href = link.get("href") + if href is not None and href.startswith("https://www.facebook.com/events/"): + self.add_event_url(href.split("?")[0]) self.found = True self.has_page_items = False self.find_in_js(soup) - if not self.found: - logger.warning("cannot find any event link in events page. Save content page") + logger.warning( + "cannot find any event link in events page. 
Save content page" + ) if debug: CExtractor.dump_content_for_debug(content, self.url) if not self.has_page_items: - raise Exception(_("the page was not yet populated with events, so the loading time was probably too short")) - + raise Exception( + _( + "the page was not yet populated with events, so the loading time was probably too short" + ) + ) def dump_content_for_debug(content, url): directory = "errors/" @@ -85,9 +90,6 @@ class CExtractor(TwoStepsExtractor): text_file.write("\n\n") text_file.write(content) - - - def add_event_from_content( self, event_content, @@ -103,22 +105,19 @@ class CExtractor(TwoStepsExtractor): for json_script in soup.find_all("script", type="application/json"): json_txt = json_script.get_text() json_struct = json.loads(json_txt) - fevent = FacebookEvent.find_event_fragment_in_array( - json_struct, fevent - ) + fevent = FacebookEvent.find_event_fragment_in_array(json_struct, fevent) if fevent is not None: for event in fevent.build_events(event_url): event["published"] = published # only add the event if its unknown - if len([e for e in self.events if event["uuids"][0] in e["uuids"]]) == 0: + if ( + len([e for e in self.events if event["uuids"][0] in e["uuids"]]) + == 0 + ): self.add_event(default_values, **event) else: if debug: CExtractor.dump_content_for_debug(event_content, event_url) - raise Exception( - _("Cannot get Facebook event from {}").format(event_url) - ) - - + raise Exception(_("Cannot get Facebook event from {}").format(event_url)) diff --git a/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py b/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py index 4eb21c9..aba9edb 100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py @@ -1,16 +1,15 @@ from datetime import datetime from bs4 import BeautifulSoup -from urllib.parse import urlparse - -from ..extractor import * -from ..twosteps_extractor import * - -import json +from urllib.parse import urlparse, parse_qs +import dateutil.parser +from ..extractor import Extractor +import bbcode import logging logger = logging.getLogger(__name__) + class GGCalendar: def __init__(self, url): self.url = url @@ -20,8 +19,8 @@ class GGCalendar: result = {} for k, v in params.items(): - if k.startswith('e[0]'): - result[k.replace('e[0][', '')[:-1]] = v + if k.startswith("e[0]"): + result[k.replace("e[0][", "")[:-1]] = v else: result[k] = v @@ -37,29 +36,37 @@ class GGCalendar: params = GGCalendar.filter_keys(params) self.location = params["location"][0] if "location" in params else "" - self.title = params["text"][0] if "text" in params else params["title"][0] if "title" in params else "" - self.description = params["description"][0] if "description" in params else params["details"][0] if "details" in params else "" + self.title = ( + params["text"][0] + if "text" in params + else params["title"][0] if "title" in params else "" + ) + self.description = ( + params["description"][0] + if "description" in params + else params["details"][0] if "details" in params else "" + ) if self.description != "": self.description = BeautifulSoup(self.description, "html.parser").text if "dates" in params: dates = [x.replace(" ", "+") for x in params["dates"][0].split("/")] if len(dates) > 0: - date = parser.parse(dates[0]) + date = dateutil.parser.parse(dates[0]) self.start_day = date.date() self.start_time = date.time() if len(dates) == 2: - date = parser.parse(dates[1]) + date = 
dateutil.parser.parse(dates[1]) self.end_day = date.date() self.end_time = date.time() else: self.end_day = None self.end_time = None elif "date_start" in params: - date = parser.parse(params["date_start"][0]) + date = dateutil.parser.parse(params["date_start"][0]) self.start_day = date.date() self.start_time = date.time() if "date_end" in params: - dateend = parser.parse(params["date_end"][0]) + dateend = dateutil.parser.parse(params["date_end"][0]) if dateend != date: self.end_day = dateend.date() self.end_time = dateend.time() @@ -80,19 +87,21 @@ class GGCalendar: self.end_time = None - class CExtractor(Extractor): def __init__(self): super().__init__() - self.possible_urls = ["https://calendar.google.com/calendar/", "https://addtocalendar.com/", "https://www.google.com/calendar/event"] - + self.possible_urls = [ + "https://calendar.google.com/calendar/", + "https://addtocalendar.com/", + "https://www.google.com/calendar/event", + ] def guess_image(self, soup, url): image = soup.find("meta", property="og:image") if image is None: - for img in soup.select('img'): - if img.find_parent(name='nav'): + for img in soup.select("img"): + if img.find_parent(name="nav"): continue image = img["src"] break @@ -105,7 +114,6 @@ class CExtractor(Extractor): return image - def extract( self, content, url, url_human=None, default_values=None, published=False ): @@ -121,7 +129,7 @@ class CExtractor(Extractor): if gg_cal.is_valid_event(): start_day = gg_cal.start_day start_time = gg_cal.start_time - description = gg_cal.description.replace(' ', '') + description = gg_cal.description.replace(" ", "") end_day = gg_cal.end_day end_time = gg_cal.end_time location = gg_cal.location @@ -154,5 +162,4 @@ class CExtractor(Extractor): break - - return self.get_structure() \ No newline at end of file + return self.get_structure() diff --git a/src/agenda_culturel/import_tasks/generic_extractors/ical.py b/src/agenda_culturel/import_tasks/generic_extractors/ical.py index 9b85211..f02d0ec 100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/ical.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/ical.py @@ -8,7 +8,7 @@ from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning import pytz -from ..extractor import * +from ..extractor import Extractor from celery.utils.log import get_task_logger @@ -21,7 +21,6 @@ class ICALExtractor(Extractor): self.naive_timezone = False self.to_timezone = pytz.timezone("Europe/Paris") - def get_item_from_vevent(self, event, name, raw=False): try: r = event.decoded(name) @@ -29,21 +28,21 @@ class ICALExtractor(Extractor): return r else: return r.decode() - except: + except Exception: return None def guess_image_from_vevent(self, event): - item = self.get_item_from_vevent(event, 'ATTACH', raw=True) + item = self.get_item_from_vevent(event, "ATTACH", raw=True) if item is None: return None # it seems that FMTTYPE is not available through python-icalendar if isinstance(item, list): for i in item: - if '.jpg' in str(i).lower(): + if ".jpg" in str(i).lower(): return str(i) else: - if '.jpg' in str(item).lower(): + if ".jpg" in str(item).lower(): return str(item) return None @@ -98,7 +97,7 @@ class ICALExtractor(Extractor): end_day = end_day + timedelta(days=-1) location = self.get_item_from_vevent(event, "LOCATION") - if (not location is None) and location.replace(" ", "") == "": + if (location is not None) and location.replace(" ", "") == "": location = None description = self.get_item_from_vevent(event, "DESCRIPTION") @@ -184,7 +183,7 @@ class 
ICALExtractor(Extractor): end_time=end_time, last_modified=last_modified, published=published, - image=image + image=image, ) return self.get_structure() @@ -293,7 +292,8 @@ class ICALNoVCExtractor(ICALExtractor): image_alt, ) + class ICALNaiveTimezone(ICALExtractor): def __init__(self): super().__init__() - self.naive_timezone = True \ No newline at end of file + self.naive_timezone = True diff --git a/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py b/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py index ac184bd..87008ef 100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py @@ -1,8 +1,9 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractorNoPause +from ..extractor import Extractor from bs4 import BeautifulSoup -from datetime import datetime from urllib.parse import urlparse + # A class dedicated to get events from Raymond Bar # URL: https://www.raymondbar.net/ class CExtractor(TwoStepsExtractorNoPause): @@ -24,7 +25,6 @@ class CExtractor(TwoStepsExtractorNoPause): return "Sans catégorie" - def guess_tags_from_category(self, category): tags = [] if "Lecture" in category: @@ -35,26 +35,35 @@ class CExtractor(TwoStepsExtractorNoPause): return tags def build_event_url_list(self, content, infuture_days=180): - + soup = BeautifulSoup(content, "html.parser") - root_address_human = self.url_human.split('?')[0] - root_address = self.url.split('Service')[0] + root_address_human = self.url_human.split("?")[0] + root_address = self.url.split("Service")[0] items = soup.select("li.listItem") if items: for item in items: elems = item["onclick"].split('"') - v = elems[3].split('^')[1] + v = elems[3].split("^")[1] contentItem = elems[1] - multidate = item.select_one('.until.maindate').text != '' + multidate = item.select_one(".until.maindate").text != "" if not multidate: - url_human = root_address_human + '?p=*&v=' + v + "#contentitem=" + contentItem - url = root_address + 'Service.PubItem.cls?action=get&instance=*&uuid=' + contentItem + url_human = ( + root_address_human + + "?p=*&v=" + + v + + "#contentitem=" + + contentItem + ) + url = ( + root_address + + "Service.PubItem.cls?action=get&instance=*&uuid=" + + contentItem + ) self.add_event_url(url) self.add_event_url_human(url, url_human) - def add_event_from_content( self, event_content, @@ -68,7 +77,6 @@ class CExtractor(TwoStepsExtractorNoPause): soup = BeautifulSoup(event_content, "xml") - title = soup.select_one("Title").text content = soup.select_one("Content").text @@ -78,11 +86,15 @@ class CExtractor(TwoStepsExtractorNoPause): description = soup.select_one(".rightcolumn .content").text location = soup.select_one(".infos .location").text public = soup.select_one(".infos .public").text - start_day = Extractor.parse_french_date(soup.select_one(".infos .date .from").text) - start_time = Extractor.parse_french_time(soup.select_one(".infos .date .time").text) + start_day = Extractor.parse_french_date( + soup.select_one(".infos .date .from").text + ) + start_time = Extractor.parse_french_time( + soup.select_one(".infos .date .time").text + ) acces = soup.select_one(".infos .acces").text category = soup.select_one(".rightcolumn .category").text - infos = soup.select_one('.infos').text + infos = soup.select_one(".infos").text description = description + "\n" + infos @@ -94,21 +106,21 @@ class CExtractor(TwoStepsExtractorNoPause): tags.append("💶 gratuit") 
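# --- Editor's note (hypothetical sketch, not part of the original patch): the
# two helpers called above are assumed to turn French-language date and time
# strings into datetime.date / datetime.time values, e.g.:
#
#     from agenda_culturel.import_tasks.extractor import Extractor
#
#     day = Extractor.parse_french_date("mercredi 5 mars 2025")  # -> date(2025, 3, 5)
#     start = Extractor.parse_french_time("20h30")               # -> time(20, 30)
#
# Only the call sites above come from the patch; the accepted input formats
# shown here are an assumption.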
self.add_event_with_props( - default_values, - event_url, - title, - category, - start_day, - location, - description, - tags, - recurrences=None, - uuids=[event_url], - url_human=event_url, - start_time=start_time, - end_day=None, - end_time=None, - published=published, - image=image, - image_alt="" - ) \ No newline at end of file + default_values, + event_url, + title, + category, + start_day, + location, + description, + tags, + recurrences=None, + uuids=[event_url], + url_human=event_url, + start_time=start_time, + end_day=None, + end_time=None, + published=published, + image=image, + image_alt="", + ) diff --git a/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py b/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py index 7202256..087b1c2 100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py @@ -1,5 +1,4 @@ -from ..extractor import * -import json +from ..extractor import Extractor import dateutil.parser from datetime import datetime, timezone import requests @@ -10,6 +9,7 @@ import logging logger = logging.getLogger(__name__) + # A class dedicated to get events from les amis du temps des cerises # Website https://amisdutempsdescerises.org/ class CExtractor(Extractor): @@ -21,22 +21,30 @@ class CExtractor(Extractor): # Source code adapted from https://framagit.org/Marc-AntoineA/mobilizon-client-python def _request(self, body, data): - headers = {} + headers = {} - response = requests.post(url=self._api_end_point, json={ "query": body, "variables": data }, headers=headers) + response = requests.post( + url=self._api_end_point, + json={"query": body, "variables": data}, + headers=headers, + ) - if response.status_code == 200: - response_json = response.json() - if 'errors' in response_json: - raise Exception(f'Errors while requesting { body }. { str(response_json["errors"]) }') + if response.status_code == 200: + response_json = response.json() + if "errors" in response_json: + raise Exception( + f'Errors while requesting { body }. { str(response_json["errors"]) }' + ) - return response_json['data'] - else: - raise Exception(f'Error while requesting. Status code: { response.status_code }') + return response_json["data"] + else: + raise Exception( + f"Error while requesting. 
Status code: { response.status_code }" + ) def _oncoming_events_number(self): - query = ''' + query = """ query($preferredUsername: String!, $afterDatetime: DateTime) { group(preferredUsername: $preferredUsername) { organizedEvents(afterDatetime: $afterDatetime) { @@ -44,19 +52,15 @@ query($preferredUsername: String!, $afterDatetime: DateTime) { } } } - ''' + """ today = datetime.now(timezone.utc).isoformat() - data = { - 'preferredUsername': self._group_id, - 'afterDatetime': today - } + data = {"preferredUsername": self._group_id, "afterDatetime": today} r = self._request(query, data) - return r['group']['organizedEvents']['total'] - + return r["group"]["organizedEvents"]["total"] def _oncoming_events(self): def _oncoming_events_page(page): - query = ''' + query = """ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) { group(preferredUsername: $preferredUsername) { organizedEvents(afterDatetime: $afterDatetime, page: $page) { @@ -98,16 +102,16 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) { } } } - ''' + """ today = datetime.now(timezone.utc).isoformat() data = { - 'preferredUsername': self._group_id, - 'afterDatetime': today, - 'page': page + "preferredUsername": self._group_id, + "afterDatetime": today, + "page": page, } r = self._request(query, data) - return r['group']['organizedEvents']['elements'] + return r["group"]["organizedEvents"]["elements"] number_events = self._oncoming_events_number() @@ -132,9 +136,9 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) { # https://mobilizon.extinctionrebellion.fr/@xr_clermont_ferrand/events # split url to identify server url and actor id - elems = [x for x in url.split('/') if len(x) > 0 and x[0] == "@"] + elems = [x for x in url.split("/") if len(x) > 0 and x[0] == "@"] if len(elems) == 1: - params = elems[0].split('@') + params = elems[0].split("@") if len(params) == 2: self._api_end_point = "https://" + urlparse(url).netloc + "/api" self._group_id = params[1] @@ -144,24 +148,34 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) { events = self._oncoming_events() - for e in events: title = e["title"] event_url = e["url"] image = e["picture"]["url"] - location = e["physicalAddress"]["description"] + ', ' + e["physicalAddress"]["locality"] + location = ( + e["physicalAddress"]["description"] + + ", " + + e["physicalAddress"]["locality"] + ) soup = BeautifulSoup(e["description"], "html.parser") - + description = soup.text - start = dateutil.parser.isoparse(e["beginsOn"]).replace(tzinfo=timezone.utc).astimezone(tz=None) - end = dateutil.parser.isoparse(e["endsOn"]).replace(tzinfo=timezone.utc).astimezone(tz=None) + start = ( + dateutil.parser.isoparse(e["beginsOn"]) + .replace(tzinfo=timezone.utc) + .astimezone(tz=None) + ) + end = ( + dateutil.parser.isoparse(e["endsOn"]) + .replace(tzinfo=timezone.utc) + .astimezone(tz=None) + ) start_day = start.date() start_time = start.time() if e["options"]["showStartTime"] else None end_day = end.date() end_time = end.time() if e["options"]["showEndTime"] else None - self.add_event( default_values, title, @@ -177,6 +191,7 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) { published=published, image=image, end_day=end_day, - end_time=end_time) - + end_time=end_time, + ) + return self.get_structure() diff --git a/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py b/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py index 7ecd8a9..6db93e5 
100644 --- a/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py +++ b/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py @@ -1,11 +1,12 @@ -from ..twosteps_extractor import * +from ..twosteps_extractor import TwoStepsExtractor +from ..extractor import Extractor from bs4 import BeautifulSoup # A class dedicated to get events from MEC Wordpress plugin # URL: https://webnus.net/modern-events-calendar/ class CExtractor(TwoStepsExtractor): - + def local2agendaCategory(self, category): mapping = { "Musique": "Fêtes & Concerts", @@ -25,7 +26,7 @@ class CExtractor(TwoStepsExtractor): "Atelier": "atelier", "Projection": None, } - + if category in mapping: return mapping[category], mapping_tag[category] else: @@ -40,7 +41,7 @@ class CExtractor(TwoStepsExtractor): if len(link) == 1: url = link[0]["href"] title = link[0].get_text() - + if self.add_event_url(url): print(url, title) self.add_event_title(url, title) @@ -55,7 +56,6 @@ class CExtractor(TwoStepsExtractor): if tag: self.add_event_category(url, tag) - def add_event_from_content( self, event_content, @@ -65,7 +65,7 @@ class CExtractor(TwoStepsExtractor): published=False, ): soup = BeautifulSoup(event_content, "xml") - + start_day = soup.select(".mec-start-date-label") if start_day and len(start_day) > 0: start_day = Extractor.parse_french_date(start_day[0].get_text()) @@ -82,13 +82,15 @@ class CExtractor(TwoStepsExtractor): else: start_time = None end_time = None - + image = soup.select(".mec-events-event-image img") if image: image = image[0]["src"] else: image = None - description = soup.select(".mec-event-content .mec-single-event-description")[0].get_text(separator=" ") + description = soup.select(".mec-event-content .mec-single-event-description")[ + 0 + ].get_text(separator=" ") url_human = event_url diff --git a/src/agenda_culturel/import_tasks/importer.py b/src/agenda_culturel/import_tasks/importer.py index 150f0c9..d2a3e40 100644 --- a/src/agenda_culturel/import_tasks/importer.py +++ b/src/agenda_culturel/import_tasks/importer.py @@ -1,5 +1,5 @@ -from .downloader import * -from .extractor import * +from .downloader import SimpleDownloader +from .extractor import Extractor from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor import logging @@ -7,7 +7,6 @@ import logging logger = logging.getLogger(__name__) - class URL2Events: def __init__( self, downloader=SimpleDownloader(), extractor=None, single_event=False @@ -17,8 +16,13 @@ class URL2Events: self.single_event = single_event def process( - self, url, url_human=None, cache=None, default_values=None, published=False, - first=True + self, + url, + url_human=None, + cache=None, + default_values=None, + published=False, + first=True, ): referer = "" data = None @@ -29,10 +33,12 @@ class URL2Events: data = self.extractor.data content_type = self.extractor.content_type if self.extractor.no_downloader: - content = '' + content = "" if content is None: - content = self.downloader.get_content(url, cache, referer=referer, content_type=content_type, data=data) + content = self.downloader.get_content( + url, cache, referer=referer, content_type=content_type, data=data + ) if content is None: return None @@ -45,16 +51,25 @@ class URL2Events: else: # if the extractor is not defined, use a list of default extractors for e in Extractor.get_default_extractors(self.single_event): - logger.warning('Extractor::' + type(e).__name__) + logger.info("Extractor::" + type(e).__name__) e.set_downloader(self.downloader) try: - events = 
e.extract(content, url, url_human, default_values, published)
+                events = e.extract(
+                    content, url, url_human, default_values, published
+                )
                 if events is not None:
                     if len(events) > 0:
                         return events
-            except Exception as ex:
-                if first and FacebookEventExtractor.is_known_url(url) and self.downloader.support_2nd_extract and e.has_2nd_method:
-                    logger.info('Using cookie trick on a facebook event')
+            except Exception:
+                if (
+                    first
+                    and FacebookEventExtractor.is_known_url(url)
+                    and self.downloader.support_2nd_extract
+                    and e.has_2nd_method
+                ):
+                    logger.info("Using cookie trick on a facebook event")
                     e.prepare_2nd_extract()
-                    return self.process(url, url_human, cache, default_values, published, False)
+                    return self.process(
+                        url, url_human, cache, default_values, published, False
+                    )
 
         return None
diff --git a/src/agenda_culturel/import_tasks/twosteps_extractor.py b/src/agenda_culturel/import_tasks/twosteps_extractor.py
index 5300a4c..e6c05d0 100644
--- a/src/agenda_culturel/import_tasks/twosteps_extractor.py
+++ b/src/agenda_culturel/import_tasks/twosteps_extractor.py
@@ -1,19 +1,12 @@
 from abc import abstractmethod
-from urllib.parse import urlparse
-from urllib.parse import parse_qs
-from bs4 import BeautifulSoup
-
 import logging
+from .extractor import Extractor
+import datetime
 
 logger = logging.getLogger(__name__)
 
-from .extractor import *
-from django.utils.translation import gettext_lazy as _
-from dateutil import parser
-import datetime
-
 
 # A class to extract events from URL with two steps:
 # - first build a list of urls where the events will be found
 # - then for each document downloaded from these urls, build the events
@@ -43,7 +36,7 @@ class TwoStepsExtractor(Extractor):
 
     def add_event_url_human(self, url, url_human):
         self.add_event_property(url, "url_human", url_human)
-
+
     def add_event_start_day(self, url, start_day):
         self.add_event_property(url, "start_day", start_day)
 
@@ -150,8 +143,7 @@ class TwoStepsExtractor(Extractor):
         published=False,
         only_future=True,
         ignore_404=True,
-        first=True
-
+        first=True,
     ):
 
         first = True
@@ -192,22 +184,34 @@ class TwoStepsExtractor(Extractor):
                     )
                 except Exception as e:
                     # some website (FB) sometime need a second step
-                    if first and self.has_2nd_method_in_list and self.downloader.support_2nd_extract:
-                        logger.info('Using cookie trick on a facebook event')
+                    if (
+                        first
+                        and self.has_2nd_method_in_list
+                        and self.downloader.support_2nd_extract
+                    ):
+                        logger.info("Using cookie trick on a facebook event")
                         first = False
                         # TMP: trace what is going on
-                        from agenda_culturel.import_tasks.generic_extractors import fbevents
-                        fbevents.CExtractor.dump_content_for_debug(content_event, event_url)
+                        from agenda_culturel.import_tasks.generic_extractors import (
+                            fbevents,
+                        )
+
+                        fbevents.CExtractor.dump_content_for_debug(
+                            content_event, event_url
+                        )
                         self.prepare_2nd_extract_in_list()
                         content_event = self.downloader.get_content(event_url)
-                        if not content_event is None:
+                        if content_event is not None:
                             self.add_event_from_content(
-                                content_event, event_url, url_human, default_values, published
+                                content_event,
+                                event_url,
+                                url_human,
+                                default_values,
+                                published,
                             )
                         else:
                             raise e
-
         return self.get_structure()
 
 
@@ -221,15 +225,16 @@ class TwoStepsExtractorNoPause(TwoStepsExtractor):
         default_values=None,
         published=False,
         only_future=True,
-        ignore_404=True
+        ignore_404=True,
     ):
         if hasattr(self.downloader, "pause"):
             pause = self.downloader.pause
         else:
             pause = False
         self.downloader.pause = False
-        result = super().extract(content, url, url_human, 
default_values, published, only_future, ignore_404) + result = super().extract( + content, url, url_human, default_values, published, only_future, ignore_404 + ) self.downloader.pause = pause return result - diff --git a/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py b/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py index d71f97e..ba20cbf 100644 --- a/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py +++ b/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py @@ -57,7 +57,7 @@ def set_fixed_masked_from_representative(apps, cats): # for each event to_update = [] for d in duplicated: - d.fixed = not d.representative is None + d.fixed = d.representative is not None to_update.append(d) DuplicatedEvents.objects.bulk_update(to_update, fields=["fixed"]) @@ -68,7 +68,7 @@ def strip_place_aliases(apps, schema_editor): places = Place.objects.all() for p in places: - if not p.aliases is None: + if p.aliases is not None: p.aliases = [a.strip() for a in p.aliases] Place.objects.bulk_update(places, fields=["aliases"]) diff --git a/src/agenda_culturel/models.py b/src/agenda_culturel/models.py index c975ff3..554b523 100644 --- a/src/agenda_culturel/models.py +++ b/src/agenda_culturel/models.py @@ -1,8 +1,6 @@ from django.db import models, connection -from django.core.exceptions import FieldDoesNotExist from django_better_admin_arrayfield.models.fields import ArrayField from django.utils.translation import gettext_lazy as _ -from django.utils.safestring import mark_safe from django.template.defaultfilters import slugify from django.utils.dateparse import parse_date from django.urls import reverse @@ -11,7 +9,7 @@ from django_ckeditor_5.fields import CKEditor5Field from urllib.parse import urlparse from django.core.cache import cache from django.core.cache.utils import make_template_fragment_key -from django.contrib.auth.models import User, AnonymousUser +from django.contrib.auth.models import User import emoji from django.core.files.storage import default_storage from django.contrib.sites.models import Site @@ -30,24 +28,24 @@ from django.utils import timezone from django.contrib.postgres.search import TrigramSimilarity from django.db.models import Q, Count, F, Subquery, OuterRef, Func from django.db.models.functions import Lower -from django.contrib.postgres.lookups import Unaccent import recurrence.fields import recurrence import copy import unicodedata from collections import defaultdict -from .import_tasks.generic_extractors.fbevent import CExtractor as FacebookEventExtractor +from .import_tasks.generic_extractors.fbevent import ( + CExtractor as FacebookEventExtractor, +) from .import_tasks.extractor import Extractor from django.template.defaultfilters import date as _date from datetime import time, timedelta, date from django.utils.timezone import datetime -from django.utils import timezone from location_field.models.spatial import LocationField from django.contrib.gis.geos import Point -from .calendar import CalendarList, CalendarDay +from .calendar import CalendarDay from icalendar import Calendar as icalCal from icalendar import Event as icalEvent @@ -71,8 +69,9 @@ class StaticContent(models.Model): unique=True, ) text = CKEditor5Field( - verbose_name=_("Content"), help_text=_("Text as shown to the visitors"), - blank=True + verbose_name=_("Content"), + help_text=_("Text as shown to the visitors"), + blank=True, ) url_path = models.CharField( verbose_name=_("URL path"), @@ -83,7 +82,7 @@ 
class StaticContent(models.Model): verbose_name = _("Static content") verbose_name_plural = _("Static contents") indexes = [ - models.Index(fields=['name']), + models.Index(fields=["name"]), ] def __str__(self): @@ -114,7 +113,7 @@ class Category(models.Model): verbose_name=_("Name"), help_text=_("Category name"), max_length=512 ) - slug = AutoSlugField(null=True, default=None, unique=True, populate_from='name') + slug = AutoSlugField(null=True, default=None, unique=True, populate_from="name") color = ColorField( verbose_name=_("Color"), @@ -135,7 +134,6 @@ class Category(models.Model): verbose_name=_("Position for ordering categories"), default=0 ) - def save(self, *args, **kwargs): if self.color is None: existing_colors = [c.color for c in Category.objects.all()] @@ -157,7 +155,7 @@ class Category(models.Model): default = Category.objects.get(name=Category.default_name) return default - except: + except Exception: # if it does not exist, return it default, created = Category.objects.get_or_create( name=Category.default_name, @@ -176,7 +174,7 @@ class Category(models.Model): return "cat-" + str(self.id) def get_absolute_url(self): - return reverse('home_category', kwargs={"cat": self.slug}) + return reverse("home_category", kwargs={"cat": self.slug}) def __str__(self): return self.name @@ -185,15 +183,14 @@ class Category(models.Model): verbose_name = _("Category") verbose_name_plural = _("Categories") indexes = [ - models.Index(fields=['name']), + models.Index(fields=["name"]), ] class Tag(models.Model): name = models.CharField( - verbose_name=_("Name"), help_text=_("Tag name"), max_length=512, - unique=True + verbose_name=_("Name"), help_text=_("Tag name"), max_length=512, unique=True ) description = CKEditor5Field( @@ -205,17 +202,19 @@ class Tag(models.Model): principal = models.BooleanField( verbose_name=_("Principal"), - help_text=_("This tag is highlighted as a main tag for visitors, particularly in the filter."), + help_text=_( + "This tag is highlighted as a main tag for visitors, particularly in the filter." 
+ ), default=False, ) - in_excluded_suggestions = models.BooleanField( + in_excluded_suggestions = models.BooleanField( verbose_name=_("In excluded suggestions"), help_text=_("This tag will be part of the excluded suggestions."), default=False, ) - in_included_suggestions = models.BooleanField( + in_included_suggestions = models.BooleanField( verbose_name=_("In included suggestions"), help_text=_("This tag will be part of the included suggestions."), default=False, @@ -225,24 +224,31 @@ class Tag(models.Model): verbose_name = _("Tag") verbose_name_plural = _("Tags") indexes = [ - models.Index(fields=['name']), + models.Index(fields=["name"]), ] - def get_absolute_url(self): return reverse("view_tag", kwargs={"t": self.name}) - def clear_cache(): for exclude in [False, True]: for include in [False, True]: for nb_suggestions in [10]: - id_cache = 'all_tags ' + str(exclude) + ' ' + str(include) + ' ' + str(nb_suggestions) + id_cache = ( + "all_tags " + + str(exclude) + + " " + + str(include) + + " " + + str(nb_suggestions) + ) id_cache = hashlib.md5(id_cache.encode("utf8")).hexdigest() cache.delete(id_cache) def get_tag_groups(nb_suggestions=10, exclude=False, include=False, all=False): - id_cache = 'all_tags ' + str(exclude) + ' ' + str(include) + ' ' + str(nb_suggestions) + id_cache = ( + "all_tags " + str(exclude) + " " + str(include) + " " + str(nb_suggestions) + ) id_cache = hashlib.md5(id_cache.encode("utf8")).hexdigest() result = cache.get(id_cache) @@ -254,13 +260,19 @@ class Tag(models.Model): obj_tags = Tag.objects if all: - obj_tags = obj_tags.filter(Q(in_excluded_suggestions=True)|Q(in_included_suggestions=True)|Q(principal=True)) + obj_tags = obj_tags.filter( + Q(in_excluded_suggestions=True) + | Q(in_included_suggestions=True) + | Q(principal=True) + ) else: if exclude: obj_tags = obj_tags.filter(Q(in_excluded_suggestions=True)) if include: - obj_tags = obj_tags.filter(Q(in_included_suggestions=True)|Q(principal=True)) - + obj_tags = obj_tags.filter( + Q(in_included_suggestions=True) | Q(principal=True) + ) + if not exclude and not include: obj_tags = obj_tags.filter(principal=True) @@ -268,36 +280,56 @@ class Tag(models.Model): if len(obj_tags) > nb_suggestions: nb_suggestions = len(obj_tags) - - tags = [{"tag": t["tag"], "count": 1000000 if t["tag"] in obj_tags else t["count"]} for t in free_tags] - tags += [{"tag": o, "count": 0} for o in Tag.objects.filter(~Q(name__in=f_tags)).values_list("name", flat=True)] + tags = [ + { + "tag": t["tag"], + "count": 1000000 if t["tag"] in obj_tags else t["count"], + } + for t in free_tags + ] + tags += [ + {"tag": o, "count": 0} + for o in Tag.objects.filter(~Q(name__in=f_tags)).values_list( + "name", flat=True + ) + ] tags.sort(key=lambda x: -x["count"]) tags1 = tags[0:nb_suggestions] - tags1.sort(key=lambda x: emoji.demojize(remove_accents(x["tag"]).lower(), delimiters=('000', ''))) + tags1.sort( + key=lambda x: emoji.demojize( + remove_accents(x["tag"]).lower(), delimiters=("000", "") + ) + ) tags2 = tags[nb_suggestions:] - tags2.sort(key=lambda x: emoji.demojize(remove_accents(x["tag"]).lower(), delimiters=('000', ''))) + tags2.sort( + key=lambda x: emoji.demojize( + remove_accents(x["tag"]).lower(), delimiters=("000", "") + ) + ) - result = ((_('Suggestions'), [(t["tag"], t["tag"]) for t in tags1]), - (_('Others'), [(t["tag"], t["tag"]) for t in tags2])) - - cache.set(id_cache, result, 3000) # 50mn + result = ( + (_("Suggestions"), [(t["tag"], t["tag"]) for t in tags1]), + (_("Others"), [(t["tag"], t["tag"]) for t in tags2]), + ) + + 
cache.set(id_cache, result, 3000) # 50mn return result def __str__(self): return self.name - class DuplicatedEvents(models.Model): - representative = models.ForeignKey( "Event", verbose_name=_("Representative event"), - help_text=_("This event is the representative event of the duplicated events group"), + help_text=_( + "This event is the representative event of the duplicated events group" + ), null=True, default=None, on_delete=models.SET_DEFAULT, @@ -307,10 +339,9 @@ class DuplicatedEvents(models.Model): verbose_name = _("Duplicated events") verbose_name_plural = _("Duplicated events") indexes = [ - models.Index(fields=['representative']), + models.Index(fields=["representative"]), ] - def __init__(self, *args, **kwargs): self.events = None super().__init__(*args, **kwargs) @@ -330,7 +361,7 @@ class DuplicatedEvents(models.Model): return self.representative def fixed(self): - return not self.representative is None + return self.representative is not None def is_published(self): return len([e for e in self.get_duplicated() if e.is_published()]) > 0 @@ -345,12 +376,12 @@ class DuplicatedEvents(models.Model): if self.representative and self.representative.local_version(): return self.representative - l = [e for e in self.get_duplicated() if e.local_version()] - if len(l) == 0: + lv = [e for e in self.get_duplicated() if e.local_version()] + if len(lv) == 0: return None else: - l.sort(key=lambda x: x.modified_date, reverse=True) - return l[0] + lv.sort(key=lambda x: x.modified_date, reverse=True) + return lv[0] def merge_into(self, other): # for all objects associated to this group @@ -366,7 +397,7 @@ class DuplicatedEvents(models.Model): self.delete() # this method fixes the duplicated events by using the given event - # as the representative one. + # as the representative one. # if no event is given, the last one (by creation date) is selected. 
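# --- Editor's note (hypothetical usage sketch, not part of the original patch):
# per the comment above, fix() promotes one event of the group to the role of
# representative, marks it published, and returns the group size:
#
#     group = event.other_versions        # a DuplicatedEvents instance
#     group.fix()                         # pick the representative automatically
#     group.fix(event=preferred_event)    # or promote a chosen event
#
# The variable names are illustrative; only fix() and its optional event
# argument come from the patch itself.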
def fix(self, event=None): events = self.get_duplicated() @@ -375,13 +406,12 @@ class DuplicatedEvents(models.Model): for e in events: if event is None: event = e - if not event is None: + if event is not None: event.status = Event.STATUS.PUBLISHED self.representative = event Event.objects.bulk_update(events, fields=["status"]) self.save() return len(events) - def merge_groups(groups): if len(groups) == 0: @@ -404,20 +434,21 @@ class DuplicatedEvents(models.Model): nb, d = singletons.delete() return nb - def not_fixed_qs(qs=None, fixed=False): if not qs: qs = DuplicatedEvents.objects - - qs = qs.annotate(nb_no_trash=Count("event", filter=~Q(event__status=Event.STATUS.TRASH))) - q = ~Q(representative__isnull=True)|Q(nb_no_trash__lte=1) + + qs = qs.annotate( + nb_no_trash=Count("event", filter=~Q(event__status=Event.STATUS.TRASH)) + ) + q = ~Q(representative__isnull=True) | Q(nb_no_trash__lte=1) if fixed: return qs.filter(q) else: return qs.exclude(q) def save(self, *args, **kwargs): - if self.representative and not self.representative in self.event_set.all(): + if self.representative and self.representative not in self.event_set.all(): self.representative = None super().save(*args, **kwargs) @@ -425,7 +456,12 @@ class DuplicatedEvents(models.Model): def get_import_messages(self): msgs = [] for e in self.get_duplicated(): - for m in e.message_set.filter(message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS]).order_by("date"): + for m in e.message_set.filter( + message_type__in=[ + Message.TYPE.IMPORT_PROCESS, + Message.TYPE.UPDATE_PROCESS, + ] + ).order_by("date"): msgs.append(m) return msgs @@ -438,35 +474,44 @@ class DuplicatedEvents(models.Model): class ReferenceLocation(models.Model): - name = models.CharField(verbose_name=_("Name"), help_text=_("Name of the location"), unique=True, null=False) - location = LocationField(based_fields=["name"], zoom=12, default=Point(3.08333, 45.783329), srid=4326) + name = models.CharField( + verbose_name=_("Name"), + help_text=_("Name of the location"), + unique=True, + null=False, + ) + location = LocationField( + based_fields=["name"], zoom=12, default=Point(3.08333, 45.783329), srid=4326 + ) main = models.IntegerField( verbose_name=_("Main"), - help_text=_("This location is one of the main locations (shown first higher values)."), + help_text=_( + "This location is one of the main locations (shown first higher values)." + ), default=0, ) suggested_distance = models.IntegerField( verbose_name=_("Suggested distance (km)"), - help_text=_("If this distance is given, this location is part of the suggested filters."), + help_text=_( + "If this distance is given, this location is part of the suggested filters." 
+ ), null=True, - default=None + default=None, ) - slug = AutoSlugField(null=True, default=None, unique=True, populate_from='name') + slug = AutoSlugField(null=True, default=None, unique=True, populate_from="name") class Meta: verbose_name = _("Reference location") verbose_name_plural = _("Reference locations") indexes = [ - models.Index(fields=['name']), + models.Index(fields=["name"]), ] def __str__(self): return self.name - - class Place(models.Model): name = models.CharField(verbose_name=_("Name"), help_text=_("Name of the place")) address = models.CharField( @@ -475,9 +520,20 @@ class Place(models.Model): blank=True, null=True, ) - postcode = models.CharField(verbose_name=_("Postcode"), help_text=_("The post code is not displayed, but makes it easier to find an address when you enter it."), blank=True, null=True) + postcode = models.CharField( + verbose_name=_("Postcode"), + help_text=_( + "The post code is not displayed, but makes it easier to find an address when you enter it." + ), + blank=True, + null=True, + ) city = models.CharField(verbose_name=_("City"), help_text=_("City name")) - location = LocationField(based_fields=["name", "address", "postcode", "city"], zoom=12, default=Point(3.08333, 45.783329)) + location = LocationField( + based_fields=["name", "address", "postcode", "city"], + zoom=12, + default=Point(3.08333, 45.783329), + ) description = CKEditor5Field( verbose_name=_("Description"), @@ -501,9 +557,9 @@ class Place(models.Model): verbose_name_plural = _("Places") ordering = ["name"] indexes = [ - models.Index(fields=['name']), - models.Index(fields=['city']), - models.Index(fields=['location']), + models.Index(fields=["name"]), + models.Index(fields=["city"]), + models.Index(fields=["location"]), ] def __str__(self): @@ -513,13 +569,19 @@ class Place(models.Model): return self.name + ", " + self.city def get_absolute_url(self): - return reverse("view_place_fullname", kwargs={"pk": self.pk, "extra": slugify(self.name)}) + return reverse( + "view_place_fullname", kwargs={"pk": self.pk, "extra": slugify(self.name)} + ) def nb_events(self): return Event.objects.filter(exact_location=self).count() def nb_events_future(self): - return Event.objects.filter(start_day__gte=datetime.now()).filter(exact_location=self).count() + return ( + Event.objects.filter(start_day__gte=datetime.now()) + .filter(exact_location=self) + .count() + ) def match(self, event): if self.aliases and event.location: @@ -549,14 +611,19 @@ class Place(models.Model): for p in Place.objects.values("city").distinct().order_by("city") ] ) - except: + except Exception: tags = [] return tags + class Organisation(models.Model): name = models.CharField( - verbose_name=_("Name"), help_text=_("Organisation name"), max_length=512, null=False, unique=True - ) + verbose_name=_("Name"), + help_text=_("Organisation name"), + max_length=512, + null=False, + unique=True, + ) website = models.URLField( verbose_name=_("Website"), @@ -576,7 +643,9 @@ class Organisation(models.Model): principal_place = models.ForeignKey( Place, verbose_name=_("Principal place"), - help_text=_("Place mainly associated with this organizer. Mainly used if there is a similarity in the name, to avoid redundant displays."), + help_text=_( + "Place mainly associated with this organizer. Mainly used if there is a similarity in the name, to avoid redundant displays." 
+ ), null=True, on_delete=models.SET_NULL, blank=True, @@ -590,8 +659,7 @@ class Organisation(models.Model): return self.name def get_absolute_url(self): - return reverse("view_organisation", kwargs={'pk': self.pk, "extra": self.name}) - + return reverse("view_organisation", kwargs={"pk": self.pk, "extra": self.name}) class Event(models.Model): @@ -612,7 +680,7 @@ class Event(models.Model): blank=True, default=None, on_delete=models.SET_DEFAULT, - related_name="created_events" + related_name="created_events", ) imported_by_user = models.ForeignKey( User, @@ -621,7 +689,7 @@ class Event(models.Model): blank=True, default=None, on_delete=models.SET_DEFAULT, - related_name="imported_events" + related_name="imported_events", ) modified_by_user = models.ForeignKey( User, @@ -630,7 +698,7 @@ class Event(models.Model): blank=True, default=None, on_delete=models.SET_DEFAULT, - related_name="modified_events" + related_name="modified_events", ) moderated_by_user = models.ForeignKey( User, @@ -639,15 +707,13 @@ class Event(models.Model): blank=True, default=None, on_delete=models.SET_DEFAULT, - related_name="moderated_events" + related_name="moderated_events", ) recurrence_dtstart = models.DateTimeField(editable=False, blank=True, null=True) recurrence_dtend = models.DateTimeField(editable=False, blank=True, null=True) - title = models.CharField( - verbose_name=_("Title"), max_length=512 - ) + title = models.CharField(verbose_name=_("Title"), max_length=512) status = models.CharField( _("Status"), max_length=20, choices=STATUS.choices, default=STATUS.DRAFT @@ -661,9 +727,7 @@ class Event(models.Model): on_delete=models.SET_DEFAULT, ) - start_day = models.DateField( - verbose_name=_("Start day") - ) + start_day = models.DateField(verbose_name=_("Start day")) start_time = models.TimeField( verbose_name=_("Start time"), blank=True, @@ -675,9 +739,7 @@ class Event(models.Model): blank=True, null=True, ) - end_time = models.TimeField( - verbose_name=_("End time"), blank=True, null=True - ) + end_time = models.TimeField(verbose_name=_("End time"), blank=True, null=True) recurrences = recurrence.fields.RecurrenceField( verbose_name=_("Recurrence"), include_dtstart=False, blank=True, null=True @@ -698,7 +760,7 @@ class Event(models.Model): max_length=512, default="", null=True, - blank=True + blank=True, ) description = models.TextField( @@ -707,14 +769,14 @@ class Event(models.Model): null=True, ) - - organisers = models.ManyToManyField(Organisation, - related_name='organised_events', + organisers = models.ManyToManyField( + Organisation, + related_name="organised_events", verbose_name=_("Organisers"), help_text=_( "list of event organisers. Organizers will only be displayed if one of them does not normally use the venue." 
), - blank=True + blank=True, ) local_image = ResizedImageField( @@ -782,7 +844,9 @@ class Event(models.Model): self._messages = [] def get_import_messages(self): - return self.message_set.filter(message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS]).order_by("date") + return self.message_set.filter( + message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS] + ).order_by("date") def get_consolidated_end_day(self, intuitive=True): if intuitive: @@ -869,7 +933,14 @@ class Event(models.Model): models.Index(fields=["recurrences"]), models.Index(fields=["recurrence_dtstart", "recurrence_dtend"]), models.Index("start_time", Lower("title"), name="start_time title"), - models.Index("start_time", "start_day", "end_day", "end_time", Lower("title"), name="datetimes title") + models.Index( + "start_time", + "start_day", + "end_day", + "end_time", + Lower("title"), + name="datetimes title", + ), ] def chronology_dates(self): @@ -877,31 +948,69 @@ class Event(models.Model): def chronology(self, simple=False): c = [] - if self.modified_date: - c.append({ "timestamp": self.modified_date, "data": "modified_date", "user": self.modified_by_user, "is_date": True }) + if self.modified_date: + c.append( + { + "timestamp": self.modified_date, + "data": "modified_date", + "user": self.modified_by_user, + "is_date": True, + } + ) if self.moderated_date: - c.append({ "timestamp": self.moderated_date, "data": "moderated_date", "user" : self.moderated_by_user, "is_date": True}) + c.append( + { + "timestamp": self.moderated_date, + "data": "moderated_date", + "user": self.moderated_by_user, + "is_date": True, + } + ) if self.imported_date: - c.append({ "timestamp": self.imported_date, "data": "imported_date", "user": self.imported_by_user, "is_date": True }) + c.append( + { + "timestamp": self.imported_date, + "data": "imported_date", + "user": self.imported_by_user, + "is_date": True, + } + ) if self.created_date: if self.created_by_user: user = self.created_by_user else: if self.in_recurrent_import(): - user = _('recurrent import') + user = _("recurrent import") else: - user = _('a non authenticated user') + user = _("a non authenticated user") - c.append({ "timestamp": self.created_date + timedelta(milliseconds=-1), "data": "created_date", "user": user, "is_date": True}) + c.append( + { + "timestamp": self.created_date + timedelta(milliseconds=-1), + "data": "created_date", + "user": user, + "is_date": True, + } + ) if not simple: - c += [{ "timestamp": m.date, "data": m, "user": m.user, "is_date": False} for m in self.message_set.filter(spam=False)] + c += [ + {"timestamp": m.date, "data": m, "user": m.user, "is_date": False} + for m in self.message_set.filter(spam=False) + ] if self.other_versions: for o in self.other_versions.get_duplicated(): if o != self: - c += [{ "timestamp": m.date, "data": m, "user": m.user, "is_date": False} for m in o.message_set.filter(spam=False)] - + c += [ + { + "timestamp": m.date, + "data": m, + "user": m.user, + "is_date": False, + } + for m in o.message_set.filter(spam=False) + ] c.sort(key=lambda x: x["timestamp"]) @@ -912,7 +1021,11 @@ class Event(models.Model): return [] else: result = self.tags - result.sort(key=lambda x: emoji.demojize(remove_accents(x.lower()), delimiters=('000', ''))) + result.sort( + key=lambda x: emoji.demojize( + remove_accents(x.lower()), delimiters=("000", "") + ) + ) return result def get_all_tags(sort=True): @@ -924,7 +1037,11 @@ class Event(models.Model): cursor.execute(raw_query) result = [{"tag": row[0], 
"count": row[1]} for row in cursor] if sort: - result.sort(key=lambda x: emoji.demojize(remove_accents(x["tag"].lower()), delimiters=('000', ''))) + result.sort( + key=lambda x: emoji.demojize( + remove_accents(x["tag"].lower()), delimiters=("000", "") + ) + ) return result def is_draft(self): @@ -941,11 +1058,14 @@ class Event(models.Model): not self.pure_import() and (self.modified_date - self.created_date).total_seconds() > 1 ) - + def pure_import(self): if self.imported_date is None: return False - return self.modified_date is None or (self.modified_date - self.imported_date).total_seconds() <= 0 + return ( + self.modified_date is None + or (self.modified_date - self.imported_date).total_seconds() <= 0 + ) def local_version(self): return self.imported_date is None or self.modified() @@ -955,7 +1075,7 @@ class Event(models.Model): if self.other_versions: for o in self.other_versions.get_duplicated(): - if o.status == Event.STATUS.PUBLISHED and not o.reference_urls is None: + if o.status == Event.STATUS.PUBLISHED and o.reference_urls is not None: res += o.reference_urls res = list(set(res)) @@ -980,7 +1100,10 @@ class Event(models.Model): if self.exact_location is None: has_significant = True else: - has_significant = self.organisers.filter(~Q(principal_place=self.exact_location)).count() > 0 + has_significant = ( + self.organisers.filter(~Q(principal_place=self.exact_location)).count() + > 0 + ) if has_significant: return self.organisers.all() @@ -989,37 +1112,70 @@ class Event(models.Model): def get_nb_not_moderated(first_day, nb_mod_days=21, nb_classes=4): window_end = first_day + timedelta(days=nb_mod_days) - nb_not_moderated = Event.objects.filter(~Q(status=Event.STATUS.TRASH)). \ - filter(Q(start_day__gte=first_day)&Q(start_day__lte=window_end)). \ - filter( - Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True)).values("start_day").\ - annotate(not_moderated=Count("start_day", filter=Q(moderated_date__isnull=True))). \ - annotate(nb_events=Count("start_day")). 
\ - order_by("start_day").values("not_moderated", "nb_events", "start_day") - + nb_not_moderated = ( + Event.objects.filter(~Q(status=Event.STATUS.TRASH)) + .filter(Q(start_day__gte=first_day) & Q(start_day__lte=window_end)) + .filter( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + .values("start_day") + .annotate( + not_moderated=Count("start_day", filter=Q(moderated_date__isnull=True)) + ) + .annotate(nb_events=Count("start_day")) + .order_by("start_day") + .values("not_moderated", "nb_events", "start_day") + ) + max_not_moderated = max([x["not_moderated"] for x in nb_not_moderated]) if max_not_moderated == 0: max_not_moderated = 1 - nb_not_moderated_dict = dict([(x["start_day"], (x["not_moderated"], x["nb_events"])) for x in nb_not_moderated]) + nb_not_moderated_dict = dict( + [ + (x["start_day"], (x["not_moderated"], x["nb_events"])) + for x in nb_not_moderated + ] + ) # add missing dates date_list = [first_day + timedelta(days=x) for x in range(0, nb_mod_days)] - nb_not_moderated = [{"start_day": d, - "is_today": d == first_day, - "nb_events": nb_not_moderated_dict[d][1] if d in nb_not_moderated_dict else 0, - "not_moderated": nb_not_moderated_dict[d][0] if d in nb_not_moderated_dict else 0} for d in date_list] - nb_not_moderated = [ x | { "note": 0 if x["not_moderated"] == 0 else int((nb_classes - 1) * x["not_moderated"] / max_not_moderated) + 1 } for x in nb_not_moderated] - return [nb_not_moderated[x:x + 7] for x in range(0, len(nb_not_moderated), 7)] - + nb_not_moderated = [ + { + "start_day": d, + "is_today": d == first_day, + "nb_events": ( + nb_not_moderated_dict[d][1] if d in nb_not_moderated_dict else 0 + ), + "not_moderated": ( + nb_not_moderated_dict[d][0] if d in nb_not_moderated_dict else 0 + ), + } + for d in date_list + ] + nb_not_moderated = [ + x + | { + "note": ( + 0 + if x["not_moderated"] == 0 + else int((nb_classes - 1) * x["not_moderated"] / max_not_moderated) + + 1 + ) + } + for x in nb_not_moderated + ] + return [nb_not_moderated[x : x + 7] for x in range(0, len(nb_not_moderated), 7)] def nb_draft_events(): return Event.objects.filter(status=Event.STATUS.DRAFT).count() def get_qs_events_with_unkwnon_place(): - return Event.objects.filter(exact_location__isnull=True). \ - filter(~Q(status=Event.STATUS.TRASH)). 
\ - filter(Q(other_versions=None)|Q(other_versions__representative=F('pk'))) + return ( + Event.objects.filter(exact_location__isnull=True) + .filter(~Q(status=Event.STATUS.TRASH)) + .filter(Q(other_versions=None) | Q(other_versions__representative=F("pk"))) + ) def is_representative(self): return self.other_versions is None or self.other_versions.representative == self @@ -1032,21 +1188,21 @@ class Event(models.Model): def download_image(self): # first download file - if str(self.image) != '': + if str(self.image) != "": a = urlparse(self.image) basename = os.path.basename(a.path) try: basename = basename.decode() - except: + except Exception: pass - ext = basename.split('.')[-1] + ext = basename.split(".")[-1] filename = "%s.%s" % (uuid.uuid4(), ext) try: tmpfile, _ = urllib.request.urlretrieve(self.image) - except: + except Exception: return None # if the download is ok, then create the corresponding file object @@ -1054,7 +1210,7 @@ class Event(models.Model): def add_pending_organisers(self, organisers): self.pending_organisers = organisers - + def has_pending_organisers(self): return hasattr(self, "pending_organisers") @@ -1138,7 +1294,7 @@ class Event(models.Model): return request.build_absolute_uri(self.local_image.url) else: return self.local_image.url - except: + except Exception: pass if self.image: return self.image @@ -1148,7 +1304,6 @@ class Event(models.Model): def has_image_url(self): return self.get_image_url() is not None - # return a copy of the current object for each recurrence between first an last date (included) def get_recurrences_between(self, firstdate, lastdate): if not self.has_recurrences(): @@ -1190,16 +1345,12 @@ class Event(models.Model): stime = ( time.fromisoformat(self.start_time) if isinstance(self.start_time, str) - else time() - if self.start_time is None - else self.start_time + else time() if self.start_time is None else self.start_time ) etime = ( time.fromisoformat(self.end_time) if isinstance(self.end_time, str) - else time() - if self.end_time is None - else self.end_time + else time() if self.end_time is None else self.end_time ) self.recurrence_dtstart = datetime.combine(sday, stime) @@ -1226,7 +1377,7 @@ class Event(models.Model): self.recurrence_dtend += ( self.recurrences.dtend - self.recurrences.dtstart ) - except: + except Exception: self.recurrence_dtend = self.recurrence_dtstart def prepare_save(self): @@ -1235,12 +1386,14 @@ class Event(models.Model): self.update_recurrence_dtstartend() # if the image is defined but not locally downloaded - if self.image and (not self.local_image or not default_storage.exists(self.local_image.name)): + if self.image and ( + not self.local_image or not default_storage.exists(self.local_image.name) + ): self.download_image() # remove "/" from tags if self.tags: - self.tags = [t.replace('/', '-') for t in self.tags] + self.tags = [t.replace("/", "-") for t in self.tags] # in case of importation process if self.is_in_importation_process(): @@ -1254,14 +1407,18 @@ class Event(models.Model): if not self.category or self.category.name == Category.default_name: CategorisationRule.apply_rules(self) - def get_contributor_message(self): types = [Message.TYPE.FROM_CONTRIBUTOR, Message.TYPE.FROM_CONTRIBUTOR_NO_MSG] if self.other_versions is None or self.other_versions.representative is None: - return Message.objects.filter(related_event=self.pk, message_type__in=types, closed=False) + return Message.objects.filter( + related_event=self.pk, message_type__in=types, closed=False + ) else: - return 
Message.objects.filter(related_event__in=self.other_versions.get_duplicated(), message_type__in=types, closed=False)
-
+        return Message.objects.filter(
+            related_event__in=self.other_versions.get_duplicated(),
+            message_type__in=types,
+            closed=False,
+        )
 
     def notify_if_required(self, request):
         notif = False
@@ -1269,16 +1426,30 @@ class Event(models.Model):
         messages = self.get_contributor_message()
         if messages:
             for message in messages:
-                if message and not message.closed and message.email and message.email != "":
+                if (
+                    message
+                    and not message.closed
+                    and message.email
+                    and message.email != ""
+                ):
                     # send email
-                    context = {"sitename": Site.objects.get_current(request).name, 'event_title': self.title }
+                    context = {
+                        "sitename": Site.objects.get_current(request).name,
+                        "event_title": self.title,
+                    }
                     if self.status == Event.STATUS.PUBLISHED:
-                        context["url"] = request.build_absolute_uri(self.get_absolute_url())
-                        subject = _('Your event has been published')
-                        body = render_to_string("agenda_culturel/emails/published.txt", context)
+                        context["url"] = request.build_absolute_uri(
+                            self.get_absolute_url()
+                        )
+                        subject = _("Your event has been published")
+                        body = render_to_string(
+                            "agenda_culturel/emails/published.txt", context
+                        )
                     else:
-                        subject = _('Your message has not been retained')
-                        body = render_to_string("agenda_culturel/emails/retained.txt", context)
+                        subject = _("Your message has not been retained")
+                        body = render_to_string(
+                            "agenda_culturel/emails/retained.txt", context
+                        )
                     send_mail(subject, body, None, [message.email])
                     message.closed = True
@@ -1287,7 +1458,6 @@
 
         return notif
 
-
     def save(self, *args, **kwargs):
         self.prepare_save()
@@ -1304,15 +1474,15 @@
 
         # if it exists similar events, add this relation to the event
         if len(similar_events) != 0:
             self.set_other_versions(similar_events)
-
+
         # check if it's a clone (that will become representative)
-        clone = self.pk is None and not self.other_versions is None
+        clone = self.pk is None and self.other_versions is not None
 
         # check if we need to clean the other_versions
         if (
-            not clone and
-            self.pk and
-            self.other_versions is not None
+            not clone
+            and self.pk
+            and self.other_versions is not None
             and self.other_versions.nb_duplicated() == 1
         ):
             self.other_versions.delete()
@@ -1340,7 +1510,7 @@
         if clone:
             self.other_versions.representative = self
             self.other_versions.save()
-            # if we just clone a single event, its status is 
+            # if we just clone a single event, its status is
             # the same as the status of the current object
             if self.other_versions.get_duplicated().count() == 2:
                 for e in self.other_versions.get_duplicated():
@@ -1351,21 +1521,27 @@
 
     def from_structure(event_structure, import_source=None):
 
         # organisers is a manytomany relation thus cannot be initialised before creation of the event
-        organisers = event_structure.pop('organisers', None)
+        organisers = event_structure.pop("organisers", None)
 
         # supplementary information
-        email = event_structure.pop('email', None)
-        comments = event_structure.pop('comments', None)
-        warnings = event_structure.pop('warnings', [])
+        email = event_structure.pop("email", None)
+        comments = event_structure.pop("comments", None)
+        warnings = event_structure.pop("warnings", [])
 
         for w in warnings:
             if w == Extractor.Warning.NO_START_DATE:
-                event_structure["title"] += " - " + _('Warning') + ": " + _('the date has not been imported correctly.')
-
+                event_structure["title"] += (
+                    " - "
+                    + _("Warning")
+                    + ": "
+                    + _("the date has not been imported correctly.")
+                )
 
         if "category" in event_structure and event_structure["category"] is not None:
             try:
                 event_structure["category"] = Category.objects.get(
-                    name__unaccent__icontains=remove_accents(event_structure["category"].lower())
+                    name__unaccent__icontains=remove_accents(
+                        event_structure["category"].lower()
+                    )
                 )
             except Category.DoesNotExist:
                 event_structure["category"] = Category.get_default_category()
@@ -1438,36 +1614,57 @@ class Event(models.Model):
         result = Event(**event_structure)
         result.add_pending_organisers(organisers)
         if email or comments:
-            has_comments = not comments in ["", None]
-            result.add_message(Message(subject=_('during import process'),
-                                email=email,
-                                message=comments,
-                                closed=False,
-                                message_type=Message.TYPE.FROM_CONTRIBUTOR if has_comments else Message.TYPE.FROM_CONTRIBUTOR_NO_MSG))
+            has_comments = comments not in ["", None]
+            result.add_message(
+                Message(
+                    subject=_("during import process"),
+                    email=email,
+                    message=comments,
+                    closed=False,
+                    message_type=(
+                        Message.TYPE.FROM_CONTRIBUTOR
+                        if has_comments
+                        else Message.TYPE.FROM_CONTRIBUTOR_NO_MSG
+                    ),
+                )
+            )
 
         for w in warnings:
             if w == Extractor.Warning.NO_START_DATE:
                 result.set_invalid_start_date()
-                result.add_message(Message(subject=_('warning'),
-                                    closed=False,
-                                    message=_('the date has not been imported correctly.'),
-                                    message_type=Message.TYPE.WARNING))
+                result.add_message(
+                    Message(
+                        subject=_("warning"),
+                        closed=False,
+                        message=_("the date has not been imported correctly."),
+                        message_type=Message.TYPE.WARNING,
+                    )
+                )
             if w == Extractor.Warning.NO_TITLE:
                 result.set_invalid_title()
-                result.add_message(Message(subject=_('warning'),
-                                    closed=False,
-                                    message=_('the title has not been imported correctly.'),
-                                    message_type=Message.TYPE.WARNING))
+                result.add_message(
+                    Message(
+                        subject=_("warning"),
+                        closed=False,
+                        message=_("the title has not been imported correctly."),
+                        message_type=Message.TYPE.WARNING,
+                    )
+                )
             if w == Extractor.Warning.NOT_FOUND:
                 result.status = Event.STATUS.DRAFT
                 result.set_is_not_found_import()
-                result.add_message(Message(subject=_('warning'),
-                                    closed=False,
-                                    message=_('The import was unable to find an event in the page.'),
-                                    message_type=Message.TYPE.WARNING))
+                result.add_message(
+                    Message(
+                        subject=_("warning"),
+                        closed=False,
+                        message=_(
+                            "The import was unable to find an event in the page."
+                        ),
+                        message_type=Message.TYPE.WARNING,
+                    )
+                )
 
         return result
 
-
     def find_similar_events(self):
         start_time_test = Q(start_time=self.start_time)
@@ -1476,12 +1673,16 @@ class Event(models.Model):
             if isinstance(self.start_time, str):
                 self.start_time = time.fromisoformat(self.start_time)
             interval = (
-                time(self.start_time.hour - 1, self.start_time.minute)
-                if self.start_time.hour >= 1
-                else time(0, 0),
-                time(self.start_time.hour + 1, self.start_time.minute)
-                if self.start_time.hour < 23
-                else time(23, 59),
+                (
+                    time(self.start_time.hour - 1, self.start_time.minute)
+                    if self.start_time.hour >= 1
+                    else time(0, 0)
+                ),
+                (
+                    time(self.start_time.hour + 1, self.start_time.minute)
+                    if self.start_time.hour < 23
+                    else time(23, 59)
+                ),
             )
             start_time_test = start_time_test | Q(start_time__range=interval)
@@ -1504,7 +1705,7 @@ class Event(models.Model):
             if self.uuids is None or len(self.uuids) == 0
             else Event.objects.filter(uuids__contains=self.uuids)
         )
-
+
     def get_updateable_uuid(self):
         if self.uuids and len(self.uuids) > 0:
             for s in self.uuids:
@@ -1513,7 +1714,7 @@ class Event(models.Model):
         return None
 
     def is_updateable(self):
-        return not self.get_updateable_uuid() is None
+        return self.get_updateable_uuid() is not None
 
     def split_uuid(uuid):
         els = uuid.split(":")
@@ -1539,7 +1740,7 @@ class Event(models.Model):
                 if Event.is_ancestor_uuid(s_uuid, e_uuid):
                     return True
         return False
-
+
     def same_uuid(self, event):
         if self.uuids is None or event.uuids is None:
             return False
@@ -1553,7 +1754,11 @@ class Event(models.Model):
         if self.other_versions is None:
             return []
         else:
-            return [e for e in self.other_versions.get_duplicated() if e.pk != self.pk and e.status != Event.STATUS.TRASH]
+            return [
+                e
+                for e in self.other_versions.get_duplicated()
+                if e.pk != self.pk and e.status != Event.STATUS.TRASH
+            ]
 
     def get_other_versions(self):
         if self.other_versions is None:
@@ -1562,7 +1767,11 @@ class Event(models.Model):
             return [e for e in self.other_versions.get_duplicated() if e.pk != self.pk]
 
     def masked(self):
-        return self.other_versions and self.other_versions.representative != None and self.other_versions.representative.pk != self.pk
+        return (
+            self.other_versions
+            and self.other_versions.representative is not None
+            and self.other_versions.representative.pk != self.pk
+        )
 
     def get_organisers(self):
         if self.pk:
@@ -1578,12 +1787,11 @@ class Event(models.Model):
             return self.pending_organisers
         else:
             return []
-
 
     def get_comparison(events, all=True):
         result = []
         for attr in Event.data_fields(all=all, local_img=False, exact_location=False):
-            if attr == 'organisers':
+            if attr == "organisers":
                 values = [[str(o) for o in e.get_organisers()] for e in events]
             else:
                 values = [getattr(e, attr) for e in events]
@@ -1605,18 +1813,20 @@ class Event(models.Model):
         res = Event.get_comparison([self, event], all)
         for r in res:
             if not r["similar"]:
-                if r["key"] == "title" and (self.has_invalid_title() or event.has_invalid_title()):
+                if r["key"] == "title" and (
+                    self.has_invalid_title() or event.has_invalid_title()
+                ):
                     continue
-                if r["key"] == "start_day" and (self.has_invalid_start_date() or event.has_invalid_start_date()):
+                if r["key"] == "start_day" and (
+                    self.has_invalid_start_date() or event.has_invalid_start_date()
+                ):
                     continue
                 return False
         return True
 
     def set_other_versions(self, events, force_non_fixed=False):
         # get existing groups
-        groups = list(
-            set([e.other_versions for e in events] + [self.other_versions])
-        )
+        groups = list(set([e.other_versions for e in events] + [self.other_versions]))
         groups = [g for g in groups if g is not None]
 
         # do we have to create a new group?
@@ -1690,10 +1900,12 @@ class Event(models.Model):
 
     def update(self, other, all):
         # integrate pending organisers
-        if other.has_pending_organisers() and not other.pending_organisers is None:
+        if other.has_pending_organisers() and other.pending_organisers is not None:
             self.organisers.set(other.pending_organisers)
-
-        logger.warning("process update " + other.title + ' ' + str(other.has_invalid_start_date()))
+
+        logger.info(
+            "process update " + other.title + " " + str(other.has_invalid_start_date())
+        )
 
         # set attributes
         for attr in Event.data_fields(all=all, no_m2m=True):
             if attr == "title" and other.has_invalid_title():
@@ -1709,9 +1921,9 @@ class Event(models.Model):
         # add a possible missing uuid
         if self.uuids is None:
             self.uuids = []
-        for uuid in other.uuids:
-            if uuid not in self.uuids:
-                self.uuids.append(uuid)
+        for uuide in other.uuids:
+            if uuide not in self.uuids:
+                self.uuids.append(uuide)
 
         # add possible missing sources
         if other.import_sources:
@@ -1776,19 +1988,34 @@ class Event(models.Model):
                     if same_imported.other_versions:
                         if same_imported.status != Event.STATUS.TRASH:
                             if same_imported.other_versions.is_published():
-                                if same_imported.other_versions.representative != same_imported:
-                                    same_imported.other_versions.representative = None
+                                if (
+                                    same_imported.other_versions.representative
+                                    != same_imported
+                                ):
+                                    same_imported.other_versions.representative = (
+                                        None
+                                    )
                                     same_imported.other_versions.save()
 
                     # add a message to explain the update
                     if not event.is_not_found_import():
-                        res = [r for r in Event.get_comparison([event, same_imported], all) if not r["similar"]]
+                        res = [
+                            r
+                            for r in Event.get_comparison(
+                                [event, same_imported], all
+                            )
+                            if not r["similar"]
+                        ]
                         if len(res) > 0:
-                            txt = _("Updated field(s): ") + ", ".join([r["key"] for r in res])
-                            msg = Message(subject=_('Update'),
-                                    name=_('update process'),
-                                    related_event=same_imported,
-                                    message=txt,
-                                    message_type=Message.TYPE.UPDATE_PROCESS)
+                            txt = _("Updated field(s): ") + ", ".join(
+                                [r["key"] for r in res]
+                            )
+                            msg = Message(
+                                subject=_("Update"),
+                                name=_("update process"),
+                                related_event=same_imported,
+                                message=txt,
+                                message_type=Message.TYPE.UPDATE_PROCESS,
+                            )
                             msg.save()
 
                     new_image = same_imported.image != event.image
@@ -1799,7 +2026,9 @@ class Event(models.Model):
                         same_imported.status = Event.STATUS.TRASH
                     else:
                         # we only update local information if it's a pure import and has no moderated_date
-                        same_imported.update(event, pure and same_imported.moderated_date is None)
+                        same_imported.update(
+                            event, pure and same_imported.moderated_date is None
+                        )
                     # save messages
                     if event.has_message():
                         for msg in event.get_messages():
@@ -1808,15 +2037,22 @@ class Event(models.Model):
                     same_imported.set_in_importation_process()
                     same_imported.prepare_save()
                     # fix missing or updated files
-                    if same_imported.local_image and (not default_storage.exists(same_imported.local_image.name) or new_image):
+                    if same_imported.local_image and (
+                        not default_storage.exists(same_imported.local_image.name)
+                        or new_image
+                    ):
                        same_imported.download_image()
                         same_imported.save(update_fields=["local_image"])
+
                     to_update.append(same_imported)
                 else:
                     # otherwise, the new event possibly a duplication of the remaining others.
                     # check if it should be published
-                    trash = len([e for e in same_events if e.status != Event.STATUS.TRASH]) == 0
+                    trash = (
+                        len([e for e in same_events if e.status != Event.STATUS.TRASH])
+                        == 0
+                    )
                     if trash:
                         event.status = Event.STATUS.TRASH
                     event.set_other_versions(same_events, force_non_fixed=not trash)
@@ -1839,10 +2075,14 @@ class Event(models.Model):
                 if e.is_event_long_duration():
                     e.status = Event.STATUS.DRAFT
                     e.add_message(
-                        Message(subject=_('Import'),
-                            name=_('import process'),
-                            message=_("The duration of the event is a little too long for direct publication. Moderators can choose to publish it or not."),
-                            message_type=Message.TYPE.IMPORT_PROCESS)
+                        Message(
+                            subject=_("Import"),
+                            name=_("import process"),
+                            message=_(
+                                "The duration of the event is a little too long for direct publication. Moderators can choose to publish it or not."
+                            ),
+                            message_type=Message.TYPE.IMPORT_PROCESS,
+                        )
                     )
 
         # then import all the new events
@@ -1901,13 +2141,12 @@ class Event(models.Model):
         self.current_date = date
 
     def get_start_end_datetimes(self, day):
-        start_h = time().min
+        time().min
         if self.start_day == day:
             if self.start_time is None:
                 dtstart = datetime.combine(self.start_day, time().min)
             else:
                 dtstart = datetime.combine(self.start_day, self.start_time)
-                start_h = self.start_time
         else:
             dtstart = datetime.combine(day, time().min)
@@ -1928,7 +2167,20 @@ class Event(models.Model):
 
     def get_concurrent_events(self, remove_same_dup=True):
         day = self.current_date if hasattr(self, "current_date") else self.start_day
-        day_events = CalendarDay(day, qs = Event.objects.filter(status=Event.STATUS.PUBLISHED).only("start_day", "start_time", "title", "category", "other_versions", "recurrences", "end_day", "end_time", "uuids")).get_events()
+        day_events = CalendarDay(
+            day,
+            qs=Event.objects.filter(status=Event.STATUS.PUBLISHED).only(
+                "start_day",
+                "start_time",
+                "title",
+                "category",
+                "other_versions",
+                "recurrences",
+                "end_day",
+                "end_time",
+                "uuids",
+            ),
+        ).get_events()
         return [
             e
             for e in day_events
@@ -1950,7 +2202,7 @@ class Event(models.Model):
         cal.add("version", "2.0")
 
         for event in events:
-            ed = event.get_consolidated_end_day()
+            event.get_consolidated_end_day()
            eventIcal = icalEvent()
             # mapping
             if event.start_time is None:
@@ -1973,7 +2225,7 @@ class Event(models.Model):
                         event.start_time.minute,
                     ),
                 )
-            if not event.end_day is None:
+            if event.end_day is not None:
                 if event.end_time is None:
                     eventIcal.add(
                         "dtend",
@@ -1996,13 +2248,19 @@ class Event(models.Model):
                 )
             eventIcal.add("summary", event.title)
             eventIcal.add("name", event.title)
-            url = ("\n" + event.reference_urls[0]) if event.reference_urls and len(event.reference_urls) > 0 else ""
-            description = event.description if event.description else ""
-            eventIcal.add(
-                "description", description + url
+            url = (
+                ("\n" + event.reference_urls[0])
+                if event.reference_urls and len(event.reference_urls) > 0
+                else ""
             )
-            if not event.local_image is None and event.local_image != "":
-                eventIcal.add('image', request.build_absolute_uri(event.local_image), parameters={'VALUE': 'URI'})
+            description = event.description if event.description else ""
+            eventIcal.add("description", description + url)
+            if event.local_image is not None and event.local_image != "":
+                eventIcal.add(
+                    "image",
+                    request.build_absolute_uri(event.local_image),
+                    parameters={"VALUE": "URI"},
+                )
             eventIcal.add("location", event.exact_location or event.location)
             cal.add_component(eventIcal)
@@ -2011,27 +2269,54 @@ class Event(models.Model):
 
     def get_count_modification(when):
         start = datetime(when[0].year, when[0].month, when[0].day)
-        end = start + timedelta(days=when[1]) 
+        end = start + timedelta(days=when[1])
 
-        recurrentimport = RecurrentImport.objects.filter(source=OuterRef("import_sources__0")).order_by().annotate(count=Func(F('id'), function='Count')).values('count')
+        recurrentimport = (
+            RecurrentImport.objects.filter(source=OuterRef("import_sources__0"))
+            .order_by()
+            .annotate(count=Func(F("id"), function="Count"))
+            .values("count")
+        )
 
-        nb_manual_creation = Event.objects.filter(created_date__gte=start, created_date__lt=end, imported_date__isnull=True).filter(Q(uuids__len=0)|Q(uuids=None)).count()
-        nb_local_copies = Event.objects.filter(created_date__gte=start, created_date__lt=end, imported_date__isnull=True, uuids__len__gt=0).count()
+        nb_manual_creation = (
+            Event.objects.filter(
+                created_date__gte=start,
+                created_date__lt=end,
+                imported_date__isnull=True,
+            )
+            .filter(Q(uuids__len=0) | Q(uuids=None))
+            .count()
+        )
+        nb_local_copies = Event.objects.filter(
+            created_date__gte=start,
+            created_date__lt=end,
+            imported_date__isnull=True,
+            uuids__len__gt=0,
+        ).count()
 
-        imported = Event.objects.filter(created_date__gte=start, created_date__lt=end, imported_date__isnull=False, uuids__len__gt=0).annotate(nb_rimport=Subquery(recurrentimport))
-
-        nb_manual_import = imported.filter(Q(import_sources__len=0)|Q(nb_rimport=0)).count()
+        imported = Event.objects.filter(
+            created_date__gte=start,
+            created_date__lt=end,
+            imported_date__isnull=False,
+            uuids__len__gt=0,
+        ).annotate(nb_rimport=Subquery(recurrentimport))
+
+        nb_manual_import = imported.filter(
+            Q(import_sources__len=0) | Q(nb_rimport=0)
+        ).count()
         nb_first_import = imported.filter(nb_rimport__gt=0).count()
 
-        nb_moderated = Event.objects.filter(moderated_date__gte=start, moderated_date__lt=end).count()
+        nb_moderated = Event.objects.filter(
+            moderated_date__gte=start, moderated_date__lt=end
+        ).count()
 
-        return {"when": (start, start + timedelta(days=when[1] - 1)),
-                "nb_manual_creation": nb_manual_creation,
-                "nb_manual_import": nb_manual_import,
-                "nb_first_import": nb_first_import,
-                "nb_local_copies": nb_local_copies,
-                "nb_moderated": nb_moderated,
+        return {
+            "when": (start, start + timedelta(days=when[1] - 1)),
+            "nb_manual_creation": nb_manual_creation,
+            "nb_manual_import": nb_manual_import,
+            "nb_first_import": nb_first_import,
+            "nb_local_copies": nb_local_copies,
+            "nb_moderated": nb_moderated,
        }
 
     def get_count_modifications(when_list):
@@ -2046,21 +2331,21 @@ class Message(models.Model):
         UPDATE_PROCESS = "update_process", _("Update process")
         CONTACT_FORM = "contact_form", _("Contact form")
         EVENT_REPORT = "event_report", _("Event report")
-        FROM_CONTRIBUTOR_NO_MSG = "from_contrib_no_msg", _("From contributor (without message)")
+        FROM_CONTRIBUTOR_NO_MSG = "from_contrib_no_msg", _(
+            "From contributor (without message)"
+        )
         WARNING = "warning", _("Warning")
 
     class Meta:
         verbose_name = _("Message")
         verbose_name_plural = _("Messages")
         indexes = [
-            models.Index(fields=['related_event']),
-            models.Index(fields=['user']),
-            models.Index(fields=['date']),
-            models.Index(fields=['spam', 'closed']),
+            models.Index(fields=["related_event"]),
+            models.Index(fields=["user"]),
+            models.Index(fields=["date"]),
+            models.Index(fields=["spam", "closed"]),
         ]
-
-
 
     subject = models.CharField(
         verbose_name=_("Subject"),
         help_text=_("The subject of your message"),
@@ -2098,7 +2383,9 @@ class Message(models.Model):
         blank=True,
         null=True,
     )
-    message = CKEditor5Field(verbose_name=_("Message"), help_text=_("Your message"), blank=True)
+    message = CKEditor5Field(
+        verbose_name=_("Message"), help_text=_("Your message"), blank=True
+    )
 
     date = models.DateTimeField(auto_now_add=True)
@@ -2127,11 +2414,22 @@ class Message(models.Model):
         verbose_name=_("Type"),
         max_length=20,
         choices=TYPE.choices,
-        default=None, null=True
+        default=None,
+        null=True,
    )
 
     def nb_open_messages():
-        return Message.objects.filter(Q(closed=False)&Q(spam=False)&Q(message_type__in=[Message.TYPE.CONTACT_FORM, Message.TYPE.EVENT_REPORT, Message.TYPE.FROM_CONTRIBUTOR])).count()
+        return Message.objects.filter(
+            Q(closed=False)
+            & Q(spam=False)
+            & Q(
+                message_type__in=[
+                    Message.TYPE.CONTACT_FORM,
+                    Message.TYPE.EVENT_REPORT,
+                    Message.TYPE.FROM_CONTRIBUTOR,
+                ]
+            )
+        ).count()
 
     def get_absolute_url(self):
         return reverse("message", kwargs={"pk": self.pk})
@@ -2156,13 +2454,13 @@ class RecurrentImport(models.Model):
         FBEVENTS = "Facebook events", _("Événements d'une page FB")
         BILLETTERIECF = "Billetterie CF", _("Billetterie Clermont-Ferrand")
         ARACHNEE = "arachnee", _("Arachnée concert")
-        LERIO = "rio", _('Le Rio')
-        LARAYMONDE = "raymonde", _('La Raymonde')
-        APIDAE = 'apidae', _('Agenda apidae tourisme')
-        IGUANA = 'iguana', _('Agenda iguana (médiathèques)')
-        MILLEFORMES = 'Mille formes', _('Mille formes')
-        AMISCERISES = 'Amis cerises', _('Les Amis du Temps des Cerises')
-        MOBILIZON = 'Mobilizon', _('Mobilizon')
+        LERIO = "rio", _("Le Rio")
+        LARAYMONDE = "raymonde", _("La Raymonde")
+        APIDAE = "apidae", _("Agenda apidae tourisme")
+        IGUANA = "iguana", _("Agenda iguana (médiathèques)")
+        MILLEFORMES = "Mille formes", _("Mille formes")
+        AMISCERISES = "Amis cerises", _("Les Amis du Temps des Cerises")
+        MOBILIZON = "Mobilizon", _("Mobilizon")
 
     class DOWNLOADER(models.TextChoices):
         SIMPLE = "simple", _("simple")
@@ -2234,7 +2532,7 @@ class RecurrentImport(models.Model):
     forceLocation = models.BooleanField(
         verbose_name=_("Force location"),
         help_text=_("force location even if another is detected."),
-        default=False
+        default=False,
     )
 
     defaultOrganiser = models.ForeignKey(
@@ -2298,9 +2596,9 @@ class BatchImportation(models.Model):
         verbose_name_plural = _("Batch importations")
         permissions = [("run_batchimportation", "Can run a batch importation")]
         indexes = [
-            models.Index(fields=['created_date']),
-            models.Index(fields=['status']),
-            models.Index(fields=['created_date', 'recurrentImport']),
+            models.Index(fields=["created_date"]),
+            models.Index(fields=["status"]),
+            models.Index(fields=["created_date", "recurrentImport"]),
         ]
 
     created_date = models.DateTimeField(auto_now_add=True)
@@ -2317,9 +2615,7 @@ class BatchImportation(models.Model):
 
     url_source = models.URLField(
         verbose_name=_("URL (if not recurrent import)"),
-        help_text=_(
-            "Source URL if no RecurrentImport is associated."
-        ),
+        help_text=_("Source URL if no RecurrentImport is associated."),
         max_length=1024,
         blank=True,
         null=True,
@@ -2428,7 +2724,7 @@ class CategorisationRule(models.Model):
     # all rules are applied, starting from the first to the last
     def apply_rules(event):
         c = CategorisationRule.get_category_from_rules(event)
-
+
         if c is None:
             return 0
         else:
@@ -2438,7 +2734,11 @@ class CategorisationRule(models.Model):
     def get_category_from_rules(event):
         cats = defaultdict(lambda: 0)
         if CategorisationRule.rules is None:
-            CategorisationRule.rules = CategorisationRule.objects.all().select_related("category").select_related("place")
+            CategorisationRule.rules = (
+                CategorisationRule.objects.all()
+                .select_related("category")
+                .select_related("place")
+            )
 
         for rule in CategorisationRule.rules:
             if rule.match(event):
@@ -2487,7 +2787,4 @@ class CategorisationRule(models.Model):
             if not event.exact_location == self.place:
                 return False
 
         return True
-
-
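[Editor's aside, not part of the patch: `get_count_modification` above counts matching `RecurrentImport` rows per event with a correlated subquery rather than a join, presumably because `import_sources__0` (the first element of an array field) is not a relation. A minimal sketch of the same technique, reusing the names from the hunk; the final filter is simplified for illustration:]

    from django.db.models import F, Func, OuterRef, Subquery

    # For each Event, count the RecurrentImport rows whose source equals the
    # event's first import source (evaluated per outer row).
    recurrentimport = (
        RecurrentImport.objects.filter(source=OuterRef("import_sources__0"))
        .order_by()  # clear default ordering so COUNT() aggregates over all rows
        .annotate(count=Func(F("id"), function="Count"))
        .values("count")
    )

    imported = Event.objects.annotate(nb_rimport=Subquery(recurrentimport))
    nb_manual_import = imported.filter(nb_rimport=0).count()  # no recurrent import

diff --git a/src/agenda_culturel/settings/base.py b/src/agenda_culturel/settings/base.py
index d085539..5169baf 100644
--- a/src/agenda_culturel/settings/base.py
+++ b/src/agenda_culturel/settings/base.py
@@ -15,7 +15,7 @@ DEBUG = os_getenv("DEBUG", "true").lower() in ["True", "true", "1", "yes", "y"]
 ALLOWED_HOSTS = os_getenv("ALLOWED_HOSTS", "localhost").split(",")
 
 if DEBUG:
-    ALLOWED_HOSTS = ALLOWED_HOSTS + ['testserver']
+    ALLOWED_HOSTS = ALLOWED_HOSTS + ["testserver"]
 
 if DEBUG:
     CSRF_TRUSTED_ORIGINS = os_getenv("CSRF_TRUSTED_ORIGINS", "http://localhost").split(
@@ -30,8 +30,8 @@ else:
         ","
     )
 
-ADMINS = [tuple(a.split(',')) for a in os_getenv("ADMINS", "").split(";")]
-MANAGERS = [tuple(a.split(',')) for a in os_getenv("MANAGERS", "").split(";")]
+ADMINS = [tuple(a.split(",")) for a in os_getenv("ADMINS", "").split(";")]
+MANAGERS = [tuple(a.split(",")) for a in os_getenv("MANAGERS", "").split(";")]
 
 SERVER_EMAIL = os_getenv("SERVER_EMAIL", "")
@@ -55,7 +55,7 @@ INSTALLED_APPS = [
     "compressor",
     "django_ckeditor_5",
     "recurrence",
-    'django.contrib.gis',
+    "django.contrib.gis",
     "location_field.apps.DefaultConfig",
     "django.contrib.postgres",
     "robots",
@@ -63,8 +63,8 @@ INSTALLED_APPS = [
     "cache_cleaner",
     "honeypot",
     "template_profiler_panel",
-    'django_cleanup.apps.CleanupConfig',
-    'django_unused_media',
+    "django_cleanup.apps.CleanupConfig",
+    "django_unused_media",
 ]
 
 HONEYPOT_FIELD_NAME = "alias_name"
@@ -81,19 +81,29 @@ MIDDLEWARE = [
     "django.contrib.auth.middleware.AuthenticationMiddleware",
     "django.contrib.messages.middleware.MessageMiddleware",
     "django.middleware.clickjacking.XFrameOptionsMiddleware",
-    "debug_toolbar.middleware.DebugToolbarMiddleware",
-    'django.contrib.sites.middleware.CurrentSiteMiddleware',
-#    "django.middleware.cache.UpdateCacheMiddleware",
-#    "django.middleware.common.CommonMiddleware",
-#    "django.middleware.cache.FetchFromCacheMiddleware",
+    "debug_toolbar.middleware.DebugToolbarMiddleware",
+    "django.contrib.sites.middleware.CurrentSiteMiddleware",
+    # "django.middleware.cache.UpdateCacheMiddleware",
+    # "django.middleware.common.CommonMiddleware",
+    # "django.middleware.cache.FetchFromCacheMiddleware",
 ]
 
 CKEDITOR_5_CONFIGS = {
-'default': {
-    'toolbar': ['heading', '|', 'bold', 'italic', 'link', '|',
-                'bulletedList', 'numberedList', 'blockQuote',],
-    'language': 'fr',
-    },}
+    "default": {
+        "toolbar": [
+            "heading",
+            "|",
+            "bold",
+            "italic",
+            "link",
+            "|",
+            "bulletedList",
+            "numberedList",
+            "blockQuote",
+        ],
+        "language": "fr",
+    },
+}
 
 ROOT_URLCONF = "agenda_culturel.urls"
@@ -116,7 +126,7 @@ TEMPLATES = [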
"django.template.loaders.app_directories.Loader", ], ), - ] + ], }, }, ] @@ -128,7 +138,7 @@ WSGI_APPLICATION = "agenda_culturel.wsgi.application" DATABASES = { "default": { - 'ENGINE': 'django.contrib.gis.db.backends.postgis', + "ENGINE": "django.contrib.gis.db.backends.postgis", "NAME": os_getenv("POSTGRES_DB", "postgres"), "USER": os_getenv("POSTGRES_USER", "postgres"), "PASSWORD": os_getenv("POSTGRES_PASSWORD", "postgres"), @@ -166,9 +176,7 @@ USE_I18N = True USE_TZ = False -LANGUAGES = ( - ("fr", _("French")), -) +LANGUAGES = (("fr", _("French")),) # Auth @@ -260,13 +268,17 @@ LOCATION_FIELD = { # stop robots ROBOTS_USE_SITEMAP = False -ROBOTS_SITE_BY_REQUEST = 'cached-sitemap' +ROBOTS_SITE_BY_REQUEST = "cached-sitemap" # debug if DEBUG: import socket + hostname, _, ips = socket.gethostbyname_ex(socket.gethostname()) - INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"] + INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + [ + "127.0.0.1", + "10.0.2.2", + ] # logging @@ -297,20 +309,20 @@ LOGGING = { # debug DEBUG_TOOLBAR_PANELS = [ - 'debug_toolbar.panels.history.HistoryPanel', - 'debug_toolbar.panels.versions.VersionsPanel', - 'debug_toolbar.panels.timer.TimerPanel', - 'debug_toolbar.panels.settings.SettingsPanel', - 'debug_toolbar.panels.headers.HeadersPanel', - 'debug_toolbar.panels.request.RequestPanel', - 'debug_toolbar.panels.sql.SQLPanel', - 'debug_toolbar.panels.staticfiles.StaticFilesPanel', - 'debug_toolbar.panels.templates.TemplatesPanel', - 'debug_toolbar.panels.alerts.AlertsPanel', - 'debug_toolbar.panels.cache.CachePanel', - 'debug_toolbar.panels.signals.SignalsPanel', - 'debug_toolbar.panels.redirects.RedirectsPanel', - 'debug_toolbar.panels.profiling.ProfilingPanel', + "debug_toolbar.panels.history.HistoryPanel", + "debug_toolbar.panels.versions.VersionsPanel", + "debug_toolbar.panels.timer.TimerPanel", + "debug_toolbar.panels.settings.SettingsPanel", + "debug_toolbar.panels.headers.HeadersPanel", + "debug_toolbar.panels.request.RequestPanel", + "debug_toolbar.panels.sql.SQLPanel", + "debug_toolbar.panels.staticfiles.StaticFilesPanel", + "debug_toolbar.panels.templates.TemplatesPanel", + "debug_toolbar.panels.alerts.AlertsPanel", + "debug_toolbar.panels.cache.CachePanel", + "debug_toolbar.panels.signals.SignalsPanel", + "debug_toolbar.panels.redirects.RedirectsPanel", + "debug_toolbar.panels.profiling.ProfilingPanel", "template_profiler_panel.panels.template.TemplateProfilerPanel", ] @@ -319,6 +331,6 @@ DJANGORESIZED_DEFAULT_SIZE = [1200, 1200] DJANGORESIZED_DEFAULT_SCALE = 0.5 DJANGORESIZED_DEFAULT_QUALITY = 75 DJANGORESIZED_DEFAULT_KEEP_META = True -DJANGORESIZED_DEFAULT_FORCE_FORMAT = 'JPEG' -DJANGORESIZED_DEFAULT_FORMAT_EXTENSIONS = {'JPEG': ".jpg"} -DJANGORESIZED_DEFAULT_NORMALIZE_ROTATION = True \ No newline at end of file +DJANGORESIZED_DEFAULT_FORCE_FORMAT = "JPEG" +DJANGORESIZED_DEFAULT_FORMAT_EXTENSIONS = {"JPEG": ".jpg"} +DJANGORESIZED_DEFAULT_NORMALIZE_ROTATION = True diff --git a/src/agenda_culturel/settings/dev.py b/src/agenda_culturel/settings/dev.py index 1517776..669689a 100644 --- a/src/agenda_culturel/settings/dev.py +++ b/src/agenda_culturel/settings/dev.py @@ -1,3 +1,3 @@ from .base import * # noqa -SITE_ID=1 \ No newline at end of file +SITE_ID = 1 diff --git a/src/agenda_culturel/sitemaps.py b/src/agenda_culturel/sitemaps.py index d6b2e7d..a3ab15e 100644 --- a/src/agenda_culturel/sitemaps.py +++ b/src/agenda_culturel/sitemaps.py @@ -3,17 +3,27 @@ from django.urls import reverse from .models import 
Category + class StaticViewSitemap(sitemaps.Sitemap): priority = 0.5 protocol = "https" changefreq = "daily" def items(self): - return ["home", "cette_semaine", "ce_mois_ci", "aujourdhui", "a_venir", "about", "contact"] + return [ + "home", + "cette_semaine", + "ce_mois_ci", + "aujourdhui", + "a_venir", + "about", + "contact", + ] def location(self, item): return reverse(item) + class HomeCategorySitemap(sitemaps.Sitemap): priority = 0.5 protocol = "https" @@ -21,22 +31,24 @@ class HomeCategorySitemap(sitemaps.Sitemap): path = "home_category" def items(self): - result = [] return Category.objects.values_list("slug", flat=True) def location(self, item): - return reverse(self.path, kwargs={'cat': item}) + return reverse(self.path, kwargs={"cat": item}) + class MonthCategorySitemap(HomeCategorySitemap): priority = 0.3 protocol = "https" path = "ce_mois_ci_category" + class WeekCategorySitemap(HomeCategorySitemap): priority = 0.4 protocol = "https" path = "cette_semaine_category" + class UpcomingCategorySitemap(HomeCategorySitemap): priority = 0.4 protocol = "https" diff --git a/src/agenda_culturel/templatetags/cat_extra.py b/src/agenda_culturel/templatetags/cat_extra.py index 3098062..0fe4e86 100644 --- a/src/agenda_culturel/templatetags/cat_extra.py +++ b/src/agenda_culturel/templatetags/cat_extra.py @@ -4,7 +4,7 @@ from django.core.cache import cache from agenda_culturel.models import Category import colorsys -from .utils_extra import * +from .utils_extra import picto_from_name import logging @@ -48,12 +48,12 @@ def get_relative_luminance(hex_color): def adjust_lightness_saturation(hex_color, shift_lightness=0.0, scale_saturation=1): rgb = html_to_rgb(hex_color) - h, l, s = colorsys.rgb_to_hls(*rgb) + h, lg, s = colorsys.rgb_to_hls(*rgb) - l += shift_lightness + lg += shift_lightness s *= scale_saturation - r, g, b = colorsys.hls_to_rgb(h, l, s) + r, g, b = colorsys.hls_to_rgb(h, lg, s) return rgb_to_html([r, g, b]) @@ -73,12 +73,13 @@ def background_color_adjust_color(color, alpha=1): @register.simple_tag def css_categories(): - result = cache.get('css_categories') - if not result: # + result = cache.get("css_categories") + if not result: # result = '" - cache.set('css_categories', result, 86400) # 1 day + cache.set("css_categories", result, 86400) # 1 day return mark_safe(result) @@ -178,6 +179,7 @@ def small_cat(category, url=None, contrast=True, selected=True, recurrence=False def small_cat_no_selected(category, url=None): return small_cat(category, url=url, selected=False) + @register.filter def small_cat_no_contrast(category, url=None): if url is None: @@ -185,7 +187,6 @@ def small_cat_no_contrast(category, url=None): return small_cat(category, url=url, contrast=False) - @register.filter def small_cat_recurrent(category, recurrence=False): return small_cat(category, url=None, selected=True, recurrence=recurrence) @@ -213,22 +214,52 @@ def circle_cat(category, recurrence=False): '' ) + def legend_cat(category, url, selected=True, first=False, with_title=False): c = category.css_class() n = category.name - class_reduced = '' if selected else 'reduced' + class_reduced = "" if selected else "reduced" if category.pictogram: - result = '' + '' + category.name + '' + result = ( + '' + + ''
diff --git a/src/agenda_culturel/templatetags/cat_extra.py b/src/agenda_culturel/templatetags/cat_extra.py
index 3098062..0fe4e86 100644
--- a/src/agenda_culturel/templatetags/cat_extra.py
+++ b/src/agenda_culturel/templatetags/cat_extra.py
@@ -4,7 +4,7 @@ from django.core.cache import cache
 from agenda_culturel.models import Category
 import colorsys
 
-from .utils_extra import *
+from .utils_extra import picto_from_name
 
 import logging
@@ -48,12 +48,12 @@ def get_relative_luminance(hex_color):
 def adjust_lightness_saturation(hex_color, shift_lightness=0.0, scale_saturation=1):
     rgb = html_to_rgb(hex_color)
 
-    h, l, s = colorsys.rgb_to_hls(*rgb)
+    h, lg, s = colorsys.rgb_to_hls(*rgb)
 
-    l += shift_lightness
+    lg += shift_lightness
     s *= scale_saturation
 
-    r, g, b = colorsys.hls_to_rgb(h, l, s)
+    r, g, b = colorsys.hls_to_rgb(h, lg, s)
     return rgb_to_html([r, g, b])
@@ -73,12 +73,13 @@ def background_color_adjust_color(color, alpha=1):
 
 @register.simple_tag
 def css_categories():
-    result = cache.get('css_categories')
-    if not result:  #
+    result = cache.get("css_categories")
+    if not result:  #
         result = '"
-        cache.set('css_categories', result, 86400)  # 1 day
+        cache.set("css_categories", result, 86400)  # 1 day
 
     return mark_safe(result)
@@ -178,6 +179,7 @@ def small_cat(category, url=None, contrast=True, selected=True, recurrence=False
 def small_cat_no_selected(category, url=None):
     return small_cat(category, url=url, selected=False)
 
+
 @register.filter
 def small_cat_no_contrast(category, url=None):
     if url is None:
@@ -185,7 +187,6 @@ def small_cat_no_contrast(category, url=None):
     return small_cat(category, url=url, contrast=False)
 
 
-
 @register.filter
 def small_cat_recurrent(category, recurrence=False):
     return small_cat(category, url=None, selected=True, recurrence=recurrence)
@@ -213,22 +214,52 @@ def circle_cat(category, recurrence=False):
         ''
     )
 
+
 def legend_cat(category, url, selected=True, first=False, with_title=False):
     c = category.css_class()
     n = category.name
-    class_reduced = '' if selected else 'reduced'
+    class_reduced = "" if selected else "reduced"
     if category.pictogram:
-        result = '' + '' + category.name + ''
+        result = (
+            ''
+            + ''
+            + category.name
+            + ''
+        )
     else:
-        result = ''
+        result = (
+            ''
+        )
 
     if with_title and selected:
-        result = '' + result + ' ' + n + ' ' + picto_from_name('x-circle') + ''
+        result = (
+            ''
+            + result
+            + ' '
+            + n
+            + " "
+            + picto_from_name("x-circle")
+            + ""
+        )
     else:
-        result = '' + result + ''
+        result = '' + result + ""
 
-    result = '' + result + ''
+    result = '' + result + ""
 
     return mark_safe(result)
@@ -236,27 +267,31 @@ def legend_cat(category, url, selected=True, first=False, with_title=False):
 @register.simple_tag
 def show_legend(filter, category):
     cats = Category.objects.all().order_by("position")
-
-    if not category is None:
+
+    if category is not None:
         url_nocat = "/" + "/".join(filter.request.get_full_path().split("/")[2:])
-        return mark_safe('' +
-            " ".join(
+        return mark_safe(
+            ''
+            + " ".join(
                 [
                     legend_cat(
                         c,
-                        "/cat:" + c.slug + url_nocat
-                        if category != c
-                        else url_nocat,
+                        "/cat:" + c.slug + url_nocat if category != c else url_nocat,
                         selected=category == c,
-                        with_title=True
+                        with_title=True,
                     )
                     for c in cats
                 ]
             )
         )
     else:
-        return mark_safe("" +
-            " ".join(
-                [legend_cat(c, "/cat:" + c.slug + filter.request.get_full_path()) for c in cats]
-            ) + ""
+        return mark_safe(
+            ""
+            + " ".join(
+                [
+                    legend_cat(c, "/cat:" + c.slug + filter.request.get_full_path())
+                    for c in cats
+                ]
+            )
+            + ""
        )
      " - + "".join(['
    • ' + i + "
    • " for i in field]) - + "
    " + "
      " + "".join(["
    • " + i + "
    • " for i in field]) + "
    " ) else: - return mark_safe('non renseigné') + return mark_safe("non renseigné") else: return field @@ -184,14 +198,18 @@ def add_url_category(url, c): else: return url + "&category=" + str(c.pk) + @register.filter def robust_urlize(txt): - return mark_safe(urlize(mark_safe(txt.replace("http", " http"))).replace(" )+", "
    ", res)) @@ -202,6 +220,7 @@ def only_allowed(elist, is_authenticated): else: return elist + @register.filter def tw_badge(event): if event.tags and len([t for t in event.tags if t.startswith("TW:")]) > 0: @@ -209,6 +228,7 @@ def tw_badge(event): else: return "" + @register.filter def get_image_uri(event, request): - return event.get_image_url(request) \ No newline at end of file + return event.get_image_url(request) diff --git a/src/agenda_culturel/templatetags/locations_extra.py b/src/agenda_culturel/templatetags/locations_extra.py index 0f95b52..0d160b6 100644 --- a/src/agenda_culturel/templatetags/locations_extra.py +++ b/src/agenda_culturel/templatetags/locations_extra.py @@ -1,6 +1,5 @@ from django import template from django.utils.safestring import mark_safe -from django.core.cache import cache from .utils_extra import picto_from_name from agenda_culturel.models import ReferenceLocation @@ -14,12 +13,25 @@ def show_suggested_positions(filter): if filter.is_filtered_by_position_radius(): return "" - - locations = ReferenceLocation.objects.all().filter(suggested_distance__isnull=False).order_by("main", "name") - - result = '' - for l in locations: - result += '
diff --git a/src/agenda_culturel/templatetags/locations_extra.py b/src/agenda_culturel/templatetags/locations_extra.py
index 0f95b52..0d160b6 100644
--- a/src/agenda_culturel/templatetags/locations_extra.py
+++ b/src/agenda_culturel/templatetags/locations_extra.py
@@ -1,6 +1,5 @@
 from django import template
 from django.utils.safestring import mark_safe
-from django.core.cache import cache
 
 from .utils_extra import picto_from_name
 from agenda_culturel.models import ReferenceLocation
@@ -14,12 +13,25 @@ def show_suggested_positions(filter):
     if filter.is_filtered_by_position_radius():
         return ""
 
-    locations = ReferenceLocation.objects.all().filter(suggested_distance__isnull=False).order_by("main", "name")
-
-    result = ''
-    for l in locations:
-        result += '' + picto_from_name("map-pin") + ' ' + l.name + ' ' + str(l.suggested_distance) + 'km'
+    locations = (
+        ReferenceLocation.objects.all()
+        .filter(suggested_distance__isnull=False)
+        .order_by("main", "name")
+    )
+
+    result = ""
+    for loc in locations:
+        result += (
+            ' '
+            + picto_from_name("map-pin")
+            + " "
+            + loc.name
+            + " "
+            + str(loc.suggested_distance)
+            + "km"
+        )
 
     return mark_safe(result)
diff --git a/src/agenda_culturel/templatetags/rimports_extra.py b/src/agenda_culturel/templatetags/rimports_extra.py
index e8e2442..b198312 100644
--- a/src/agenda_culturel/templatetags/rimports_extra.py
+++ b/src/agenda_culturel/templatetags/rimports_extra.py
@@ -8,18 +8,21 @@ from django.db.models import OuterRef, Subquery, Count
 from agenda_culturel.models import RecurrentImport, BatchImportation
 
 from .utils_extra import picto_from_name
+import logging
 
 register = template.Library()
 
-import logging
 
 logger = logging.getLogger(__name__)
 
 
 def badge_rimport(status, nb, suffix, picto, cl, placement):
-    return (''
+    return (
+        ''
+        + " "
+        + suffix
+        + '">'
         + picto_from_name(picto)
         + " "
         + str(nb)
-        + "")
+        + ""
+    )
 
 
 @register.simple_tag
 def show_badges_rimports(placement):
     newest = BatchImportation.objects.filter(recurrentImport=OuterRef("pk")).order_by(
         "-created_date"
     )
-    request = RecurrentImport.objects.annotate(
+    request = (
+        RecurrentImport.objects.annotate(
             last_run_status=Subquery(newest.values("status")[:1])
-    ).values("last_run_status").annotate(nb_last_run_by_status=Count("last_run_status"))
-
+        )
+        .values("last_run_status")
+        .annotate(nb_last_run_by_status=Count("last_run_status"))
+    )
 
     nbs = {}
     for res in request:
@@ -52,13 +62,13 @@ def show_badges_rimports(placement):
     for status in ["failed", "running"]:
         if status in nbs and nbs[status] != 0:
             if status == "failed":
-                suffix = 'en erreur'
+                suffix = "en erreur"
                 picto = "alert-triangle"
-                cl = "error"
+                cl = "error"
             else:
-                suffix = ' en cours'
+                suffix = " en cours"
                 picto = "refresh-cw"
-                cl = "simple"
+                cl = "simple"
             result += badge_rimport(status, nbs[status], suffix, picto, cl, placement)
 
     return mark_safe(result)
diff --git a/src/agenda_culturel/templatetags/tag_extra.py b/src/agenda_culturel/templatetags/tag_extra.py
index cba036c..5ba573c 100644
--- a/src/agenda_culturel/templatetags/tag_extra.py
+++ b/src/agenda_culturel/templatetags/tag_extra.py
@@ -2,23 +2,22 @@ from django import template
 from django.utils.safestring import mark_safe
 from django.urls import reverse_lazy
 from agenda_culturel.models import Tag
-from .utils_extra import *
-from .cat_extra import *
+from .cat_extra import circle_cat
 
 register = template.Library()
 
 
 def t_button(tag, url, strike=False, category=None):
     strike_class = " strike" if strike else ""
-    cat = "" if category is None else circle_cat(category) + ' '
-    if not url is None:
+    cat = "" if category is None else circle_cat(category) + " "
+    if url is not None:
         return mark_safe(
             ''
-            + cat
+            + cat
             + tag
             + ""
         )
@@ -27,23 +26,30 @@ def t_button(tag, url, strike=False, category=None):
             ''
-            + cat
+            + cat
             + tag
             + ""
         )
 
+
 @register.filter
 def tag_button(tag, link=False, strike=False):
-    return t_button(tag, reverse_lazy("view_tag", kwargs={"t": tag.replace('/', '-')}) if link else None, strike)
+    return t_button(
+        tag,
+        reverse_lazy("view_tag", kwargs={"t": tag.replace("/", "-")}) if link else None,
+        strike,
+    )
 
 
 @register.filter
 def tag_button_strike(tag, link=False):
     return tag_button(tag, link, strike=True)
 
+
 @register.filter
 def tag_button_link(tag):
-    return t_button(tag, '/?tags=' + tag, False)
+    return t_button(tag, "/?tags=" + tag, False)
 
+
 @register.filter
 def tag_not_in_db(tag, tags):
@@ -58,17 +64,19 @@ def show_suggested_tags(filter):
 
     for t in tags:
         if filter.tag_exists(t.name) and not filter.is_selected_tag(t.name):
-            result += ' ' + t_button(t.name, filter.get_url_add_tag(t.name))
+            result += " " + t_button(t.name, filter.get_url_add_tag(t.name))
 
     return mark_safe(result)
 
+
 @register.filter
 def prepare_tag(tag):
-    return tag.replace('/', '-')
+    return tag.replace("/", "-")
 
+
 @register.filter
 def tw_highlight(tag):
-    if tag.startswith('TW:'):
+    if tag.startswith("TW:"):
         return mark_safe('TW:' + tag[3:])
     else:
-        return tag
\ No newline at end of file
+        return tag
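[Editor's aside, not part of the patch: `prepare_tag` and `tag_button` both replace `/` with `-` so that a tag can sit in a URL path segment. Doctest-style illustration, inferred from the code above:]

    >>> prepare_tag("électro/pop")
    'électro-pop'
    >>> # tag_button(tag, link=True) resolves the same transformed value:
    >>> # reverse_lazy("view_tag", kwargs={"t": "électro-pop"})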
diff --git a/src/agenda_culturel/templatetags/utils_extra.py b/src/agenda_culturel/templatetags/utils_extra.py
index d478e7b..07fcd3a 100644
--- a/src/agenda_culturel/templatetags/utils_extra.py
+++ b/src/agenda_culturel/templatetags/utils_extra.py
@@ -1,7 +1,6 @@
 from django import template
 from django.utils.safestring import mark_safe
 from django.template.defaultfilters import stringfilter
-from django.utils.translation import gettext_lazy as _
 
 from urllib.parse import urlparse
 from datetime import timedelta, date, datetime
@@ -14,6 +13,7 @@ import emoji
 
 register = template.Library()
 
+
 @register.filter
 def is_facebook_url(url):
     if url is None:
@@ -24,6 +24,7 @@ def is_facebook_url(url):
     else:
         return False
 
+
 @register.filter
 def hostname(url):
     if url is None:
@@ -47,18 +48,22 @@ def add_de(txt):
 def week(d):
     return d.isocalendar()[1]
 
+
 @register.filter
 def weekyear(d):
     return d.isocalendar()[0]
 
+
 @register.filter
 def not_before_first(d):
     return d >= datetime.now().date() - relativedelta(years=1)
 
+
 @register.filter
 def not_after_last(d):
     return d <= datetime.now().date() + relativedelta(years=1)
 
+
 @register.filter
 def shift_day(d, shift):
     return d + timedelta(days=shift)
@@ -91,7 +96,13 @@ def calendar_classes(d, fixed_style):
 def url_day(d, category=None):
     if category:
         return reverse_lazy(
-            "day_view_category", kwargs={"year": d.year, "month": d.month, "day": d.day, "cat": category.slug}
+            "day_view_category",
+            kwargs={
+                "year": d.year,
+                "month": d.month,
+                "day": d.day,
+                "cat": category.slug,
+            },
         )
     else:
         return reverse_lazy(
@@ -148,35 +159,68 @@ def get_item(dictionary, key):
 def remove_id_prefix(value):
     return int(value.replace("id_", ""))
 
+
 @register.filter
 def tocoords(c):
     return str(c.coords[1]) + ", " + str(c.coords[0])
 
+
 @register.filter
 def index(indexable, i):
     return indexable[i]
 
+
 @register.filter
 def is_string(val):
     return isinstance(val, str)
 
+
 @register.filter
 def html_vide(val):
     return len(strip_tags(val).replace(" ", "").strip()) == 0
 
+
 @register.filter
 def no_emoji(text):
-    return emoji.replace_emoji(text, replace='')
+    return emoji.replace_emoji(text, replace="")
 
+
 @register.simple_tag
 def navigation_links(filter, category):
-    extra = '?' + filter.get_url()
+    extra = "?" + filter.get_url()
     if category is None:
-        result = '• maintenant• '
-        result += '• cette semaine• '
-        result += '• ce mois-ci• '
+        result = (
+            '• maintenant• '
+        )
+        result += (
+            '• cette semaine• '
+        )
+        result += (
+            '• ce mois-ci• '
+        )
     else:
-        result = '• maintenant• '
-        result += '• cette semaine• '
-        result += '• ce mois-ci• '
+        result = (
+            '• maintenant• '
+        )
+        result += (
+            '• cette semaine• '
+        )
+        result += (
+            '• ce mois-ci• '
+        )
-    return mark_safe(result)
\ No newline at end of file
+    return mark_safe(result)
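[Editor's aside, not part of the patch: the `week`/`weekyear` filters above both read from `date.isocalendar()`, whose year component can differ from the calendar year around January 1st. Standard-library behaviour, for illustration:]

    >>> from datetime import date
    >>> date(2025, 3, 2).isocalendar()[1]    # what the `week` filter returns
    9
    >>> date(2024, 12, 30).isocalendar()[0]  # `weekyear`: ISO year is already 2025
    2025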
diff --git a/src/agenda_culturel/urls.py b/src/agenda_culturel/urls.py
index cb7060d..57beacc 100644
--- a/src/agenda_culturel/urls.py
+++ b/src/agenda_culturel/urls.py
@@ -6,7 +6,13 @@ from django.urls import path, include, re_path
 from django.views.i18n import JavaScriptCatalog
 from django.contrib.sitemaps.views import sitemap
 from django.contrib.sitemaps import GenericSitemap
-from .sitemaps import *
+from .sitemaps import (
+    StaticViewSitemap,
+    HomeCategorySitemap,
+    MonthCategorySitemap,
+    WeekCategorySitemap,
+    UpcomingCategorySitemap,
+)
 from django.views.decorators.cache import cache_page
 
 from .views import *
@@ -28,42 +34,55 @@ category_dict = {
 
 sitemaps = {
     "static": StaticViewSitemap,
-    "events": GenericSitemap(event_dict, priority=1.0, protocol = "https"),
-    "places": GenericSitemap(place_dict, priority=0.6, protocol = "https"),
-    "categories": GenericSitemap(category_dict, priority=0.8, protocol = "https"),
+    "events": GenericSitemap(event_dict, priority=1.0, protocol="https"),
+    "places": GenericSitemap(place_dict, priority=0.6, protocol="https"),
+    "categories": GenericSitemap(category_dict, priority=0.8, protocol="https"),
     "home_categories": HomeCategorySitemap,
     "upcoming_categories": UpcomingCategorySitemap,
     "week_categories": WeekCategorySitemap,
     "month_categories": MonthCategorySitemap,
-    "organisations": GenericSitemap(organisation_dict, priority=0.2, protocol = "https"),
+    "organisations": GenericSitemap(organisation_dict, priority=0.2, protocol="https"),
 }
 
 urlpatterns = [
     path("", home, name="home"),
     path("cat:/", home, name="home_category"),
-    path("cat:/semaine///", week_view, name="week_view_category"),
+    path(
+        "cat:/semaine///", week_view, name="week_view_category"
+    ),
     path("cat:/cette-semaine/", week_view, name="cette_semaine_category"),
-    path("cat:/mois///", month_view, name="month_view_category"),
-    path("cat:/jour////", day_view, name="day_view_category"),
+    path(
+        "cat:/mois///", month_view, name="month_view_category"
+    ),
+    path(
+        "cat:/jour////",
+        day_view,
+        name="day_view_category",
+    ),
     path("cat:/jour/", day_view, name="day_view_category_when"),
     path("cat:/a-venir/", upcoming_events, name="a_venir_category"),
     path("cat:/aujourdhui/", day_view, name="aujourdhui_category"),
-    path("cat:/a-venir////", upcoming_events, name="a_venir_jour_category"),
+    path(
+        "cat:/a-venir////",
+        upcoming_events,
+        name="a_venir_jour_category",
+    ),
     path("cat:/cette-semaine/", week_view, name="cette_semaine_category"),
     path("cat:/ical", export_ical, name="export_ical_category"),
     path("cat:/ce-mois-ci", month_view, name="ce_mois_ci_category"),
-
     path("semaine///", week_view, name="week_view"),
     path("mois///", month_view, name="month_view"),
     path("jour////", day_view, name="day_view"),
     path("jour/", day_view, name="day_view_when"),
     path("aujourdhui/", day_view, name="aujourdhui"),
     path("a-venir/", upcoming_events, name="a_venir"),
-    path("a-venir////", upcoming_events, name="a_venir_jour"),
+    path(
+        "a-venir////",
+        upcoming_events,
+        name="a_venir_jour",
+    ),
     path("cette-semaine/", week_view, name="cette_semaine"),
     path("ce-mois-ci", month_view, name="ce_mois_ci"),
-
     path("tag//", view_tag, name="view_tag"),
     path("tag//past", view_tag_past, name="view_tag_past"),
     path("tags/", tag_list, name="view_all_tags"),
@@ -83,14 +102,34 @@ urlpatterns = [
     path("event//", EventDetailView.as_view(), name="edit_event_pk"),
     path("event//edit", EventUpdateView.as_view(), name="edit_event"),
     path("event//moderate", EventModerateView.as_view(), name="moderate_event"),
-    path("event//moderate/after/", EventModerateView.as_view(), name="moderate_event_step"),
-    path("event//moderate-next", moderate_event_next, name="moderate_event_next"),
+    path(
+        "event//moderate/after/",
+        EventModerateView.as_view(),
+        name="moderate_event_step",
+    ),
+    path(
+        "event//moderate-next", moderate_event_next, name="moderate_event_next"
+    ),
     path("moderate", EventModerateView.as_view(), name="moderate"),
-    path("moderate///", moderate_from_date, name="moderate_from_date"),
-    path("event//simple-clone/edit", EventUpdateView.as_view(), name="simple_clone_edit"),
+    path(
+        "moderate///",
+        moderate_from_date,
+        name="moderate_from_date",
+    ),
+    path(
+        "event//simple-clone/edit",
+        EventUpdateView.as_view(),
+        name="simple_clone_edit",
+    ),
     path("event//clone/edit", EventUpdateView.as_view(), name="clone_edit"),
-    path("event//message", MessageCreateView.as_view(), name="message_for_event"),
-    path("event//update-from-source", update_from_source, name="update_from_source"),
+    path(
+        "event//message", MessageCreateView.as_view(), name="message_for_event"
+    ),
+    path(
+        "event//update-from-source",
+        update_from_source,
+        name="update_from_source",
+    ),
     path(
         "event//change-status/",
         change_status_event,
@@ -145,8 +184,12 @@ urlpatterns = [
     path("rimports/", recurrent_imports, name="recurrent_imports"),
     path("rimports/run", run_all_rimports, name="run_all_rimports"),
     path("rimports/fb/run", run_all_fb_rimports, name="run_all_fb_rimports"),
-    path("rimports/status/", recurrent_imports, name="recurrent_imports_status"),
-    path("rimports/status//run", run_all_rimports, name="run_all_rimports_status"),
+    path(
+        "rimports/status/", recurrent_imports, name="recurrent_imports_status"
+    ),
+    path(
+        "rimports/status//run", run_all_rimports, name="run_all_rimports_status"
+    ),
     path("rimports/add", RecurrentImportCreateView.as_view(), name="add_rimport"),
     path("rimports//view", view_rimport, name="view_rimport"),
     path(
@@ -181,24 +224,60 @@ urlpatterns = [
     ),
     path("duplicates//fix", fix_duplicate, name="fix_duplicate"),
     path("duplicates//merge", merge_duplicate, name="merge_duplicate"),
-    path("duplicates//update/", update_duplicate_event, name="update_event"),
+    path(
+        "duplicates//update/",
+        update_duplicate_event,
+        name="update_event",
+    ),
     path("404/", page_not_found, name="page_not_found"),
     path("500/", internal_server_error, name="internal_server_error"),
-
-    path("organisme//past", OrganisationDetailViewPast.as_view(), name="view_organisation_past"),
-    path("organisme/", OrganisationDetailView.as_view(), name="view_organisation_shortname"),
-    path("organisme/-", OrganisationDetailView.as_view(), name="view_organisation"),
-    path("organisme/-/past", OrganisationDetailViewPast.as_view(), name="view_organisation_past_fullname"),
-    path("organisme/-", OrganisationDetailView.as_view(), name="view_organisation_fullname"),
-    path("organisme//edit", OrganisationUpdateView.as_view(), name="edit_organisation"),
-    path("organisme//delete", OrganisationDeleteView.as_view(), name="delete_organisation"),
+    path(
+        "organisme//past",
+        OrganisationDetailViewPast.as_view(),
+        name="view_organisation_past",
+    ),
+    path(
+        "organisme/",
+        OrganisationDetailView.as_view(),
+        name="view_organisation_shortname",
+    ),
+    path(
+        "organisme/-",
+        OrganisationDetailView.as_view(),
+        name="view_organisation",
+    ),
+    path(
+        "organisme/-/past",
+        OrganisationDetailViewPast.as_view(),
+        name="view_organisation_past_fullname",
+    ),
+    path(
+        "organisme/-",
+        OrganisationDetailView.as_view(),
+        name="view_organisation_fullname",
+    ),
+    path(
+        "organisme//edit",
+        OrganisationUpdateView.as_view(),
+        name="edit_organisation",
+    ),
+    path(
+        "organisme//delete",
+        OrganisationDeleteView.as_view(),
+        name="delete_organisation",
+    ),
     path("organismes/", OrganisationListView.as_view(), name="view_organisations"),
     path("organisme/add", OrganisationCreateView.as_view(), name="add_organisation"),
-
     path("place//past", PlaceDetailViewPast.as_view(), name="view_place_past"),
     path("place/", PlaceDetailView.as_view(), name="view_place"),
-    path("place/-/past", PlaceDetailViewPast.as_view(), name="view_place_past_fullname"),
-    path("place/-", PlaceDetailView.as_view(), name="view_place_fullname"),
+    path(
+        "place/-/past",
+        PlaceDetailViewPast.as_view(),
+        name="view_place_past_fullname",
+    ),
+    path(
+        "place/-", PlaceDetailView.as_view(), name="view_place_fullname"
+    ),
     path("place//edit", PlaceUpdateView.as_view(), name="edit_place"),
     path("place//delete", PlaceDeleteView.as_view(), name="delete_place"),
     path("places/", PlaceListView.as_view(), name="view_places"),
@@ -223,14 +302,12 @@ urlpatterns = [
     path(
         "event/////ical",
         export_event_ical,
-        name="export_event_ical"),
-    path(
-        "ical",
-        export_ical,
-        name="export_ical"),
-    re_path(r'^robots\.txt', include('robots.urls')),
+        name="export_event_ical",
+    ),
+    path("ical", export_ical, name="export_ical"),
+    re_path(r"^robots\.txt", include("robots.urls")),
     path("__debug__/", include("debug_toolbar.urls")),
-    path("ckeditor5/", include('django_ckeditor_5.urls')),
+    path("ckeditor5/", include("django_ckeditor_5.urls")),
     path(
         "sitemap.xml",
         cache_page(86400)(sitemap),
@@ -238,7 +315,6 @@ urlpatterns = [
         name="cached-sitemap",
     ),
     path("cache/clear", clear_cache, name="clear_cache"),
-
 ]
 
 if settings.DEBUG:
diff --git a/src/agenda_culturel/utils.py b/src/agenda_culturel/utils.py
index ea69f65..2bbf26d 100644
--- a/src/agenda_culturel/utils.py
+++ b/src/agenda_culturel/utils.py
@@ -6,7 +6,14 @@ import unicodedata
 
 class PlaceGuesser:
     def __init__(self):
-        self.__citynames = list(ReferenceLocation.objects.values_list("name__lower__unaccent", "name")) + [("clermont-fd", "Clermont-Ferrand"), ("aurillac", "Aurillac"), ("montlucon", "Montluçon"), ("montferrand", "Clermont-Ferrand")]
+        self.__citynames = list(
+            ReferenceLocation.objects.values_list("name__lower__unaccent", "name")
+        ) + [
+            ("clermont-fd", "Clermont-Ferrand"),
+            ("aurillac", "Aurillac"),
+            ("montlucon", "Montluçon"),
+            ("montferrand", "Clermont-Ferrand"),
+        ]
         self.__citynames = [(x[0].replace("-", " "), x[1]) for x in self.__citynames]
 
     def __remove_accents(self, input_str):
@@ -15,19 +22,34 @@ class PlaceGuesser:
         nfkd_form = unicodedata.normalize("NFKD", input_str)
         return "".join([c for c in nfkd_form if not unicodedata.combining(c)])
 
-
     def __guess_is_address(self, part):
-        toponyms = ["bd", "rue", "avenue", "place", "boulevard", "allee", ]
+        toponyms = [
+            "bd",
+            "rue",
+            "avenue",
+            "place",
+            "boulevard",
+            "allee",
+        ]
         part = part.strip()
-        if re.match(r'^[0-9]', part):
+        if re.match(r"^[0-9]", part):
             return True
-
+
         elems = part.split(" ")
         return any([self.__remove_accents(e.lower()) in toponyms for e in elems])
 
-
     def __clean_address(self, addr):
-        toponyms = ["bd", "rue", "avenue", "place", "boulevard", "allée", "bis", "ter", "ZI"]
+        toponyms = [
+            "bd",
+            "rue",
+            "avenue",
+            "place",
+            "boulevard",
+            "allée",
+            "bis",
+            "ter",
+            "ZI",
+        ]
 
         for t in toponyms:
             addr = re.sub(" " + t + " ", " " + t + " ", addr, flags=re.IGNORECASE)
         return addr
@@ -44,7 +66,7 @@ class PlaceGuesser:
         return None
 
     def __guess_city_name_postcode(self, part):
-        with_pc = re.search(r'^(.*)(([0-9][ ]*){5})(.*)$', part)
+        with_pc = re.search(r"^(.*)(([0-9][ ]*){5})(.*)$", part)
         if with_pc:
             p1 = self.__guess_city_name(with_pc.group(1).strip())
             postcode = with_pc.group(2).replace(" ", "")
@@ -54,16 +76,20 @@ class PlaceGuesser:
         return None, self.__guess_city_name(part), None
 
     def __guess_name_address(self, part):
-        with_num = re.search(r'^(([^0-9])+)([0-9]+)(.*)', part)
+        with_num = re.search(r"^(([^0-9])+)([0-9]+)(.*)", part)
         if with_num:
             name = with_num.group(1)
-            return name, part[len(name):]
+            return name, part[len(name) :]
         else:
             return "", part
 
     def guess_address_elements(self, alias):
-        parts = re.split(r'[,/à]', alias)
-        parts = [p1 for p1 in [p.strip() for p in parts] if p1 != "" and p1.lower() != "france"]
+        parts = re.split(r"[,/à]", alias)
+        parts = [
+            p1
+            for p1 in [p.strip() for p in parts]
+            if p1 != "" and p1.lower() != "france"
+        ]
 
         name = ""
         address = ""
@@ -74,11 +100,11 @@ class PlaceGuesser:
         oparts = []
         for part in parts:
             p, c, possible_c = self.__guess_city_name_postcode(part)
-            if not possible_c is None:
-                possible_city = possible_c
-            if not c is None and city == "":
+            if possible_c is not None:
+                possible_city = possible_c
+            if c is not None and city == "":
                 city = c
-            if not p is None and postcode == "":
+            if p is not None and postcode == "":
                 postcode = p
             if p is None and c is None:
                 oparts.append(part)
@@ -95,9 +121,6 @@ class PlaceGuesser:
             if len(mc) == 1:
                 city = mc[0]
 
-
-
         if len(oparts) > 0:
             if not self.__guess_is_address(oparts[0]):
                 name = oparts[0]
@@ -111,4 +134,3 @@ class PlaceGuesser:
                 name = possible_city
 
         return name, address, postcode, city
-
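[Editor's aside, not part of the patch: `PlaceGuesser.guess_address_elements` splits an alias on `,`, `/` and `à`, then classifies each part as name, street address, postcode or city. An illustrative call; the exact city match depends on the `ReferenceLocation` table, so the expected result shown is an assumption:]

    guesser = PlaceGuesser()
    name, address, postcode, city = guesser.guess_address_elements(
        "La Coopérative de Mai, 8 rue de la Coopérative, 63100 Clermont-Ferrand"
    )
    # expected, given a matching ReferenceLocation entry:
    # name="La Coopérative de Mai", address="8 rue de la Coopérative",
    # postcode="63100", city="Clermont-Ferrand"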
diff --git a/src/agenda_culturel/views.py b/src/agenda_culturel/views.py
index f10d1c7..6467e5b 100644
--- a/src/agenda_culturel/views.py
+++ b/src/agenda_culturel/views.py
@@ -6,9 +6,7 @@ from django.contrib.auth.mixins import (
     UserPassesTestMixin,
     PermissionRequiredMixin,
 )
-from django import forms
 from django.http import Http404
-from django.contrib.postgres.search import SearchQuery, SearchHeadline
 from django.utils.safestring import mark_safe
 from django.utils.decorators import method_decorator
 from honeypot.decorators import check_honeypot
@@ -18,12 +16,9 @@ from django.core.cache import cache
 from django.core.mail import mail_admins
 
 import calendar as _calendar
-from django.contrib.gis.geos import Point
-from django.contrib.gis.measure import D
 
-from django.http import HttpResponseRedirect, HttpResponse, HttpResponseRedirect
+from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden
 from django.urls import reverse
-from collections import Counter
 
 import emoji
 
 from .forms import (
@@ -38,7 +33,6 @@ from .forms import (
     CategorisationForm,
     EventAddPlaceForm,
     PlaceForm,
-    MultipleHiddenInput,
     EventModerateForm,
     TagForm,
     TagRenameForm,
@@ -71,14 +65,12 @@ from .models import (
     CategorisationRule,
     remove_accents,
     Place,
-    ReferenceLocation,
-    Organisation
+    Organisation,
 )
-from django.utils import timezone
 from django.utils.html import escape
 from datetime import date, timedelta
 from django.utils.timezone import datetime
-from django.db.models import Q, Subquery, OuterRef, Count, F, Func, BooleanField, ExpressionWrapper, When, Max
+from django.db.models import Q, Subquery, OuterRef, Count, F, Func
 
 from django.urls import reverse_lazy
 from django.utils.translation import gettext_lazy as _
@@ -90,9 +82,7 @@ from django.contrib.messages.views import SuccessMessageMixin
 
 from .calendar import CalendarMonth, CalendarWeek, CalendarDay, CalendarList
 
-from .import_tasks.importer import URL2Events
 from .import_tasks.extractor import Extractor
-from .import_tasks.downloader import ChromiumHeadlessDownloader
 
 from .celery import (
     app as celery_app,
@@ -106,7 +96,6 @@ from .celery import (
     update_orphan_pure_import_events,
 )
 
-import urllib
 import logging
 
 logger = logging.getLogger(__name__)
@@ -120,36 +109,45 @@ class PaginatorFilter(Paginator):
 
         super().__init__(filter.qs, nb)
 
-        self.url_first_page = PaginatorFilter.update_param(self.request.get_full_path(), 'page', 1)
-        self.url_last_page = PaginatorFilter.update_param(self.request.get_full_path(), 'page', self.num_pages)
+        self.url_first_page = PaginatorFilter.update_param(
+            self.request.get_full_path(), "page", 1
+        )
+        self.url_last_page = PaginatorFilter.update_param(
+            self.request.get_full_path(), "page", self.num_pages
+        )
 
     def update_param(params, key, value):
-        p = params.split('?')
+        p = params.split("?")
         root = p[0]
         if len(p) > 1:
             other = p[1]
-            others = other.split('&')
+            others = other.split("&")
             others = [o for o in others if not o.startswith(key)]
-            others += [key + '=' + str(value)]
-            return root + '?' + '&'.join(others)
+            others += [key + "=" + str(value)]
+            return root + "?" + "&".join(others)
         else:
-            return root + '?' + key + '=' + str(value)
+            return root + "?" + key + "=" + str(value)
 
     def page(self, *args, **kwargs):
         page = super().page(*args, **kwargs)
-
+
         try:
-            page.url_previous_page = PaginatorFilter.update_param(self.request.get_full_path(), 'page', page.previous_page_number())
+            page.url_previous_page = PaginatorFilter.update_param(
+                self.request.get_full_path(), "page", page.previous_page_number()
+            )
         except EmptyPage:
             page.url_previous_page = self.request.get_full_path()
-
+
         try:
-            page.url_next_page = PaginatorFilter.update_param(self.request.get_full_path(), 'page', page.next_page_number())
+            page.url_next_page = PaginatorFilter.update_param(
+                self.request.get_full_path(), "page", page.next_page_number()
+            )
         except EmptyPage:
             page.url_next_page = self.request.get_full_path()
-
-
+
         return page
+
+
 #
 #
 # Useful for translation
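[Editor's aside, not part of the patch: `PaginatorFilter.update_param` rewrites one query-string parameter while preserving the rest. Doctest-style illustration of the code as written; note that the `startswith(key)` test would also drop any other parameter whose name merely begins with `page`:]

    >>> PaginatorFilter.update_param("/events?tags=rock&page=3", "page", 4)
    '/events?tags=rock&page=4'
    >>> PaginatorFilter.update_param("/events", "page", 1)
    '/events?page=1'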
+248,7 @@ def month_view(request, year=None, month=None, cat=None): "calendar": cmonth, "filter": filter, "category": category, - "init_date": now if cmonth.today_in_calendar() else cmonth.firstdate + "init_date": now if cmonth.today_in_calendar() else cmonth.firstdate, } return render(request, "agenda_culturel/page-month.html", context) @@ -248,7 +261,28 @@ def week_view(request, year=None, week=None, home=False, cat=None): week = now.isocalendar()[1] request = EventFilter.set_default_values(request) - qs = get_event_qs(request).select_related("exact_location").only("title", "start_day", "start_time", "category", "other_versions", "recurrences", "end_day", "end_time", "uuids", "status", "tags", "local_image", "image", "image_alt", "exact_location", "description") + qs = ( + get_event_qs(request) + .select_related("exact_location") + .only( + "title", + "start_day", + "start_time", + "category", + "other_versions", + "recurrences", + "end_day", + "end_time", + "uuids", + "status", + "tags", + "local_image", + "image", + "image_alt", + "exact_location", + "description", + ) + ) if cat is not None: category = Category.objects.filter(slug=cat).first() qs = qs.filter(category=category) @@ -261,7 +295,14 @@ def week_view(request, year=None, week=None, home=False, cat=None): cweek = CalendarWeek(year, week, filter) - context = {"year": year, "week": week, "calendar": cweek, "filter": filter, "category": category, "init_date": now if cweek.today_in_calendar() else cweek.firstdate } + context = { + "year": year, + "week": week, + "calendar": cweek, + "filter": filter, + "category": category, + "init_date": now if cweek.today_in_calendar() else cweek.firstdate, + } if home: context["home"] = 1 return render(request, "agenda_culturel/page-week.html", context) @@ -284,7 +325,11 @@ def day_view(request, year=None, month=None, day=None, cat=None): category = None filter = EventFilter(request.GET, qs, request=request) - return HttpResponseRedirect(reverse_lazy("day_view", args=[year, month, day]) + "?" + filter.get_url()) + return HttpResponseRedirect( + reverse_lazy("day_view", args=[year, month, day]) + + "?" + + filter.get_url() + ) return upcoming_events(request, year, month, day, 0, cat) @@ -314,8 +359,10 @@ def upcoming_events(request, year=None, month=None, day=None, neighsize=1, cat=N if filter.has_category_parameters(): return HttpResponseRedirect(filter.get_new_url()) - cal = CalendarList(day + timedelta(days=-neighsize), day + timedelta(days=neighsize), filter, True) - + cal = CalendarList( + day + timedelta(days=-neighsize), day + timedelta(days=neighsize), filter, True + ) + context = { "calendar": cal, "now": now, @@ -324,10 +371,9 @@ def upcoming_events(request, year=None, month=None, day=None, neighsize=1, cat=N "filter": filter, "date_pred": day + timedelta(days=-neighsize - 1), "date_next": day + timedelta(days=neighsize + 1), - "category": category + "category": category, } - return render(request, "agenda_culturel/page-upcoming.html", context) @@ -356,13 +402,24 @@ def update_from_source(request, pk): url = event.get_updateable_uuid() if url is None: - messages.warning(request, _("The event cannot be updated because the import process is not available for the referenced sources.")) + messages.warning( + request, + _( + "The event cannot be updated because the import process is not available for the referenced sources." 
+ ), + ) else: - import_events_from_url.delay(url, None, None, True, user_id=request.user.pk if request.user else None) - messages.success(request, _("The event update has been queued and will be completed shortly.")) + import_events_from_url.delay( + url, None, None, True, user_id=request.user.pk if request.user else None + ) + messages.success( + request, + _("The event update has been queued and will be completed shortly."), + ) return HttpResponseRedirect(event.get_absolute_url()) + class EventUpdateView( SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView ): @@ -378,8 +435,12 @@ class EventUpdateView( return kwargs def get_success_message(self, cleaned_data): - txt = _(" A message has been sent to the person who proposed the event.") if hasattr(self, "with_msg") and self.with_msg else "" - return mark_safe(_('The event has been successfully modified.') + txt) + txt = ( + _(" A message has been sent to the person who proposed the event.") + if hasattr(self, "with_msg") and self.with_msg + else "" + ) + return mark_safe(_("The event has been successfully modified.") + txt) def get_object(self, queryset=None): event = super().get_object(queryset) @@ -393,13 +454,18 @@ class EventUpdateView( return super().form_valid(form) def get_initial(self): - self.is_cloning = "clone" in self.request.path.split('/') + self.is_cloning = "clone" in self.request.path.split("/") if self.is_cloning: - messages.info(self.request, _("Changes will be visible on a local copy of the event. The version identical to the imported source will be hidden.")) - self.is_simple_cloning = "simple-clone" in self.request.path.split('/') + messages.info( + self.request, + _( + "Changes will be visible on a local copy of the event. The version identical to the imported source will be hidden." 
+                ),
+            )
+        self.is_simple_cloning = "simple-clone" in self.request.path.split("/")
 
         result = super().get_initial()
-        if self.is_cloning and not "other_versions" in result:
+        if self.is_cloning and "other_versions" not in result:
            obj = self.get_object()
            # if no DuplicatedEvents is associated, create one
            obj.other_versions = DuplicatedEvents.objects.create()
@@ -414,7 +480,7 @@ class EventUpdateView(
        if self.is_simple_cloning:
            result["other_versions"] = None
            result["simple_cloning"] = True
-            
+
        if self.is_cloning or self.is_simple_cloning:
            obj = self.get_object()
            if obj.local_image:
@@ -423,7 +489,6 @@ class EventUpdateView(
 
        return result
 
-
 class EventModerateView(
    SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView
 ):
@@ -433,16 +498,24 @@
    form_class = EventModerateForm
 
    def get_success_message(self, cleaned_data):
-        txt = _(" A message has been sent to the person who proposed the event.") if hasattr(self, "with_msg") and self.with_msg else ""
-        return mark_safe(_('The event <a href="{}">{}</a> has been moderated with success.').format(self.object.get_absolute_url(), self.object.title) + txt)
-
+        txt = (
+            _(" A message has been sent to the person who proposed the event.")
+            if hasattr(self, "with_msg") and self.with_msg
+            else ""
+        )
+        return mark_safe(
+            _('The event <a href="{}">{}</a> has been moderated with success.').format(
+                self.object.get_absolute_url(), self.object.title
+            )
+            + txt
+        )
 
    def is_moderate_next(self):
-        return "after" in self.request.path.split('/')
-
+        return "after" in self.request.path.split("/")
+
    def is_starting_moderation(self):
-        return not "pk" in self.kwargs
-
+        return "pk" not in self.kwargs
+
    def is_moderation_from_date(self):
        return "m" in self.kwargs and "y" in self.kwargs and "d" in self.kwargs
 
@@ -452,28 +525,35 @@
        # select events after the current one
        if start_time:
-            qs = qs.filter(Q(start_day__gt=start_day)|(Q(start_day=start_day) & (Q(start_time__isnull=True)|Q(start_time__gt=start_time))))
+            qs = qs.filter(
+                Q(start_day__gt=start_day)
+                | (
+                    Q(start_day=start_day)
+                    & (Q(start_time__isnull=True) | Q(start_time__gt=start_time))
+                )
+            )
        else:
            qs = qs.filter(Q(start_day__gte=start_day) & ~Q(pk=opk))
 
        # get only possibly representative events
        qs = qs.filter(
-            Q(other_versions__isnull=True) |
-            Q(other_versions__representative=F('pk')) |
-            Q(other_versions__representative__isnull=True))
+            Q(other_versions__isnull=True)
+            | Q(other_versions__representative=F("pk"))
+            | Q(other_versions__representative__isnull=True)
+        )
 
        # remove trash events
        qs = qs.filter(~Q(status=Event.STATUS.TRASH))
-        
+
        # sort by datetime
        qs = qs.order_by("start_day", "start_time")
-        
+
        return qs.first()
 
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        if self.is_moderate_next():
-            context['pred'] = self.kwargs["pred"]
+            context["pred"] = self.kwargs["pred"]
        return context
 
    def get_object(self, queryset=None):
@@ -490,10 +570,11 @@
        try:
            return super().post(request, args, kwargs)
        except Http404:
-            return HttpResponseRedirect(reverse_lazy("error_next_event", args=[self.object.pk]))
+            return HttpResponseRedirect(
+                reverse_lazy("error_next_event", args=[self.object.pk])
+            )
 
-
-    def form_valid(self, form): 
+    def form_valid(self, form):
        form.instance.set_no_modification_date_changed()
        form.instance.set_in_moderation_process()
        form.instance.set_processing_user(self.request.user)
@@ -501,9 +582,9 @@
        return super().form_valid(form)
 
    def get_success_url(self):
-        if 
'save_and_next' in self.request.POST: + if "save_and_next" in self.request.POST: return reverse_lazy("moderate_event_next", args=[self.object.pk]) - elif 'save_and_edit_local' in self.request.POST: + elif "save_and_edit_local" in self.request.POST: return reverse_lazy("edit_event", args=[self.object.get_local_version().pk]) else: return self.object.get_absolute_url() @@ -512,14 +593,15 @@ class EventModerateView( @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.change_event") def error_next_event(request, pk): - + obj = Event.objects.filter(pk=pk).first() return render( - request, - "agenda_culturel/event_next_error_message.html", - {"pk": pk, "object": obj}, - ) + request, + "agenda_culturel/event_next_error_message.html", + {"pk": pk, "object": obj}, + ) + @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.change_event") @@ -532,12 +614,15 @@ def moderate_event_next(request, pk): next_obj = EventModerateView.get_next_event(start_day, start_time, pk) if next_obj is None: return render( - request, - "agenda_culturel/event_next_error_message.html", - {"pk": pk, "object": obj}, - ) + request, + "agenda_culturel/event_next_error_message.html", + {"pk": pk, "object": obj}, + ) else: - return HttpResponseRedirect(reverse_lazy("moderate_event_step", args=[next_obj.pk, obj.pk])) + return HttpResponseRedirect( + reverse_lazy("moderate_event_step", args=[next_obj.pk, obj.pk]) + ) + @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.change_event") @@ -560,8 +645,13 @@ class EventDetailView(UserPassesTestMixin, DetailView, ModelFormMixin): model = Event form_class = MessageEventForm template_name = "agenda_culturel/page-event.html" - queryset = Event.objects.select_related("exact_location").select_related("category").select_related("other_versions").select_related("other_versions__representative").prefetch_related("message_set") - + queryset = ( + Event.objects.select_related("exact_location") + .select_related("category") + .select_related("other_versions") + .select_related("other_versions__representative") + .prefetch_related("message_set") + ) def test_func(self): return ( @@ -594,7 +684,7 @@ class EventDetailView(UserPassesTestMixin, DetailView, ModelFormMixin): else: return self.form_invalid(form) - def form_valid(self, form): + def form_valid(self, form): message = form.save(commit=False) message.user = self.request.user message.related_event = self.get_object() @@ -602,12 +692,10 @@ class EventDetailView(UserPassesTestMixin, DetailView, ModelFormMixin): message.spam = False message.closed = True message.save() - return super().form_valid(form) - @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.change_event") def change_status_event(request, pk, status): @@ -623,7 +711,12 @@ def change_status_event(request, pk, status): event.save(update_fields=fields) with_msg = event.notify_if_required(request) if with_msg: - messages.success(request, _("The status has been successfully modified and a message has been sent to the person who proposed the event.")) + messages.success( + request, + _( + "The status has been successfully modified and a message has been sent to the person who proposed the event." 
+            ),
+        )
 
    else:
        messages.success(request, _("The status has been successfully modified."))
 
@@ -639,9 +732,11 @@ def change_status_event(request, pk, status):
        {"status": status, "event": event, "cancel_url": cancel_url},
    )
 
+
 def import_event_proxy(request):
    return render(request, "agenda_culturel/event_import.html")
 
+
 class EventCreateView(SuccessMessageMixin, CreateView):
    model = Event
    form_class = EventFormWithContact
@@ -659,42 +754,58 @@ class EventCreateView(SuccessMessageMixin, CreateView):
 
    def get_success_message(self, cleaned_data):
        if self.request.user.is_authenticated:
-            return mark_safe(_('The event was created: <a href="{}">{}</a>.').format(self.object.get_absolute_url(), self.object.title))
+            return mark_safe(
+                _('The event was created: <a href="{}">{}</a>.').format(
+                    self.object.get_absolute_url(), self.object.title
+                )
+            )
        else:
-            return _("The event has been submitted and will be published as soon as it has been validated by the moderation team.")
-
+            return _(
+                "The event has been submitted and will be published as soon as it has been validated by the moderation team."
+            )
 
    def form_valid(self, form):
-        if form.cleaned_data['simple_cloning']:
+        if form.cleaned_data["simple_cloning"]:
            form.instance.set_skip_duplicate_check()
-        if form.cleaned_data['cloning']:
+        if form.cleaned_data["cloning"]:
            form.instance.set_in_moderation_process()
        if form.cleaned_data.get("email") or form.cleaned_data.get("comments"):
-            has_comments = not form.cleaned_data.get("comments") in ["", None]
+            has_comments = form.cleaned_data.get("comments") not in ["", None]
            form.instance.add_message(
-                Message(subject=_('during the creation process'),
-                        message=form.cleaned_data.get("comments"),
-                        email=form.cleaned_data.get("email"),
-                        closed=False,
-                        message_type=Message.TYPE.FROM_CONTRIBUTOR if has_comments else Message.TYPE.FROM_CONTRIBUTOR_NO_MSG))
-
+                Message(
+                    subject=_("during the creation process"),
+                    message=form.cleaned_data.get("comments"),
+                    email=form.cleaned_data.get("email"),
+                    closed=False,
+                    message_type=(
+                        Message.TYPE.FROM_CONTRIBUTOR
+                        if has_comments
+                        else Message.TYPE.FROM_CONTRIBUTOR_NO_MSG
+                    ),
+                )
+            )
+
        form.instance.import_sources = None
        form.instance.set_processing_user(self.request.user)
        result = super().form_valid(form)
-        if form.cleaned_data['cloning']:
+        if form.cleaned_data["cloning"]:
            with_msg = form.instance.notify_if_required(self.request)
            if with_msg:
-                messages.success(self.request, _("A message has been sent to the person who proposed the initial event."))
+                messages.success(
+                    self.request,
+                    _(
+                        "A message has been sent to the person who proposed the initial event."
+                    ),
+                )
 
        return result
 
-
 # A class to evaluate the URL according to the existing events and the authentication
 # level of the user
 class URLEventEvaluation:
@@ -706,7 +817,7 @@
        self.cat = None
        self.tags = []
        self.existing = None
-        self.url = form.cleaned_data.get('url')
+        self.url = form.cleaned_data.get("url")
        self.event = None
        if self.url is not None:
            self.url = Extractor.clean_url(self.url)
@@ -715,20 +826,20 @@
            # if it's unknown
            if len(self.existing) == 0:
                self.existing = None
-                self.cat = form.cleaned_data.get('category')
+                self.cat = form.cleaned_data.get("category")
                if self.cat is not None:
                    self.cat = self.cat.name
-                self.tags = form.cleaned_data.get('tags')
+                self.tags = form.cleaned_data.get("tags")
            else:
                published = [
                    e for e in self.existing if e.status == Event.STATUS.PUBLISHED
                ]
-                drafts = [e for e in self.existing if e.status == Event.STATUS.DRAFT]
-                trash = [e for e in self.existing if e.status == Event.STATUS.TRASH]
 
                if self.is_authenticated or len(published) > 1:
-                    self.event = published[0] if len(published) > 1 else self.existing[0]
+                    self.event = (
+                        published[0] if len(published) > 1 else self.existing[0]
+                    )
                else:
                    self.event = None
 
@@ -752,24 +863,28 @@
        if e is None:
            return ""
        else:
-            return '<a href="' + e.get_absolute_url() + '">' + escape(e.title) + '</a>'
-
+            return '<a href="' + e.get_absolute_url() + '">' + escape(e.title) + "</a>"
+
    def to_list(self):
        if self.is_new():
            return (self.url, self.cat, self.tags)
 
 
 def import_from_urls(request):
-
    if request.method == "POST":
        formset = URLSubmissionFormSet(request.POST, request.FILES)
        if not request.user.is_authenticated:
            contactform = SimpleContactForm(request.POST)
 
-        if formset.is_valid() and (request.user.is_authenticated or contactform.is_valid()):
+        if formset.is_valid() and (
+            request.user.is_authenticated or contactform.is_valid()
+        ):
            # evaluate all the forms
-            ucat = [URLEventEvaluation(form, request.user.is_authenticated) for form in formset.forms]
+            ucat = [
+                URLEventEvaluation(form, request.user.is_authenticated)
+                for form in formset.forms
+            ]
 
            # for each not new, add a message
            for uc in ucat:
@@ -777,12 +892,20 @@
                if uc.is_event_visible():
                    messages.info(
                        request,
-                        mark_safe(_('{} has not been submitted since it''s already known: {}.').format(uc.url, uc.get_link()))
+                        mark_safe(
+                            _(
+                                "{} has not been submitted since it's already known: {}."
+                            ).format(uc.url, uc.get_link())
+                        ),
                    )
                else:
                    messages.info(
                        request,
-                        _('{} has not been submitted since it''s already known and currently into moderation process.').format(uc.url)
+                        _(
+                            "{} has not been submitted since it's already known and currently in the moderation process."
+                        ).format(uc.url),
                    )
 
            # keep only new ones
@@ -791,17 +914,22 @@ def import_from_urls(request):
            # finally process them or go back to home page
            if len(ucat) > 0:
                messages.info(
-                    request, 
-                    _('Integrating {} url(s) into our import process.').format(len(ucat))
-                )
+                    request,
+                    _("Integrating {} url(s) into our import process.").format(
+                        len(ucat)
+                    ),
+                )
                email = None
                comments = None
                if not request.user.is_authenticated:
                    email = contactform.cleaned_data["email"]
                    comments = contactform.cleaned_data["comments"]
-                import_events_from_urls.delay(ucat, 
+                import_events_from_urls.delay(
+                    ucat,
                    user_id=request.user.pk if request.user else None,
-                    email=email, comments=comments)
+                    email=email,
+                    comments=comments,
+                )
                return HttpResponseRedirect(reverse("thank_you"))
            else:
                return HttpResponseRedirect(reverse("home"))
@@ -831,7 +959,9 @@ def import_from_url(request):
 
    # if the form has been sent
    if request.method == "POST":
-        form = URLSubmissionFormWithContact(request.POST, is_authenticated=request.user.is_authenticated)
+        form = URLSubmissionFormWithContact(
+            request.POST, is_authenticated=request.user.is_authenticated
+        )
 
        # if the form is valid
        if form.is_valid():
@@ -841,25 +971,38 @@
 
            if uc.is_event_visible():
                messages.info(
                    request,
-                    mark_safe(_('{} has not been submitted since it''s already known: {}.').format(uc.url, uc.get_link()))
+                    mark_safe(
+                        _(
+                            "{} has not been submitted since it's already known: {}."
+                        ).format(uc.url, uc.get_link())
+                    ),
                )
                return HttpResponseRedirect(uc.get_event().get_absolute_url())
            else:
                messages.info(
                    request,
-                    _('{} has not been submitted since it''s already known and currently into moderation process.').format(uc.url)
+                    _(
+                        "{} has not been submitted since it's already known and currently in the moderation process."
+ ).format(uc.url), ) return HttpResponseRedirect(reverse("home")) else: messages.info( - request, - _('Integrating {} into our import process.').format(uc.url) - ) - import_events_from_url.delay(uc.url, uc.cat, uc.tags, user_id=request.user.pk if request.user else None, email=form.cleaned_data.get("email"), comments=form.cleaned_data.get("comments")) + request, _("Integrating {} into our import process.").format(uc.url) + ) + import_events_from_url.delay( + uc.url, + uc.cat, + uc.tags, + user_id=request.user.pk if request.user else None, + email=form.cleaned_data.get("email"), + comments=form.cleaned_data.get("comments"), + ) return HttpResponseRedirect(reverse("thank_you")) - return render( request, "agenda_culturel/import.html", @@ -879,11 +1022,12 @@ def export_event_ical(request, year, month, day, pk): response = HttpResponse(content_type="text/calendar") response.content = cal.to_ical().decode("utf-8").replace("\r\n", "\n") response["Content-Disposition"] = "attachment; filename={0}{1}".format( - event.title.replace('\n', ' ').replace('\r', '')[0:32], ".ics" + event.title.replace("\n", " ").replace("\r", "")[0:32], ".ics" ) return response + def export_ical(request, cat=None): now = date.today() @@ -903,28 +1047,30 @@ def export_ical(request, cat=None): id_cache = hashlib.md5(filter.get_url().encode("utf8")).hexdigest() ical = cache.get(id_cache) if not ical: - calendar = CalendarList(now + timedelta(days=-7), now + timedelta(days=+60), filter) + calendar = CalendarList( + now + timedelta(days=-7), now + timedelta(days=+60), filter + ) ical = calendar.export_to_ics(request) - cache.set(id_cache, ical, 3600) # 1 heure + cache.set(id_cache, ical, 3600) # 1 heure response = HttpResponse(content_type="text/calendar") response.content = ical.to_ical().decode("utf-8").replace("\r\n", "\n") - extra = filter.to_str(' ') + extra = filter.to_str(" ") if extra is None: - extra = '' - if not category is None: - if extra != '': - extra = ' ' + category.name + ' ' + extra + extra = "" + if category is not None: + if extra != "": + extra = " " + category.name + " " + extra else: - extra = ' ' + category.name + extra = " " + category.name response["Content-Disposition"] = "attachment; filename={0}{1}{2}".format( - 'Pommes de lune', extra, ".ics" + "Pommes de lune", extra, ".ics" ) return response -@method_decorator(check_honeypot, name='post') +@method_decorator(check_honeypot, name="post") class MessageCreateView(SuccessMessageMixin, CreateView): model = Message template_name = "agenda_culturel/message_create_form.html" @@ -952,27 +1098,29 @@ class MessageCreateView(SuccessMessageMixin, CreateView): def form_valid(self, form): if self.request.user.is_authenticated: form.instance.user = self.request.user - form.instance.message_type = Message.TYPE.EVENT_REPORT if "pk" in self.kwargs else Message.TYPE.CONTACT_FORM + form.instance.message_type = ( + Message.TYPE.EVENT_REPORT + if "pk" in self.kwargs + else Message.TYPE.CONTACT_FORM + ) return super().form_valid(form) - def get_initial(self): result = super().get_initial() if "pk" in self.kwargs: self.event = get_object_or_404(Event, pk=self.kwargs["pk"]) result["related_event"] = self.event - result["subject"] = _('Reporting the event {} on {}').format(self.event.title, self.event.start_day) + result["subject"] = _("Reporting the event {} on {}").format( + self.event.title, self.event.start_day + ) else: result["related_event"] = None return result - class MessageDeleteView(SuccessMessageMixin, DeleteView): model = Message - success_message = _( - 
"The contact message has been successfully deleted." - ) + success_message = _("The contact message has been successfully deleted.") success_url = reverse_lazy("messages") @@ -998,8 +1146,6 @@ class MessageUpdateView( return kwargs - - @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.view_event") def activite(request): @@ -1012,15 +1158,19 @@ def activite(request): weeks = [days[-1]] for w in range(0, 8): weeks.append(weeks[-1] + timedelta(days=-7)) - + daily_modifications = Event.get_count_modifications([(d, 1) for d in days]) weekly_modifications = Event.get_count_modifications([(w, 7) for w in weeks]) return render( request, "agenda_culturel/page-activity.html", - {"daily_modifications": daily_modifications, "weekly_modifications": weekly_modifications }, - ) + { + "daily_modifications": daily_modifications, + "weekly_modifications": weekly_modifications, + }, + ) + @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.view_event") @@ -1028,7 +1178,6 @@ def administration(request): nb_mod_days = 21 nb_classes = 4 today = date.today() - start_time = datetime.now().time() # get information about recent modifications days = [today] @@ -1037,41 +1186,55 @@ def administration(request): daily_modifications = Event.get_count_modifications([(d, 1) for d in days]) # get last created events - events = Event.objects.all().order_by("-created_date").select_related("exact_location", "category")[:5] + events = ( + Event.objects.all() + .order_by("-created_date") + .select_related("exact_location", "category")[:5] + ) # get last batch imports - rel_event = Event.objects.filter(import_sources__contains=[OuterRef('url_source')]).values("pk")[:1] - batch_imports = BatchImportation.objects.all().select_related("recurrentImport").annotate(event_id=Subquery(rel_event)).order_by("-created_date")[:5] + rel_event = Event.objects.filter( + import_sources__contains=[OuterRef("url_source")] + ).values("pk")[:1] + batch_imports = ( + BatchImportation.objects.all() + .select_related("recurrentImport") + .annotate(event_id=Subquery(rel_event)) + .order_by("-created_date")[:5] + ) # get info about batch information - newest = BatchImportation.objects.filter(recurrentImport=OuterRef("pk")).order_by( - "-created_date" - ).select_related("recurrentImport") + newest = ( + BatchImportation.objects.filter(recurrentImport=OuterRef("pk")) + .order_by("-created_date") + .select_related("recurrentImport") + ) imported_events = RecurrentImport.objects.annotate( - last_run_status=Subquery(newest.values("status")[:1]) - ) + last_run_status=Subquery(newest.values("status")[:1]) + ) - nb_failed = (imported_events - .filter(last_run_status=BatchImportation.STATUS.FAILED) - .count()) - nb_canceled = (imported_events - .filter(last_run_status=BatchImportation.STATUS.CANCELED) - .count()) - nb_running = (imported_events - .filter(last_run_status=BatchImportation.STATUS.RUNNING) - .count()) + nb_failed = imported_events.filter( + last_run_status=BatchImportation.STATUS.FAILED + ).count() + nb_canceled = imported_events.filter( + last_run_status=BatchImportation.STATUS.CANCELED + ).count() + nb_running = imported_events.filter( + last_run_status=BatchImportation.STATUS.RUNNING + ).count() nb_all = imported_events.count() # get some info about imported (or not) events srcs = RecurrentImport.objects.all().values_list("source") in_future = Event.objects.filter(Q(start_day__gte=today)) nb_in_rimport = in_future.filter(Q(import_sources__overlap=srcs)).count() - 
nb_in_orphan_import = in_future.filter( - (Q(import_sources__isnull=False) & - (Q(modified_date__isnull=True) | - Q(modified_date__lte=F('imported_date')))) - & ~Q(import_sources__overlap=srcs)).count() - + nb_in_orphan_import = in_future.filter( + ( + Q(import_sources__isnull=False) + & (Q(modified_date__isnull=True) | Q(modified_date__lte=F("imported_date"))) + ) + & ~Q(import_sources__overlap=srcs) + ).count() # get all non moderated events nb_not_moderated = Event.get_nb_not_moderated(today, nb_mod_days, nb_classes) @@ -1079,12 +1242,18 @@ def administration(request): return render( request, "agenda_culturel/administration.html", - {"daily_modifications": daily_modifications, - "events": events, "batch_imports": batch_imports, - "nb_failed": nb_failed, "nb_canceled": nb_canceled, - "nb_running": nb_running, "nb_all": nb_all, - "nb_not_moderated": nb_not_moderated, - "nb_in_rimport": nb_in_rimport, "nb_in_orphan_import": nb_in_orphan_import}, + { + "daily_modifications": daily_modifications, + "events": events, + "batch_imports": batch_imports, + "nb_failed": nb_failed, + "nb_canceled": nb_canceled, + "nb_running": nb_running, + "nb_all": nb_all, + "nb_not_moderated": nb_not_moderated, + "nb_in_rimport": nb_in_rimport, + "nb_in_orphan_import": nb_in_orphan_import, + }, ) @@ -1097,7 +1266,6 @@ def recent(request): paginator = PaginatorFilter(filter, 10, request) page = request.GET.get("page") - try: response = paginator.page(page) except PageNotAnInteger: @@ -1136,6 +1304,7 @@ def view_messages(request): {"filter": filter, "nb_spams": nb_spams, "paginator_filter": response}, ) + @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.view_message") def delete_cm_spam(request): @@ -1146,20 +1315,17 @@ def delete_cm_spam(request): messages.success(request, _("Spam has been successfully deleted.")) return HttpResponseRedirect(reverse_lazy("messages")) else: - nb_msgs = Message.objects.values('spam').annotate(total=Count('spam')) + nb_msgs = Message.objects.values("spam").annotate(total=Count("spam")) nb_total = sum([nb["total"] for nb in nb_msgs]) nb_spams = sum([nb["total"] for nb in nb_msgs if nb["spam"]]) cancel_url = reverse_lazy("messages") return render( request, "agenda_culturel/delete_spams_confirm.html", - { "nb_total": nb_total, "nb_spams": nb_spams, "cancel_url": cancel_url}, + {"nb_total": nb_total, "nb_spams": nb_spams, "cancel_url": cancel_url}, ) - - - def event_search(request, full=False): categories = None tags = None @@ -1169,9 +1335,13 @@ def event_search(request, full=False): qs = get_event_qs(request).order_by("-start_day") if not request.user.is_authenticated: - qs = qs.filter((Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True))) + qs = qs.filter( + ( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + ) if full: filter = SearchEventFilter( request.GET, @@ -1184,23 +1354,41 @@ def event_search(request, full=False): queryset=qs, request=request, ) - if 'q' in request.GET: - categories = Category.objects.filter(name__icontains=request.GET['q']) - s_q = remove_accents(request.GET['q'].lower()) - tags = Event.objects.extra(where=['%s ILIKE ANY (tags)'], params=[request.GET['q']]).annotate(arr_tags=Func(F('tags'), function='unnest')).values_list('arr_tags', flat=True).distinct() - tags = [(t, emoji.demojize(remove_accents(t).lower(), delimiters=('000', ''))) for t in tags] + if "q" in 
request.GET: + categories = Category.objects.filter(name__icontains=request.GET["q"]) + s_q = remove_accents(request.GET["q"].lower()) + tags = ( + Event.objects.extra( + where=["%s ILIKE ANY (tags)"], params=[request.GET["q"]] + ) + .annotate(arr_tags=Func(F("tags"), function="unnest")) + .values_list("arr_tags", flat=True) + .distinct() + ) + tags = [ + (t, emoji.demojize(remove_accents(t).lower(), delimiters=("000", ""))) + for t in tags + ] tags = [t for t in tags if s_q == t[1]] tags.sort(key=lambda x: x[1]) tags = [t[0] for t in tags] - places = Place.objects.filter(Q(name__icontains=request.GET['q'])|Q(description__icontains=request.GET['q'])|Q(city__icontains=request.GET['q'])) - organisations = Organisation.objects.filter(Q(name__icontains=request.GET['q'])|Q(description__icontains=request.GET['q'])) + places = Place.objects.filter( + Q(name__icontains=request.GET["q"]) + | Q(description__icontains=request.GET["q"]) + | Q(city__icontains=request.GET["q"]) + ) + organisations = Organisation.objects.filter( + Q(name__icontains=request.GET["q"]) + | Q(description__icontains=request.GET["q"]) + ) if request.user.is_authenticated: - rimports = RecurrentImport.objects.filter(name__icontains=request.GET['q']) + rimports = RecurrentImport.objects.filter( + name__icontains=request.GET["q"] + ) paginator = PaginatorFilter(filter, 10, request) page = request.GET.get("page") - try: response = paginator.page(page) except PageNotAnInteger: @@ -1238,20 +1426,28 @@ def event_search_full(request): @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.view_batchimportation") def imports(request): - rel_event = Event.objects.filter(import_sources__contains=[OuterRef('url_source')]).values("pk")[:1] - paginator = Paginator(BatchImportation.objects.all().order_by("-created_date").annotate(event_id=Subquery(rel_event)), - 30) + rel_event = Event.objects.filter( + import_sources__contains=[OuterRef("url_source")] + ).values("pk")[:1] + paginator = Paginator( + BatchImportation.objects.all() + .order_by("-created_date") + .annotate(event_id=Subquery(rel_event)), + 30, + ) page = request.GET.get("page") today = date.today() srcs = RecurrentImport.objects.all().values_list("source") in_future = Event.objects.filter(Q(start_day__gte=today)) - nb_in_orphan_import = in_future.filter( - (Q(import_sources__isnull=False) & - (Q(modified_date__isnull=True) | - Q(modified_date__lte=F('imported_date')))) - & ~Q(import_sources__overlap=srcs)).count() + nb_in_orphan_import = in_future.filter( + ( + Q(import_sources__isnull=False) + & (Q(modified_date__isnull=True) | Q(modified_date__lte=F("imported_date"))) + ) + & ~Q(import_sources__overlap=srcs) + ).count() try: response = paginator.page(page) @@ -1261,7 +1457,9 @@ def imports(request): response = paginator.page(paginator.num_pages) return render( - request, "agenda_culturel/imports.html", {"paginator_filter": response, "nb_in_orphan_import": nb_in_orphan_import} + request, + "agenda_culturel/imports.html", + {"paginator_filter": response, "nb_in_orphan_import": nb_in_orphan_import}, ) @@ -1307,6 +1505,7 @@ def cancel_import(request, pk): {"object": import_process, "cancel_url": cancel_url}, ) + @login_required(login_url="/accounts/login/") @permission_required( ["agenda_culturel.view_batchimportation", "agenda_culturel.run_batchimportation"] @@ -1324,15 +1523,23 @@ def update_orphan_events(request): srcs = RecurrentImport.objects.all().values_list("source") in_future = Event.objects.filter(Q(start_day__gte=today)) - 
nb_in_orphan_import = in_future.filter( - (Q(import_sources__isnull=False) & - (Q(modified_date__isnull=True) | - Q(modified_date__lte=F('imported_date')))) - & ~Q(import_sources__overlap=srcs)).count() + nb_in_orphan_import = in_future.filter( + ( + Q(import_sources__isnull=False) + & ( + Q(modified_date__isnull=True) + | Q(modified_date__lte=F("imported_date")) + ) + ) + & ~Q(import_sources__overlap=srcs) + ).count() return render( - request, "agenda_culturel/run_orphan_imports_confirm.html", {"nb_in_orphan_import": nb_in_orphan_import} + request, + "agenda_culturel/run_orphan_imports_confirm.html", + {"nb_in_orphan_import": nb_in_orphan_import}, ) + ######################### ## recurrent importations ######################### @@ -1343,32 +1550,27 @@ def update_orphan_events(request): def recurrent_imports(request, status=None): newest = BatchImportation.objects.filter(recurrentImport=OuterRef("pk")).order_by( - "-created_date") + "-created_date" + ) - qs = RecurrentImport.objects.all(). \ - annotate(last_run_status=Subquery(newest.values("status")[:1])). \ - order_by("-pk") + qs = ( + RecurrentImport.objects.all() + .annotate(last_run_status=Subquery(newest.values("status")[:1])) + .order_by("-pk") + ) - nb_failed = (qs - .filter(last_run_status=BatchImportation.STATUS.FAILED) - .count()) - nb_canceled = (qs - .filter(last_run_status=BatchImportation.STATUS.CANCELED) - .count()) - nb_running = (qs - .filter(last_run_status=BatchImportation.STATUS.RUNNING) - .count()) + nb_failed = qs.filter(last_run_status=BatchImportation.STATUS.FAILED).count() + nb_canceled = qs.filter(last_run_status=BatchImportation.STATUS.CANCELED).count() + nb_running = qs.filter(last_run_status=BatchImportation.STATUS.RUNNING).count() nb_all = qs.count() - - if not status is None: + if status is not None: qs = qs.filter(last_run_status=status) filter = RecurrentImportFilter(request.GET, queryset=qs) paginator = PaginatorFilter(filter, 20, request) - page = request.GET.get("page") try: @@ -1379,9 +1581,17 @@ def recurrent_imports(request, status=None): response = paginator.page(paginator.num_pages) return render( - request, "agenda_culturel/rimports.html", {"paginator_filter": response, + request, + "agenda_culturel/rimports.html", + { + "paginator_filter": response, "filter": filter, - "nb_all": nb_all, "nb_failed": nb_failed, "nb_canceled": nb_canceled, "nb_running": nb_running, "status": status} + "nb_all": nb_all, + "nb_failed": nb_failed, + "nb_canceled": nb_canceled, + "nb_running": nb_running, + "status": status, + }, ) @@ -1519,12 +1729,11 @@ def update_duplicate_event(request, pk, epk): if request.method == "POST": form = MergeDuplicates(request.POST, duplicates=edup) if form.is_valid(): - events = edup.get_duplicated() for f in edup.get_items_comparison(): if not f["similar"]: selected = form.get_selected_events(f["key"]) - if not selected is None: + if selected is not None: if isinstance(selected, list): values = [ x @@ -1537,12 +1746,16 @@ def update_duplicate_event(request, pk, epk): else: setattr(event, f["key"], sum(values, [])) else: - if f["key"] == 'organisers': + if f["key"] == "organisers": event.organisers.set(selected.organisers.all()) else: setattr(event, f["key"], getattr(selected, f["key"])) if f["key"] == "image": - setattr(event, "local_image", getattr(selected, "local_image")) + setattr( + event, + "local_image", + getattr(selected, "local_image"), + ) event.other_versions.fix(event) event.save() @@ -1595,20 +1808,24 @@ def merge_duplicate(request, pk): else: new_event_data[f["key"]] = 
getattr(selected, f["key"]) if f["key"] == "image" and "local_image" not in new_event_data: - new_event_data["local_image"] = getattr(selected, "local_image") + new_event_data["local_image"] = getattr( + selected, "local_image" + ) - - organisers = new_event_data.pop('organisers', None) + organisers = new_event_data.pop("organisers", None) # create a new event that merge the selected events new_event = Event(**new_event_data) new_event.status = Event.STATUS.PUBLISHED new_event.other_versions = edup new_event.save() - if not organisers is None: + if organisers is not None: new_event.organisers.set(organisers.all()) edup.fix(new_event) - messages.info(request, _("Creation of a merged event has been successfully completed.")) + messages.info( + request, + _("Creation of a merged event has been successfully completed."), + ) return HttpResponseRedirect(new_event.get_absolute_url()) return render( @@ -1658,16 +1875,28 @@ def fix_duplicate(request, pk): # one element has been selected to be the representative selected = form.get_selected_event(edup) if selected is None: - messages.error(request, _("The selected item is no longer included in the list of duplicates. Someone else has probably modified the list in the meantime.")) + messages.error( + request, + _( + "The selected item is no longer included in the list of duplicates. Someone else has probably modified the list in the meantime." + ), + ) else: edup.fix(selected) - messages.success(request, _("The selected event has been set as representative")) + messages.success( + request, _("The selected event has been set as representative") + ) return HttpResponseRedirect(edup.get_absolute_url()) elif form.is_action_remove(): # one element is removed from the set event = form.get_selected_event(edup) if event is None: - messages.error(request, _("The selected item is no longer included in the list of duplicates. Someone else has probably modified the list in the meantime.")) + messages.error( + request, + _( + "The selected item is no longer included in the list of duplicates. Someone else has probably modified the list in the meantime." + ), + ) return HttpResponseRedirect(edup.get_absolute_url()) else: event.other_versions = None @@ -1676,7 +1905,12 @@ def fix_duplicate(request, pk): event.set_no_modification_date_changed() event.save() edup.save() - messages.success(request, _("The event has been withdrawn from the group and made independent.")) + messages.success( + request, + _( + "The event has been withdrawn from the group and made independent." + ), + ) if edup.nb_duplicated() == 1: return HttpResponseRedirect(edup.get_absolute_url()) else: @@ -1685,7 +1919,12 @@ def fix_duplicate(request, pk): # otherwise, an event will be updated using other elements event = form.get_selected_event(edup) if event is None: - messages.error(request, _("The selected item is no longer included in the list of duplicates. Someone else has probably modified the list in the meantime.")) + messages.error( + request, + _( + "The selected item is no longer included in the list of duplicates. Someone else has probably modified the list in the meantime." 
+ ), + ) return HttpResponseRedirect(edup.get_absolute_url()) else: return HttpResponseRedirect( @@ -1699,7 +1938,6 @@ def fix_duplicate(request, pk): else: form = FixDuplicates(edup=edup) - return render( request, "agenda_culturel/fix_duplicate.html", @@ -1720,11 +1958,9 @@ def duplicates(request): if nb_removed > 0: messages.success( request, - _("Cleaning up duplicates: {} item(s) fixed.").format( - nb_removed - ), + _("Cleaning up duplicates: {} item(s) fixed.").format(nb_removed), ) - + filter = DuplicatedEventsFilter( request.GET, queryset=DuplicatedEvents.objects.all().order_by("-pk") ) @@ -1752,10 +1988,7 @@ def set_duplicate(request, year, month, day, pk): e for e in cday.get_events() if e != event - and ( - event.other_versions is None - or event.other_versions != e.other_versions - ) + and (event.other_versions is None or event.other_versions != e.other_versions) and e.status != Event.STATUS.TRASH ] @@ -1798,7 +2031,13 @@ def set_duplicate(request, year, month, day, pk): @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.view_categorisationrule") def categorisation_rules(request): - paginator = Paginator(CategorisationRule.objects.all().order_by("pk").select_related("category").select_related("place"), 100) + paginator = Paginator( + CategorisationRule.objects.all() + .order_by("pk") + .select_related("category") + .select_related("place"), + 100, + ) page = request.GET.get("page") try: @@ -1890,7 +2129,13 @@ def apply_categorisation_rules(request): else: # first we check if events are not correctly categorised to_categorise = [] - events = Event.objects.filter(start_day__gte=datetime.now()).exclude(category=Category.get_default_category_id()).exclude(category=None).select_related("exact_location").select_related("category") + events = ( + Event.objects.filter(start_day__gte=datetime.now()) + .exclude(category=Category.get_default_category_id()) + .exclude(category=None) + .select_related("exact_location") + .select_related("category") + ) for e in events: c = CategorisationRule.get_category_from_rules(e) if c and c != e.category: @@ -1899,13 +2144,18 @@ def apply_categorisation_rules(request): # then we apply rules on events without category nb = 0 to_save = [] - events = Event.objects.filter(start_day__gte=datetime.now()).filter(Q(category=Category.get_default_category_id()) | Q(category=None)).select_related("exact_location").select_related("category") + events = ( + Event.objects.filter(start_day__gte=datetime.now()) + .filter(Q(category=Category.get_default_category_id()) | Q(category=None)) + .select_related("exact_location") + .select_related("category") + ) for e in events: success = CategorisationRule.apply_rules(e) if success: nb += 1 to_save.append(e) - + if nb != 0: Event.objects.bulk_update(to_save, fields=["category"]) @@ -1957,6 +2207,7 @@ class PlaceListView(ListView): model = Place ordering = ["name__unaccent"] + class PlaceListAdminView(PermissionRequiredMixin, ListView): model = Place paginate_by = 10 @@ -1972,27 +2223,40 @@ class PlaceDetailView(ListView): def get_queryset(self): self.place = get_object_or_404(Place, pk=self.kwargs["pk"]) - return get_event_qs(self.request).filter(exact_location=self.place).filter( - Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True)).filter(start_day__gte=datetime.now()).order_by("start_day") - + return ( + get_event_qs(self.request) + .filter(exact_location=self.place) + .filter( + Q(other_versions__isnull=True) + | 
Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + .filter(start_day__gte=datetime.now()) + .order_by("start_day") + ) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["object"] = self.place return context + class PlaceDetailViewPast(PlaceDetailView): def get_queryset(self): self.place = get_object_or_404(Place, pk=self.kwargs["pk"]) self.past = True - return get_event_qs(self.request).filter(exact_location=self.place).filter( - Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True)).filter(start_day__lte=datetime.now()).order_by("-start_day") - + return ( + get_event_qs(self.request) + .filter(exact_location=self.place) + .filter( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + .filter(start_day__lte=datetime.now()) + .order_by("-start_day") + ) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) @@ -2144,7 +2408,9 @@ class PlaceFromEventCreateView(PlaceCreateView): if self.event.location and "add" in self.request.GET: initial["aliases"] = [self.event.location] guesser = PlaceGuesser() - name, address, postcode, city = guesser.guess_address_elements(self.event.location) + name, address, postcode, city = guesser.guess_address_elements( + self.event.location + ) initial["name"] = name initial["address"] = address initial["postcode"] = postcode @@ -2163,7 +2429,6 @@ class PlaceFromEventCreateView(PlaceCreateView): return self.event.get_absolute_url() - ######################### ## Organisations ######################### @@ -2174,33 +2439,56 @@ class OrganisationListView(ListView): paginate_by = 10 ordering = ["name__unaccent"] + class OrganisationDetailView(ListView): model = Organisation template_name = "agenda_culturel/organisation_detail.html" paginate_by = 10 def get_queryset(self): - self.organisation = Organisation.objects.filter(pk=self.kwargs["pk"]).prefetch_related('organised_events').first() - return get_event_qs(self.request).filter(organisers__in=[self.kwargs["pk"]]).filter( - Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True)).filter(start_day__gte=datetime.now()).order_by("start_day") + self.organisation = ( + Organisation.objects.filter(pk=self.kwargs["pk"]) + .prefetch_related("organised_events") + .first() + ) + return ( + get_event_qs(self.request) + .filter(organisers__in=[self.kwargs["pk"]]) + .filter( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + .filter(start_day__gte=datetime.now()) + .order_by("start_day") + ) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["object"] = self.organisation return context + class OrganisationDetailViewPast(OrganisationDetailView): def get_queryset(self): - self.organisation = Organisation.objects.filter(pk=self.kwargs["pk"]).prefetch_related('organised_events').first() + self.organisation = ( + Organisation.objects.filter(pk=self.kwargs["pk"]) + .prefetch_related("organised_events") + .first() + ) self.past = True - return get_event_qs(self.request).filter(organisers__in=[self.kwargs["pk"]]).filter( - Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - 
Q(other_versions__representative__isnull=True)).filter(start_day__lte=datetime.now()).order_by("-start_day") - + return ( + get_event_qs(self.request) + .filter(organisers__in=[self.kwargs["pk"]]) + .filter( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + .filter(start_day__lte=datetime.now()) + .order_by("-start_day") + ) def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) @@ -2208,23 +2496,18 @@ class OrganisationDetailViewPast(OrganisationDetailView): return context - -class OrganisationUpdateView( - PermissionRequiredMixin, SuccessMessageMixin, UpdateView -): +class OrganisationUpdateView(PermissionRequiredMixin, SuccessMessageMixin, UpdateView): model = Organisation permission_required = "agenda_culturel.change_organisation" success_message = _("The organisation has been successfully updated.") - fields = '__all__' + fields = "__all__" -class OrganisationCreateView( - PermissionRequiredMixin, SuccessMessageMixin, CreateView -): +class OrganisationCreateView(PermissionRequiredMixin, SuccessMessageMixin, CreateView): model = Organisation permission_required = "agenda_culturel.add_organisation" success_message = _("The organisation has been successfully created.") - fields = '__all__' + fields = "__all__" class OrganisationDeleteView(PermissionRequiredMixin, DeleteView): @@ -2232,10 +2515,12 @@ class OrganisationDeleteView(PermissionRequiredMixin, DeleteView): permission_required = "agenda_culturel.delete_organisation" success_url = reverse_lazy("view_organisations") + ######################### ## Tags ######################### + class TagUpdateView(PermissionRequiredMixin, SuccessMessageMixin, UpdateView): model = Tag permission_required = "agenda_culturel.change_tag" @@ -2265,14 +2550,16 @@ class TagDeleteView(PermissionRequiredMixin, DeleteView): permission_required = "agenda_culturel.delete_tag" success_url = reverse_lazy("view_all_tags") + def view_tag_past(request, t): return view_tag(request, t, True) + def view_tag(request, t, past=False): now = date.today() qs = get_event_qs(request).filter(tags__contains=[t]) - + if past: qs = qs.filter(start_day__lt=now).order_by("-start_day", "-start_time") else: @@ -2291,46 +2578,72 @@ def view_tag(request, t, past=False): rimports = RecurrentImport.objects.filter(defaultTags__contains=[t]) tag = Tag.objects.filter(name=t).first() - context = {"tag": t, "paginator_filter": response, "object": tag, "rimports": rimports, "past": past} + context = { + "tag": t, + "paginator_filter": response, + "object": tag, + "rimports": rimports, + "past": past, + } return render(request, "agenda_culturel/tag.html", context) def statistics(request): - stats = {} - max = {} first = {} last = {} - ev_published = Event.objects.filter(Q(status=Event.STATUS.PUBLISHED) & - (Q(other_versions__isnull=True) | - Q(other_versions__representative=F('pk')) | - Q(other_versions__representative__isnull=True))) + ev_published = Event.objects.filter( + Q(status=Event.STATUS.PUBLISHED) + & ( + Q(other_versions__isnull=True) + | Q(other_versions__representative=F("pk")) + | Q(other_versions__representative__isnull=True) + ) + ) - for v in ['start_day', 'created_date__date']: + for v in ["start_day", "created_date__date"]: after = 24 - last[v] = date.today() if v == 'created_date__date' else date.today() + timedelta(weeks=after) - last[v] = last[v].replace(day = _calendar.monthrange(last[v].year, last[v].month)[1]) + last[v] = ( + date.today() + if v == 
"created_date__date" + else date.today() + timedelta(weeks=after) + ) + last[v] = last[v].replace( + day=_calendar.monthrange(last[v].year, last[v].month)[1] + ) - r = 8 * 30 - if v == 'start_day': + r = 8 * 30 + if v == "start_day": r += after * 7 first[v] = (last[v] - timedelta(days=r)).replace(day=1) - stats[v] = ev_published. \ - annotate(day=F(v)). \ - filter(Q(day__lte=last[v]) & Q(day__gte=first[v])).values('day').annotate(total=Count('day')).order_by('day') + stats[v] = ( + ev_published.annotate(day=F(v)) + .filter(Q(day__lte=last[v]) & Q(day__gte=first[v])) + .values("day") + .annotate(total=Count("day")) + .order_by("day") + ) + nb_by_city = ( + ev_published.annotate(city=F("exact_location__city")) + .filter(city__isnull=False) + .values("city") + .annotate(total=Count("city")) + .order_by("-total") + ) - nb_by_city = ev_published.annotate(city=F('exact_location__city')).filter(city__isnull=False).values('city').annotate(total=Count('city')).order_by('-total') - - - context = {"stats_by_startday": stats["start_day"], "stats_by_creation": stats["created_date__date"], - "first_by_startday": first["start_day"], "last_by_startday": last["start_day"], - "first_by_creation": first["created_date__date"], "last_by_creation": last["created_date__date"], - "nb_by_city": nb_by_city - } + context = { + "stats_by_startday": stats["start_day"], + "stats_by_creation": stats["created_date__date"], + "first_by_startday": first["start_day"], + "last_by_startday": last["start_day"], + "first_by_creation": first["created_date__date"], + "last_by_creation": last["created_date__date"], + "nb_by_city": nb_by_city, + } return render(request, "agenda_culturel/statistics.html", context) @@ -2341,18 +2654,29 @@ def tag_list(request): d_objects = dict() for o in objects: d_objects[o.name] = o - - tags = [t | {'obj': d_objects[t["tag"]]} if t["tag"] in d_objects else t for t in tags] - tags += [{'obj': o, "tag": o.name, "count": 0} for o in objects if o.name not in r_tags] - context = {"tags": sorted(tags, key=lambda x: emoji.demojize(remove_accents(x["tag"]).lower(), delimiters=('000', '')))} + tags = [ + t | {"obj": d_objects[t["tag"]]} if t["tag"] in d_objects else t for t in tags + ] + tags += [ + {"obj": o, "tag": o.name, "count": 0} for o in objects if o.name not in r_tags + ] + + context = { + "tags": sorted( + tags, + key=lambda x: emoji.demojize( + remove_accents(x["tag"]).lower(), delimiters=("000", "") + ), + ) + } return render(request, "agenda_culturel/tags.html", context) + @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.change_tag") def rename_tag(request, t): form = TagRenameForm(name=t) - force = False if request.method == "POST": form = TagRenameForm(request.POST, name=t) @@ -2360,38 +2684,50 @@ def rename_tag(request, t): save = True if form.cleaned_data["name"] == t: messages.warning( - request, - _( - "You have not modified the tag name." - ), - ) + request, + _("You have not modified the tag name."), + ) save = False elif not form.is_force(): - if Event.objects.filter(tags__contains=[form.cleaned_data["name"]]).count() > 0: + if ( + Event.objects.filter( + tags__contains=[form.cleaned_data["name"]] + ).count() + > 0 + ): if Tag.objects.filter(name=form.cleaned_data["name"]): messages.warning( request, - (_( - "This tag {} is already in use, and is described by different information from the current tag. You can force renaming by checking the corresponding option. 
The information associated with tag {} will be deleted, and all events associated with tag {} will be associated with tag {}." - )).format(form.cleaned_data["name"], t, t, form.cleaned_data["name"]), + ( + _( + "This tag {} is already in use, and is described by different information from the current tag. You can force renaming by checking the corresponding option. The information associated with tag {} will be deleted, and all events associated with tag {} will be associated with tag {}." + ) + ).format( + form.cleaned_data["name"], + t, + t, + form.cleaned_data["name"], + ), ) else: messages.warning( request, - (_( - "This tag {} is already in use. You can force renaming by checking the corresponding option." - )).format(form.cleaned_data["name"]), + ( + _( + "This tag {} is already in use. You can force renaming by checking the corresponding option." + ) + ).format(form.cleaned_data["name"]), ) save = False form = TagRenameForm(request.POST, name=t, force=True) - + if save: # find all matching events and update them events = Event.objects.filter(tags__contains=[t]) new_name = form.cleaned_data["name"] for e in events: e.tags = [te for te in e.tags if te != t] - if not new_name in e.tags: + if new_name not in e.tags: e.tags += [new_name] Event.objects.bulk_update(events, fields=["tags"]) @@ -2399,7 +2735,7 @@ def rename_tag(request, t): rimports = RecurrentImport.objects.filter(defaultTags__contains=[t]) for ri in rimports: ri.tags = [te for te in ri.defaultTags if te != t] - if not new_name in ri.tags: + if new_name not in ri.tags: ri.tags += [new_name] RecurrentImport.objects.bulk_update(rimports, fields=["defaultTags"]) @@ -2409,28 +2745,32 @@ def rename_tag(request, t): tag_object.name = new_name tag_object.save() - messages.success( request, - (_( - "The tag {} has been successfully renamed to {}." - )).format(t, form.cleaned_data["name"]), + (_("The tag {} has been successfully renamed to {}.")).format( + t, form.cleaned_data["name"] + ), + ) + return HttpResponseRedirect( + reverse_lazy("view_tag", kwargs={"t": form.cleaned_data["name"]}) ) - return HttpResponseRedirect(reverse_lazy("view_tag", kwargs={"t": form.cleaned_data["name"]})) nb = Event.objects.filter(tags__contains=[t]).count() return render( - request, "agenda_culturel/tag_rename_form.html", context={"form": form, "tag": t, "nb": nb} + request, + "agenda_culturel/tag_rename_form.html", + context={"form": form, "tag": t, "nb": nb}, ) + @login_required(login_url="/accounts/login/") @permission_required("agenda_culturel.delete_tag") def delete_tag(request, t): respage = reverse_lazy("view_all_tags") if request.method == "POST": - + # remove tag from events events = Event.objects.filter(tags__contains=[t]) for e in events: @@ -2450,9 +2790,7 @@ def delete_tag(request, t): messages.success( request, - (_( - "The tag {} has been successfully deleted." 
- )).format(t), + (_("The tag {} has been successfully deleted.")).format(t), ) return HttpResponseRedirect(respage) else: @@ -2478,4 +2816,4 @@ def clear_cache(request): return render( request, "agenda_culturel/clear_cache.html", - ) \ No newline at end of file + ) diff --git a/src/scripts/create_categories.py b/src/scripts/create_categories.py index 09f4180..9dd3f5a 100644 --- a/src/scripts/create_categories.py +++ b/src/scripts/create_categories.py @@ -21,7 +21,5 @@ def run(): if len(Category.objects.all()) <= 1: print("On créée des catégories") for c in categories: - cat = Category( - name=c[0] - ) + cat = Category(name=c[0]) cat.save() diff --git a/src/scripts/create_reference_locations.py b/src/scripts/create_reference_locations.py index 2e777a5..0346ac4 100644 --- a/src/scripts/create_reference_locations.py +++ b/src/scripts/create_reference_locations.py @@ -1,23 +1,28 @@ -import json, os +import json +import os from django.contrib.gis.geos import Point from agenda_culturel.models import ReferenceLocation + def run(): input_file = os.path.dirname(__file__) + os.path.sep + "communes.json" data = [] - with open(input_file, 'r') as file: + with open(input_file, "r") as file: data = json.load(file) - + # remove all locations ReferenceLocation.objects.all().delete() - objs = [ReferenceLocation(location=Point(c["geo_point_2d"]["lon"], c["geo_point_2d"]["lat"]), - name=c["com_name"], - main=c["main"] if "main" in c else 0, - suggested_distance=c["suggested"] if "suggested" in c else None) for c in data] + objs = [ + ReferenceLocation( + location=Point(c["geo_point_2d"]["lon"], c["geo_point_2d"]["lat"]), + name=c["com_name"], + main=c["main"] if "main" in c else 0, + suggested_distance=c["suggested"] if "suggested" in c else None, + ) + for c in data + ] objs = ReferenceLocation.objects.bulk_create(objs, ignore_conflicts=True) - - diff --git a/src/scripts/profiling.py b/src/scripts/profiling.py index 601d9c9..137b9ed 100644 --- a/src/scripts/profiling.py +++ b/src/scripts/profiling.py @@ -1,7 +1,6 @@ from django.test import RequestFactory import django.urls -from django.contrib.auth.models import AnonymousUser -from django.http import HttpRequest +from django.contrib.auth.models import AnonymousUser import cProfile # inspiré de https://enix.io/fr/blog/django-performance-profiler/ @@ -26,8 +25,8 @@ cProfile.runctx( None, locals(), sort="tottime", - filename='logs' + filename="logs", ) -# puis visualiser avec -# snakeviz src/logs +# puis visualiser avec +# snakeviz src/logs diff --git a/src/scripts/set_pause.py b/src/scripts/set_pause.py index 73e4680..8a8d80d 100644 --- a/src/scripts/set_pause.py +++ b/src/scripts/set_pause.py @@ -1,9 +1,10 @@ from agenda_culturel.models import RecurrentImport + def run(): rimports = RecurrentImport.objects.filter(processor="Facebook events").all() for r in rimports: r.downloader = "chromium (pause)" - + RecurrentImport.objects.bulk_update(rimports, fields=["downloader"])
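
For reference, PaginatorFilter.update_param (reformatted in views.py above) builds pagination links by replacing or appending a single query-string parameter. Below is a minimal standalone sketch of that behaviour in plain Python, with no Django dependency; the sample URLs are illustrative only, not taken from the patch:

    # Illustrative sketch, not part of the patch: mirrors the replace-or-append
    # logic of PaginatorFilter.update_param. Note that, like the patched helper,
    # it drops any pair whose name merely *starts with* `key`, not an exact match.
    def update_param(params, key, value):
        # Split "path?query" at the first "?"; query is "" when there is none.
        root, _, query = params.partition("?")
        others = [o for o in query.split("&") if o and not o.startswith(key)]
        others.append(key + "=" + str(value))
        return root + "?" + "&".join(others)

    # Expected behaviour on a paginated, filtered listing URL:
    assert update_param("/events?page=2&tag=jazz", "page", 3) == "/events?tag=jazz&page=3"
    assert update_param("/events", "page", 1) == "/events?page=1"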