diff --git a/src/agenda_culturel/admin.py b/src/agenda_culturel/admin.py
index 8c5d32a..56941ca 100644
--- a/src/agenda_culturel/admin.py
+++ b/src/agenda_culturel/admin.py
@@ -11,7 +11,7 @@ from .models import (
Place,
Message,
ReferenceLocation,
- Organisation
+ Organisation,
)
from django_better_admin_arrayfield.admin.mixins import DynamicArrayMixin
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
diff --git a/src/agenda_culturel/calendar.py b/src/agenda_culturel/calendar.py
index 1804b52..8836983 100644
--- a/src/agenda_culturel/calendar.py
+++ b/src/agenda_culturel/calendar.py
@@ -8,10 +8,10 @@ from django.http import Http404
from django.db.models import CharField
from django.db.models.functions import Lower
+import logging
CharField.register_lookup(Lower)
-import logging
logger = logging.getLogger(__name__)
@@ -43,7 +43,7 @@ class DayInCalendar:
self.events_by_category = {}
self.time_intervals = None
- self.id = d.strftime('%Y-%m-%d')
+ self.id = d.strftime("%Y-%m-%d")
def is_in_past(self):
return self.in_past
@@ -90,6 +90,7 @@ class DayInCalendar:
def _add_event_internal(self, event):
from .models import Category
from copy import copy
+
# copy event
local_event = copy(event)
@@ -115,9 +116,9 @@ class DayInCalendar:
def filter_events(self):
self.events.sort(
- key=lambda e: DayInCalendar.midnight
- if e.start_time is None
- else e.start_time
+ key=lambda e: (
+ DayInCalendar.midnight if e.start_time is None else e.start_time
+ )
)
self.today_night = False
if self.is_today():
@@ -126,8 +127,10 @@ class DayInCalendar:
nday = now.date()
ntime = now.time()
found = False
- for idx,e in enumerate(self.events):
- if (nday < e.start_day) or (nday == e.start_day and e.start_time and ntime <= e.start_time):
+ for idx, e in enumerate(self.events):
+ if (nday < e.start_day) or (
+ nday == e.start_day and e.start_time and ntime <= e.start_time
+ ):
self.events[idx].is_first_after_now = True
found = True
break
@@ -139,17 +142,32 @@ class DayInCalendar:
def events_by_category_ordered(self):
from .models import Category
+
if DayInCalendar.cats is None:
- DayInCalendar.cats = Category.objects.order_by('position')
+ DayInCalendar.cats = Category.objects.order_by("position")
result = []
for c in DayInCalendar.cats:
if c.name in self.events_by_category:
result.append((c.name, self.events_by_category[c.name]))
return result
- def build_time_intervals(self, all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers):
- self.time_intervals = [IntervalInDay(self.date, i, n[0], n[1]) for i, n in
- enumerate(zip([all_day_name] + interval_names, [all_day_short_name] + interval_short_names))]
+ def build_time_intervals(
+ self,
+ all_day_name,
+ all_day_short_name,
+ interval_names,
+ interval_short_names,
+ interval_markers,
+ ):
+ self.time_intervals = [
+ IntervalInDay(self.date, i, n[0], n[1])
+ for i, n in enumerate(
+ zip(
+ [all_day_name] + interval_names,
+ [all_day_short_name] + interval_short_names,
+ )
+ )
+ ]
for e in self.events:
if e.start_time is None:
@@ -168,20 +186,49 @@ class DayInCalendar:
def get_time_intervals(self):
if self.time_intervals is None:
if self.is_today():
- all_day_name = _('All day today')
- interval_names = [_('This morning'), _('This noon'), _('This afternoon'), _('This evening')]
+ all_day_name = _("All day today")
+ interval_names = [
+ _("This morning"),
+ _("This noon"),
+ _("This afternoon"),
+ _("This evening"),
+ ]
elif self.is_tomorrow():
name = _("Tomorrow")
- all_day_name = _('All day tomorrow')
- interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name]
+ all_day_name = _("All day tomorrow")
+ interval_names = [
+ _("%s morning") % name,
+ _("%s noon") % name,
+ _("%s afternoon") % name,
+ _("%s evening") % name,
+ ]
else:
name = _date(self.date, "l")
- all_day_name = _('All day %s') % name
- interval_names = [_('%s morning') % name, _('%s noon') % name, _('%s afternoon') % name, _('%s evening') % name]
- all_day_short_name = _('All day')
- interval_short_names = [_('Morning'), _('Noon'), _('Afternoon'), _('Evening')]
- interval_markers = [datetime.combine(self.date, time(h, m)) for h, m in [(11, 30), (13, 0), (18, 0)]]
- self.build_time_intervals(all_day_name, all_day_short_name, interval_names, interval_short_names, interval_markers)
+ all_day_name = _("All day %s") % name
+ interval_names = [
+ _("%s morning") % name,
+ _("%s noon") % name,
+ _("%s afternoon") % name,
+ _("%s evening") % name,
+ ]
+ all_day_short_name = _("All day")
+ interval_short_names = [
+ _("Morning"),
+ _("Noon"),
+ _("Afternoon"),
+ _("Evening"),
+ ]
+ interval_markers = [
+ datetime.combine(self.date, time(h, m))
+ for h, m in [(11, 30), (13, 0), (18, 0)]
+ ]
+ self.build_time_intervals(
+ all_day_name,
+ all_day_short_name,
+ interval_names,
+ interval_short_names,
+ interval_markers,
+ )
return self.time_intervals
@@ -192,10 +239,13 @@ class IntervalInDay(DayInCalendar):
self.name = name
self.short_name = short_name
super().__init__(d)
- self.id = self.id + '-' + str(id)
+ self.id = self.id + "-" + str(id)
+
class CalendarList:
- def __init__(self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None):
+ def __init__(
+ self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None
+ ):
self.firstdate = firstdate
self.lastdate = lastdate
self.now = date.today()
@@ -231,7 +281,7 @@ class CalendarList:
def get_calendar_days(self):
if self.calendar_days is None:
self.build_internal()
-
+
return self.calendar_days
def today_in_calendar(self):
@@ -253,29 +303,55 @@ class CalendarList:
if self.ignore_dup:
qs = qs.exclude(other_versions=self.ignore_dup)
- startdatetime = timezone.make_aware(datetime.combine(self.c_firstdate, time.min), timezone.get_default_timezone())
- lastdatetime = timezone.make_aware(datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone())
- qs = qs.filter(
- (Q(recurrences__isnull=False) &
- (Q(recurrence_dtend__isnull=True) & Q(recurrence_dtstart__isnull=False) & Q(recurrence_dtstart__lte=lastdatetime))
+ startdatetime = timezone.make_aware(
+ datetime.combine(self.c_firstdate, time.min),
+ timezone.get_default_timezone(),
+ )
+ lastdatetime = timezone.make_aware(
+ datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone()
+ )
+ qs = (
+ qs.filter(
+ (
+ Q(recurrences__isnull=False)
+ & (
+ Q(recurrence_dtend__isnull=True)
+ & Q(recurrence_dtstart__isnull=False)
+ & Q(recurrence_dtstart__lte=lastdatetime)
+ )
+ | (
+ Q(recurrence_dtend__isnull=False)
+ & ~(
+ Q(recurrence_dtstart__gt=lastdatetime)
+ | Q(recurrence_dtend__lt=startdatetime)
+ )
+ )
+ )
| (
- Q(recurrence_dtend__isnull=False)
- & ~(
- Q(recurrence_dtstart__gt=lastdatetime)
- | Q(recurrence_dtend__lt=startdatetime)
+ Q(
+ start_day__lte=self.c_lastdate
+ ) # start before the end of the desired period
+ & (
+ (
+ Q(end_day__isnull=True) & Q(start_day__gte=self.c_firstdate)
+ ) # end after the begining of desired period
+ | (Q(end_day__isnull=False) & Q(end_day__gte=self.c_firstdate))
)
)
)
- | (Q(start_day__lte=self.c_lastdate) & # start before the end of the desired period
- ((Q(end_day__isnull=True) & Q(start_day__gte=self.c_firstdate)) # end after the begining of desired period
- | (Q(end_day__isnull=False) & Q(end_day__gte=self.c_firstdate))))
- ).filter(
- Q(other_versions__isnull=True) |
- Q(other_versions__representative=F('pk')) |
- Q(other_versions__representative__isnull=True)
- ).order_by("start_time", "title__unaccent__lower")
-
- qs = qs.select_related("category").select_related("other_versions").select_related("other_versions__representative")
+ .filter(
+ Q(other_versions__isnull=True)
+ | Q(other_versions__representative=F("pk"))
+ | Q(other_versions__representative__isnull=True)
+ )
+ .order_by("start_time", "title__unaccent__lower")
+ )
+
+ qs = (
+ qs.select_related("category")
+ .select_related("other_versions")
+ .select_related("other_versions__representative")
+ )
self.events = qs
firstdate = datetime.fromordinal(self.c_firstdate.toordinal())
@@ -292,12 +368,16 @@ class CalendarList:
if e.start_day >= self.firstdate and e.start_day <= self.lastdate:
self.calendar_days[e.start_day.__str__()].add_event(e)
else:
- for d in daterange(max(e.start_day, self.firstdate), min(e.end_day, self.lastdate)):
+ for d in daterange(
+ max(e.start_day, self.firstdate), min(e.end_day, self.lastdate)
+ ):
self.calendar_days[d.__str__()].add_event(e)
else:
for e_rec in e.get_recurrences_between(firstdate, lastdate):
end = e_rec.start_day if e_rec.end_day is None else e_rec.end_day
- for d in daterange(max(e_rec.start_day, self.firstdate), min(end, self.lastdate)):
+ for d in daterange(
+ max(e_rec.start_day, self.firstdate), min(end, self.lastdate)
+ ):
self.calendar_days[d.__str__()].add_event(e_rec)
def create_calendar_days(self):
@@ -337,7 +417,10 @@ class CalendarList:
def export_to_ics(self, request):
from .models import Event
- events = [event for day in self.get_calendar_days().values() for event in day.events]
+
+ events = [
+ event for day in self.get_calendar_days().values() for event in day.events
+ ]
return Event.export_to_ics(events, request)
@@ -370,7 +453,7 @@ class CalendarWeek(CalendarList):
try:
first = date.fromisocalendar(self.year, self.week, 1)
last = date.fromisocalendar(self.year, self.week, 7)
- except:
+ except Exception:
raise Http404()
super().__init__(first, last, filter, qs)
diff --git a/src/agenda_culturel/celery.py b/src/agenda_culturel/celery.py
index acc1bb6..bbd426e 100644
--- a/src/agenda_culturel/celery.py
+++ b/src/agenda_culturel/celery.py
@@ -11,12 +11,13 @@ from celery.signals import worker_ready
from contextlib import contextmanager
-from .import_tasks.downloader import *
-from .import_tasks.extractor import *
-from .import_tasks.importer import *
+from .import_tasks.extractor import Extractor
+from .import_tasks.importer import URL2Events
+from .import_tasks.downloader import SimpleDownloader, ChromiumHeadlessDownloader
from .import_tasks.custom_extractors import *
from .import_tasks.generic_extractors import *
+from django.core.cache import cache
# Set the default Django settings module for the 'celery' program.
APP_ENV = os.getenv("APP_ENV", "dev")
@@ -24,7 +25,6 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", f"agenda_culturel.settings.{APP_
app = Celery("agenda_culturel")
-from django.core.cache import cache
logger = get_task_logger(__name__)
@@ -40,6 +40,7 @@ app.autodiscover_tasks()
LOCK_EXPIRE = 60 * 10 # Lock expires in 10 minutes
+
@contextmanager
def memcache_chromium_lock(oid):
lock_id = "chromium-lock"
@@ -58,6 +59,7 @@ def memcache_chromium_lock(oid):
# also don't release the lock if we didn't acquire it
cache.delete(lock_id)
+
@contextmanager
def free_memecache_chromium_lock():
lock_id = "chromium-lock"
@@ -105,7 +107,6 @@ def import_events_from_json(self, json):
close_import_task(self.request.id, success, error_message, importer)
-
class ChromiumTask(Task):
_chm = None
@@ -116,20 +117,18 @@ class ChromiumTask(Task):
return self._chm
def restartDownloader(self):
- logger.warning('Restart selenium')
- if not self._chm is None:
+ logger.info("Restart selenium")
+ if self._chm is not None:
del self._chm
self._chm = ChromiumHeadlessDownloader()
-
def run_recurrent_import_internal(rimport, downloader, req_id):
from agenda_culturel.models import RecurrentImport, BatchImportation
from .db_importer import DBImporterEvents
logger.info("Run recurrent import: {}".format(req_id))
-
# create a batch importation
importation = BatchImportation(recurrentImport=rimport, celery_id=req_id)
# save batch importation
@@ -138,7 +137,6 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
# create an importer
importer = DBImporterEvents(req_id)
-
if rimport.processor == RecurrentImport.PROCESSOR.ICAL:
extractor = ical.ICALExtractor()
elif rimport.processor == RecurrentImport.PROCESSOR.ICALNOBUSY:
@@ -192,21 +190,28 @@ def run_recurrent_import_internal(rimport, downloader, req_id):
location = rimport.defaultLocation
tags = rimport.defaultTags
published = rimport.defaultPublished
- organisers = [] if rimport.defaultOrganiser is None else [rimport.defaultOrganiser.pk]
+ organisers = (
+ [] if rimport.defaultOrganiser is None else [rimport.defaultOrganiser.pk]
+ )
try:
# get events from website
events = u2e.process(
url,
browsable_url,
- default_values={"category": category, "location": location, "tags": tags, "organisers": organisers},
+ default_values={
+ "category": category,
+ "location": location,
+ "tags": tags,
+ "organisers": organisers,
+ },
published=published,
)
# force location if required
if rimport.forceLocation and location:
- for i, e in enumerate(events['events']):
- events['events'][i]["location"] = location
+ for i, e in enumerate(events["events"]):
+ events["events"][i]["location"] = location
# convert it to json
json_events = json.dumps(events, default=str)
@@ -249,10 +254,15 @@ def run_recurrent_import(self, pklist):
# only one thread using Chromium can run at a time,
# to prevent from errors (including strange Facebook errors)
- if rimport.downloader in [RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS, RecurrentImport.DOWNLOADER.CHROMIUMHEADLESSPAUSE]:
+ if rimport.downloader in [
+ RecurrentImport.DOWNLOADER.CHROMIUMHEADLESS,
+ RecurrentImport.DOWNLOADER.CHROMIUMHEADLESSPAUSE,
+ ]:
with memcache_chromium_lock(self.app.oid) as acquired:
if acquired:
- valid = run_recurrent_import_internal(rimport, downloader, self.request.id)
+ valid = run_recurrent_import_internal(
+ rimport, downloader, self.request.id
+ )
if not valid:
self.restartDownloader()
return pklist[1:] if is_list else True
@@ -272,10 +282,14 @@ def run_recurrent_import(self, pklist):
def run_recurrent_imports_from_list(pklist):
-
- tasks = chain(run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s() for i in range(len(pklist)))
+
+ tasks = chain(
+ run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s()
+ for i in range(len(pklist))
+ )
tasks.delay()
+
@app.task(bind=True)
def daily_imports(self):
from agenda_culturel.models import RecurrentImport
@@ -288,29 +302,35 @@ def daily_imports(self):
run_recurrent_imports_from_list([imp.pk for imp in imports])
-SCREENSHOT_FILE = settings.MEDIA_ROOT + '/screenshot.png'
+SCREENSHOT_FILE = settings.MEDIA_ROOT + "/screenshot.png"
+
@app.task(bind=True)
def screenshot(self):
downloader = ChromiumHeadlessDownloader(noimage=False)
downloader.screenshot("https://pommesdelune.fr", SCREENSHOT_FILE)
+
@worker_ready.connect
def at_start(sender, **k):
logger.info("Worker is ready")
- # create screenshot
+ # create screenshot
if not os.path.isfile(SCREENSHOT_FILE):
logger.info("Init screenshot file")
with sender.app.connection() as conn:
- sender.app.send_task('agenda_culturel.celery.screenshot', None, connection=conn)
+ sender.app.send_task(
+ "agenda_culturel.celery.screenshot", None, connection=conn
+ )
else:
logger.info("Screenshot file already exists")
- # cancel running tasks
+ # cancel running tasks
from agenda_culturel.models import BatchImportation
- logger.info("Cancel running importation tasks")
- running_tasks = BatchImportation.objects.filter(status=BatchImportation.STATUS.RUNNING).update(status=BatchImportation.STATUS.CANCELED)
+ logger.info("Cancel running importation tasks")
+ BatchImportation.objects.filter(status=BatchImportation.STATUS.RUNNING).update(
+ status=BatchImportation.STATUS.CANCELED
+ )
@app.task(bind=True)
@@ -319,7 +339,9 @@ def run_all_recurrent_imports(self, only_fb=False):
logger.info("Run all imports")
if only_fb:
- imports = RecurrentImport.objects.filter(processor=RecurrentImport.PROCESSOR.FBEVENTS).order_by("pk")
+ imports = RecurrentImport.objects.filter(
+ processor=RecurrentImport.PROCESSOR.FBEVENTS
+ ).order_by("pk")
else:
imports = RecurrentImport.objects.all().order_by("pk")
@@ -334,7 +356,14 @@ def run_all_recurrent_imports_failed(self):
imports = RecurrentImport.objects.all().order_by("pk")
imports = [(imp.pk, imp.last_import()) for imp in imports]
- run_recurrent_imports_from_list([imp[0] for imp in imports if (not imp[1] is None) and imp[1].status == BatchImportation.STATUS.FAILED])
+ run_recurrent_imports_from_list(
+ [
+ imp[0]
+ for imp in imports
+ if (imp[1] is not None) and imp[1].status == BatchImportation.STATUS.FAILED
+ ]
+ )
+
@app.task(bind=True)
def run_all_recurrent_imports_canceled(self):
@@ -344,7 +373,14 @@ def run_all_recurrent_imports_canceled(self):
imports = RecurrentImport.objects.all().order_by("pk")
imports = [(imp.pk, imp.last_import()) for imp in imports]
- run_recurrent_imports_from_list([imp[0] for imp in imports if (not imp[1] is None) and imp[1].status == BatchImportation.STATUS.CANCELED])
+ run_recurrent_imports_from_list(
+ [
+ imp[0]
+ for imp in imports
+ if (imp[1] is not None)
+ and imp[1].status == BatchImportation.STATUS.CANCELED
+ ]
+ )
@app.task(bind=True)
@@ -358,11 +394,21 @@ def weekly_imports(self):
run_recurrent_imports_from_list([imp.pk for imp in imports])
+
@app.task(base=ChromiumTask, bind=True)
-def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id=None, email=None, comments=None):
+def import_events_from_url(
+ self,
+ urls,
+ cat=None,
+ tags=None,
+ force=False,
+ user_id=None,
+ email=None,
+ comments=None,
+):
from .db_importer import DBImporterEvents
- from agenda_culturel.models import RecurrentImport, BatchImportation
- from agenda_culturel.models import Event, Category
+ from agenda_culturel.models import BatchImportation
+ from agenda_culturel.models import Event
if isinstance(urls, list):
url = urls[0]
@@ -374,9 +420,9 @@ def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id
with memcache_chromium_lock(self.app.oid) as acquired:
if acquired:
-
- logger.info("URL import: {}".format(self.request.id) + " force " + str(force))
-
+ logger.info(
+ "URL import: {}".format(self.request.id) + " force " + str(force)
+ )
# clean url
url = Extractor.clean_url(url)
@@ -390,7 +436,9 @@ def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id
importer = DBImporterEvents(self.request.id)
# create a batch importation
- importation = BatchImportation(url_source=url, celery_id=self.request.id)
+ importation = BatchImportation(
+ url_source=url, celery_id=self.request.id
+ )
# save batch importation
importation.save()
@@ -409,40 +457,48 @@ def import_events_from_url(self, urls, cat=None, tags=None, force=False, user_id
values["comments"] = comments
# get event
- events = u2e.process(
- url, published=False, default_values=values
- )
+ events = u2e.process(url, published=False, default_values=values)
if events:
# convert it to json
json_events = json.dumps(events, default=str)
# import events (from json)
- success, error_message = importer.import_events(json_events, user_id)
+ success, error_message = importer.import_events(
+ json_events, user_id
+ )
# finally, close task
- close_import_task(self.request.id, success, error_message, importer)
+ close_import_task(
+ self.request.id, success, error_message, importer
+ )
else:
- close_import_task(self.request.id, False, "Cannot find any event", importer)
+ close_import_task(
+ self.request.id, False, "Cannot find any event", importer
+ )
except Exception as e:
logger.error(e)
close_import_task(self.request.id, False, e, importer)
return urls[1:] if is_list else True
-
+
# if chromium is locked, we wait 30 seconds before retrying
raise self.retry(countdown=30)
@app.task(base=ChromiumTask, bind=True)
-def import_events_from_urls(self, urls_cat_tags, user_id=None, email=None, comments=None):
+def import_events_from_urls(
+ self, urls_cat_tags, user_id=None, email=None, comments=None
+):
for ucat in urls_cat_tags:
if ucat is not None:
url = ucat[0]
cat = ucat[1]
tags = ucat[2]
- import_events_from_url.delay(url, cat, tags, user_id=user_id, email=email, comments=comments)
+ import_events_from_url.delay(
+ url, cat, tags, user_id=user_id, email=email, comments=comments
+ )
@app.task(base=ChromiumTask, bind=True)
@@ -455,20 +511,35 @@ def update_orphan_pure_import_events(self):
srcs = RecurrentImport.objects.all().values_list("source")
today = date.today()
# get all events in future with a source and not related to a recurrent import
- urls = Event.objects.filter(Q(start_day__gte=today)).filter(
- (Q(import_sources__isnull=False) &
- (Q(modified_date__isnull=True) |
- Q(modified_date__lte=F('imported_date'))))
- & ~Q(import_sources__overlap=srcs)).values_list("import_sources", flat=True)
+ urls = (
+ Event.objects.filter(Q(start_day__gte=today))
+ .filter(
+ (
+ Q(import_sources__isnull=False)
+ & (
+ Q(modified_date__isnull=True)
+ | Q(modified_date__lte=F("imported_date"))
+ )
+ )
+ & ~Q(import_sources__overlap=srcs)
+ )
+ .values_list("import_sources", flat=True)
+ )
# get urls
urls = [url_l[0] for url_l in urls if len(url_l) > 0]
# run tasks as a chain
- tasks = chain(import_events_from_url.s(urls, force=True) if i == 0 else import_events_from_url.s(force=True) for i in range(len(urls)))
+ tasks = chain(
+ (
+ import_events_from_url.s(urls, force=True)
+ if i == 0
+ else import_events_from_url.s(force=True)
+ )
+ for i in range(len(urls))
+ )
tasks.delay()
-
app.conf.beat_schedule = {
"daily_orphans_update": {
"task": "agenda_culturel.celery.update_orphan_pure_import_events",
diff --git a/src/agenda_culturel/db_importer.py b/src/agenda_culturel/db_importer.py
index 6c69b55..405a25d 100644
--- a/src/agenda_culturel/db_importer.py
+++ b/src/agenda_culturel/db_importer.py
@@ -41,7 +41,7 @@ class DBImporterEvents:
try:
structure = json.loads(json_structure)
- except:
+ except Exception:
return (False, "JSON file is not correctly structured")
if len(structure) == 0:
@@ -73,7 +73,9 @@ class DBImporterEvents:
if not self.load_event(event):
return (False, self.error_message)
else:
- logger.warning("Event in the past, will not be imported: {}".format(event))
+ logger.warning(
+ "Event in the past, will not be imported: {}".format(event)
+ )
# finally save the loaded events in database
self.save_imported()
@@ -98,7 +100,9 @@ class DBImporterEvents:
def save_imported(self):
self.db_event_objects, self.nb_updated, self.nb_removed = Event.import_events(
- self.event_objects, remove_missing_from_source=self.url, user_id=self.user_id
+ self.event_objects,
+ remove_missing_from_source=self.url,
+ user_id=self.user_id,
)
def is_valid_event_structure(self, event):
@@ -116,7 +120,7 @@ class DBImporterEvents:
def load_event(self, event):
if self.is_valid_event_structure(event):
- logger.warning(
+ logger.info(
"Valid event: {} {}".format(event["last_modified"], event["title"])
)
event_obj = Event.from_structure(event, self.url)
diff --git a/src/agenda_culturel/filters.py b/src/agenda_culturel/filters.py
index 1418814..b8a940b 100644
--- a/src/agenda_culturel/filters.py
+++ b/src/agenda_culturel/filters.py
@@ -2,43 +2,13 @@ import django_filters
from django.utils.translation import gettext_lazy as _
from django import forms
from django.contrib.postgres.search import SearchQuery, SearchHeadline
-from django.db.models import Count, Q, F
+from django.db.models import Q, F
from datetime import date, timedelta
from urllib.parse import urlparse, parse_qs, urlencode
from django.http import QueryDict
from django.contrib.gis.measure import D
-from django.forms import (
- ModelForm,
- ValidationError,
- TextInput,
- Form,
- URLField,
- MultipleHiddenInput,
- Textarea,
- CharField,
- ChoiceField,
- RadioSelect,
- MultipleChoiceField,
- BooleanField,
- HiddenInput,
- ModelChoiceField,
-)
-
-from .forms import (
- URLSubmissionForm,
- EventForm,
- BatchImportationForm,
- FixDuplicates,
- SelectEventInList,
- MergeDuplicates,
- RecurrentImportForm,
- CategorisationRuleImportForm,
- CategorisationForm,
- EventAddPlaceForm,
- PlaceForm,
-)
from .models import (
ReferenceLocation,
@@ -47,7 +17,7 @@ from .models import (
Event,
Category,
Message,
- DuplicatedEvents
+ DuplicatedEvents,
)
@@ -63,8 +33,10 @@ class EventFilter(django_filters.FilterSet):
label="À proximité de",
method="no_filter",
empty_label=_("Select a location"),
- to_field_name='slug',
- queryset=ReferenceLocation.objects.filter(main__gt=0).order_by("-main", "name__unaccent")
+ to_field_name="slug",
+ queryset=ReferenceLocation.objects.filter(main__gt=0).order_by(
+ "-main", "name__unaccent"
+ ),
)
radius = django_filters.ChoiceFilter(
@@ -72,7 +44,7 @@ class EventFilter(django_filters.FilterSet):
method="no_filter",
choices=[(x, str(x) + " km") for x in DISTANCE_CHOICES],
null_label=None,
- empty_label=None
+ empty_label=None,
)
exclude_tags = django_filters.MultipleChoiceFilter(
@@ -114,14 +86,20 @@ class EventFilter(django_filters.FilterSet):
# urls were using pk, now we moved to slug
if len(args) > 0 and "position" in args[0] and args[0]["position"].isdigit():
args[0]._mutable = True
- el = ReferenceLocation.objects.filter(pk=int(args[0]["position"])).values("slug").first()
+ el = (
+ ReferenceLocation.objects.filter(pk=int(args[0]["position"]))
+ .values("slug")
+ .first()
+ )
args[0]["position"] = None if el is None else el["slug"]
args[0]._mutable = False
super().__init__(*args, **kwargs)
if not kwargs["request"].user.is_authenticated:
self.form.fields.pop("status")
- self.form.fields["exclude_tags"].choices = Tag.get_tag_groups(exclude=True, nb_suggestions=0)
+ self.form.fields["exclude_tags"].choices = Tag.get_tag_groups(
+ exclude=True, nb_suggestions=0
+ )
self.form.fields["tags"].choices = Tag.get_tag_groups(include=True)
def has_category_parameters(self):
@@ -135,10 +113,10 @@ class EventFilter(django_filters.FilterSet):
else:
parsed_url = urlparse(url)
params = parse_qs(parsed_url.query)
- if len(params['category']) == 0:
+ if len(params["category"]) == 0:
return url
else:
- cat_id = params['category'][0]
+ cat_id = params["category"][0]
del params["category"]
url = parsed_url._replace(query=urlencode(params, doseq=True)).geturl()
if cat_id.isdigit():
@@ -150,7 +128,6 @@ class EventFilter(django_filters.FilterSet):
else:
return url
-
def filter_recurrences(self, queryset, name, value):
# construct the full lookup expression
lookup = "__".join([name, "isnull"])
@@ -162,7 +139,10 @@ class EventFilter(django_filters.FilterSet):
@property
def qs(self):
parent = super().qs
- if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None:
+ if (
+ self.get_cleaned_data("position") is None
+ or self.get_cleaned_data("radius") is None
+ ):
return parent
d = self.get_cleaned_data("radius")
p = self.get_cleaned_data("position")
@@ -177,7 +157,9 @@ class EventFilter(django_filters.FilterSet):
p = p.location
- return parent.exclude(exact_location=False).filter(exact_location__location__distance_lt=(p, D(km=d)))
+ return parent.exclude(exact_location=False).filter(
+ exact_location__location__distance_lt=(p, D(km=d))
+ )
def has_location(self):
d = self.get_cleaned_data("radius")
@@ -229,9 +211,13 @@ class EventFilter(django_filters.FilterSet):
def get_radius(self):
return self.get_cleaned_data("radius")
- def to_str(self, prefix=''):
+ def to_str(self, prefix=""):
self.form.full_clean()
- result = ' '.join([t for t in self.get_tags()] + ["~" + t for t in self.get_exclude_tags()] + [str(self.get_position()), str(self.get_radius())])
+ result = " ".join(
+ [t for t in self.get_tags()]
+ + ["~" + t for t in self.get_exclude_tags()]
+ + [str(self.get_position()), str(self.get_radius())]
+ )
if len(result) > 0:
result = prefix + result
return result
@@ -259,20 +245,20 @@ class EventFilter(django_filters.FilterSet):
if self.request.user.is_authenticated:
if (
len(self.get_cleaned_data("status")) != 1
- or
- self.get_cleaned_data("status")[0] != Event.STATUS.PUBLISHED
+ or self.get_cleaned_data("status")[0] != Event.STATUS.PUBLISHED
):
return True
else:
- if (
- len(self.get_cleaned_data("status")) != 0
- ):
+ if len(self.get_cleaned_data("status")) != 0:
return True
return (
len(self.get_cleaned_data("tags")) != 0
or len(self.get_cleaned_data("exclude_tags")) != 0
or len(self.get_cleaned_data("recurrences")) != 0
- or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None))
+ or (
+ (self.get_cleaned_data("position") is not None)
+ and (self.get_cleaned_data("radius") is not None)
+ )
)
def is_active(self, only_categories=False):
@@ -284,16 +270,21 @@ class EventFilter(django_filters.FilterSet):
or len(self.get_cleaned_data("tags")) != 0
or len(self.get_cleaned_data("exclude_tags")) != 0
or len(self.get_cleaned_data("recurrences")) != 0
- or ((not self.get_cleaned_data("position") is None) and (not self.get_cleaned_data("radius") is None))
+ or (
+ (self.get_cleaned_data("position") is not None)
+ and (self.get_cleaned_data("radius") is not None)
+ )
)
def is_selected_tag(self, tag):
- return "tags" in self.form.cleaned_data and tag in self.form.cleaned_data["tags"]
+ return (
+ "tags" in self.form.cleaned_data and tag in self.form.cleaned_data["tags"]
+ )
def get_url_add_tag(self, tag):
full_path = self.request.get_full_path()
- result = full_path + ('&' if '?' in full_path else '?') + 'tags=' + str(tag)
+ result = full_path + ("&" if "?" in full_path else "?") + "tags=" + str(tag)
return result
@@ -302,25 +293,43 @@ class EventFilter(django_filters.FilterSet):
def set_default_values(request):
if request.user.is_authenticated:
- if request.GET.get('status', None) is None:
+ if request.GET.get("status", None) is None:
tempdict = request.GET.copy()
- tempdict['status'] = 'published'
+ tempdict["status"] = "published"
request.GET = tempdict
return request
return request
def get_position_radius(self):
- if self.get_cleaned_data("position") is None or self.get_cleaned_data("radius") is None:
+ if (
+ self.get_cleaned_data("position") is None
+ or self.get_cleaned_data("radius") is None
+ ):
return ""
else:
- return str(self.get_cleaned_data("position")) + ' (' + str(self.get_cleaned_data("radius")) + ' km)'
+ return (
+ str(self.get_cleaned_data("position"))
+ + " ("
+ + str(self.get_cleaned_data("radius"))
+ + " km)"
+ )
def is_filtered_by_position_radius(self):
- return not self.get_cleaned_data("position") is None and not self.get_cleaned_data("radius") is None
+ return (
+ self.get_cleaned_data("position") is not None
+ and self.get_cleaned_data("radius") is not None
+ )
def get_url_add_suggested_position(self, location):
result = self.request.get_full_path()
- return result + ('&' if '?' in result else '?') + 'position=' + str(location.slug) + "&radius=" + str(location.suggested_distance)
+ return (
+ result
+ + ("&" if "?" in result else "?")
+ + "position="
+ + str(location.slug)
+ + "&radius="
+ + str(location.suggested_distance)
+ )
class EventFilterAdmin(django_filters.FilterSet):
@@ -330,21 +339,24 @@ class EventFilterAdmin(django_filters.FilterSet):
representative = django_filters.MultipleChoiceFilter(
label=_("Representative version"),
- choices=[(True, _("Yes")), (False, _("No"))],
+ choices=[(True, _("Yes")), (False, _("No"))],
method="filter_by_representative",
- widget=forms.CheckboxSelectMultiple)
+ widget=forms.CheckboxSelectMultiple,
+ )
pure_import = django_filters.MultipleChoiceFilter(
label=_("Pure import"),
- choices=[(True, _("Yes")), (False, _("No"))],
+ choices=[(True, _("Yes")), (False, _("No"))],
method="filter_by_pure_import",
- widget=forms.CheckboxSelectMultiple)
+ widget=forms.CheckboxSelectMultiple,
+ )
in_recurrent_import = django_filters.MultipleChoiceFilter(
label=_("In recurrent import"),
- choices=[(True, _("Yes")), (False, _("No"))],
+ choices=[(True, _("Yes")), (False, _("No"))],
method="filter_by_in_recurrent_import",
- widget=forms.CheckboxSelectMultiple)
+ widget=forms.CheckboxSelectMultiple,
+ )
o = django_filters.ChoiceFilter(
label=_("Sort by"),
@@ -353,20 +365,26 @@ class EventFilterAdmin(django_filters.FilterSet):
("modified_date", _("last modified first")),
("imported_date", _("last imported first")),
("created_date", _("last created first")),
- ],
- method="sort_on_date")
+ ],
+ method="sort_on_date",
+ )
import_sources = django_filters.ModelChoiceFilter(
label=_("Imported from"),
method="filter_by_source",
- queryset=RecurrentImport.objects.all().order_by("name__unaccent")
+ queryset=RecurrentImport.objects.all().order_by("name__unaccent"),
)
def sort_on_date(self, queryset, name, value):
print(name, value)
- if value in ['created_date', 'imported_date', 'modified_date', 'moderated_date']:
- notnull = value + '__isnull'
- return queryset.filter(**{notnull: False}).order_by('-' + value)
+ if value in [
+ "created_date",
+ "imported_date",
+ "modified_date",
+ "moderated_date",
+ ]:
+ notnull = value + "__isnull"
+ return queryset.filter(**{notnull: False}).order_by("-" + value)
else:
return queryset
@@ -380,7 +398,7 @@ class EventFilterAdmin(django_filters.FilterSet):
else:
srcs = RecurrentImport.objects.all().values_list("source")
q = Q(import_sources__overlap=srcs)
- if value[0] == 'True':
+ if value[0] == "True":
print(q)
return queryset.filter(q)
else:
@@ -390,29 +408,29 @@ class EventFilterAdmin(django_filters.FilterSet):
if value is None or len(value) != 1:
return queryset
else:
- q = (Q(import_sources__isnull=False) &
- (Q(modified_date__isnull=True) |
- Q(modified_date__lte=F('imported_date'))))
- if value[0] == 'True':
+ q = Q(import_sources__isnull=False) & (
+ Q(modified_date__isnull=True) | Q(modified_date__lte=F("imported_date"))
+ )
+ if value[0] == "True":
print(q)
return queryset.filter(q)
else:
return queryset.exclude(q)
-
def filter_by_representative(self, queryset, name, value):
if value is None or len(value) != 1:
return queryset
else:
- q = (Q(other_versions__isnull=True) |
- Q(other_versions__representative=F('pk')) |
- Q(other_versions__representative__isnull=True))
- if value[0] == 'True':
+ q = (
+ Q(other_versions__isnull=True)
+ | Q(other_versions__representative=F("pk"))
+ | Q(other_versions__representative__isnull=True)
+ )
+ if value[0] == "True":
return queryset.filter(q)
else:
return queryset.exclude(q)
-
class Meta:
model = Event
fields = ["status"]
@@ -441,13 +459,17 @@ class MessagesFilterAdmin(django_filters.FilterSet):
fields = ["closed", "spam", "message_type"]
def is_contact_messages(self):
- return "message_type" in self.form.cleaned_data and "contact_form" in self.form.cleaned_data["message_type"]
+ return (
+ "message_type" in self.form.cleaned_data
+ and "contact_form" in self.form.cleaned_data["message_type"]
+ )
class SimpleSearchEventFilter(django_filters.FilterSet):
- q = django_filters.CharFilter(method="custom_filter",
+ q = django_filters.CharFilter(
+ method="custom_filter",
label=_("Search"),
- widget=forms.TextInput(attrs={"type": "search"})
+ widget=forms.TextInput(attrs={"type": "search"}),
)
status = django_filters.MultipleChoiceFilter(
@@ -458,23 +480,27 @@ class SimpleSearchEventFilter(django_filters.FilterSet):
)
past = django_filters.ChoiceFilter(
- label=_("In the past"),
- choices=[(False, _("No")), (True, _("Yes"))],
- null_label=None,
- empty_label=None,
- method="in_past",
- widget=forms.Select)
+ label=_("In the past"),
+ choices=[(False, _("No")), (True, _("Yes"))],
+ null_label=None,
+ empty_label=None,
+ method="in_past",
+ widget=forms.Select,
+ )
def in_past(self, queryset, name, value):
if value and value == "True":
now = date.today()
- qs = queryset.filter(start_day__lt=now).order_by("-start_day", "-start_time")
+ qs = queryset.filter(start_day__lt=now).order_by(
+ "-start_day", "-start_time"
+ )
else:
start = date.today() + timedelta(days=-2)
- qs = queryset.filter(start_day__gte=start).order_by("start_day", "start_time")
+ qs = queryset.filter(start_day__gte=start).order_by(
+ "start_day", "start_time"
+ )
return qs
-
def custom_filter(self, queryset, name, value):
search_query = SearchQuery(value, config="french")
qs = queryset.filter(
@@ -508,7 +534,6 @@ class SimpleSearchEventFilter(django_filters.FilterSet):
self.form.fields.pop("status")
-
class SearchEventFilter(django_filters.FilterSet):
tags = django_filters.CharFilter(lookup_expr="icontains")
title = django_filters.CharFilter(method="hl_filter_contains")
@@ -564,8 +589,8 @@ class SearchEventFilter(django_filters.FilterSet):
class DuplicatedEventsFilter(django_filters.FilterSet):
fixed = django_filters.BooleanFilter(
- label="Résolu",
- field_name='representative', method="fixed_qs")
+ label="Résolu", field_name="representative", method="fixed_qs"
+ )
class Meta:
model = DuplicatedEvents
@@ -587,11 +612,9 @@ class DuplicatedEventsFilter(django_filters.FilterSet):
if self.form.cleaned_data["fixed"]:
return "fixed=true"
else:
- return "fixed=false"
+ return "fixed=false"
else:
return ""
-
-
def fixed_qs(self, queryset, name, value):
return DuplicatedEvents.not_fixed_qs(queryset, value)
@@ -602,10 +625,9 @@ class RecurrentImportFilter(django_filters.FilterSet):
name = django_filters.ModelMultipleChoiceFilter(
label="Filtrer par nom",
field_name="name",
- queryset=RecurrentImport.objects.all().order_by("name__unaccent")
+ queryset=RecurrentImport.objects.all().order_by("name__unaccent"),
)
class Meta:
model = RecurrentImport
fields = ["name"]
-
diff --git a/src/agenda_culturel/forms.py b/src/agenda_culturel/forms.py
index b152d91..c7acd99 100644
--- a/src/agenda_culturel/forms.py
+++ b/src/agenda_culturel/forms.py
@@ -13,13 +13,12 @@ from django.forms import (
BooleanField,
HiddenInput,
ModelChoiceField,
- EmailField
+ EmailField,
)
from django.forms import formset_factory
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
-from .utils import PlaceGuesser
from .models import (
Event,
RecurrentImport,
@@ -27,7 +26,7 @@ from .models import (
Place,
Category,
Tag,
- Message
+ Message,
)
from django.conf import settings
from django.core.files import File
@@ -44,13 +43,16 @@ import logging
logger = logging.getLogger(__name__)
+
class GroupFormMixin:
- template_name = 'agenda_culturel/forms/div_group.html'
+ template_name = "agenda_culturel/forms/div_group.html"
class FieldGroup:
- def __init__(self, id, label, display_label=False, maskable=False, default_masked=True):
+ def __init__(
+ self, id, label, display_label=False, maskable=False, default_masked=True
+ ):
self.id = id
self.label = label
self.display_label = display_label
@@ -58,7 +60,7 @@ class GroupFormMixin:
self.default_masked = default_masked
def toggle_field_name(self):
- return 'group_' + self.id
+ return "group_" + self.id
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -67,17 +69,32 @@ class GroupFormMixin:
def add_group(self, *args, **kwargs):
self.groups.append(GroupFormMixin.FieldGroup(*args, **kwargs))
if self.groups[-1].maskable:
- self.fields[self.groups[-1].toggle_field_name()] = BooleanField(required=False)
+ self.fields[self.groups[-1].toggle_field_name()] = BooleanField(
+ required=False
+ )
self.fields[self.groups[-1].toggle_field_name()].toggle_group = True
def get_fields_in_group(self, g):
- return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and hasattr(f.field, "group_id") and f.field.group_id == g.id]
+ return [
+ f
+ for f in self.visible_fields()
+ if not hasattr(f.field, "toggle_group")
+ and hasattr(f.field, "group_id")
+ and f.field.group_id == g.id
+ ]
def get_no_group_fields(self):
- return [f for f in self.visible_fields() if not hasattr(f.field, "toggle_group") and (not hasattr(f.field, "group_id") or f.field.group_id == None)]
+ return [
+ f
+ for f in self.visible_fields()
+ if not hasattr(f.field, "toggle_group")
+ and (not hasattr(f.field, "group_id") or f.field.group_id is None)
+ ]
def fields_by_group(self):
- return [(g, self.get_fields_in_group(g)) for g in self.groups] + [(GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())]
+ return [(g, self.get_fields_in_group(g)) for g in self.groups] + [
+ (GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())
+ ]
def clean(self):
result = super().clean()
@@ -86,19 +103,26 @@ class GroupFormMixin:
data = dict(self.data)
# for each masked group, we remove data
for g in self.groups:
- if g.maskable and not g.toggle_field_name() in data:
+ if g.maskable and g.toggle_field_name() not in data:
fields = self.get_fields_in_group(g)
for f in fields:
self.cleaned_data[f.name] = None
-
+
return result
+
class TagForm(ModelForm):
- required_css_class = 'required'
+ required_css_class = "required"
class Meta:
model = Tag
- fields = ["name", "description", "in_included_suggestions", "in_excluded_suggestions", "principal"]
+ fields = [
+ "name",
+ "description",
+ "in_included_suggestions",
+ "in_excluded_suggestions",
+ "principal",
+ ]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -107,45 +131,45 @@ class TagForm(ModelForm):
class TagRenameForm(Form):
- required_css_class = 'required'
+ required_css_class = "required"
- name = CharField(
- label=_('Name of new tag'),
- required=True
- )
+ name = CharField(label=_("Name of new tag"), required=True)
force = BooleanField(
- label=_('Force renaming despite the existence of events already using the chosen tag.'),
+ label=_(
+ "Force renaming despite the existence of events already using the chosen tag."
+ ),
)
def __init__(self, *args, **kwargs):
force = kwargs.pop("force", False)
name = kwargs.pop("name", None)
super().__init__(*args, **kwargs)
- if not (force or (not len(args) == 0 and 'force' in args[0])):
+ if not (force or (not len(args) == 0 and "force" in args[0])):
del self.fields["force"]
- if not name is None and self.fields["name"].initial is None:
+ if name is not None and self.fields["name"].initial is None:
self.fields["name"].initial = name
-
def is_force(self):
- return "force" in self.fields and self.cleaned_data["force"] == True
+ return "force" in self.fields and self.cleaned_data["force"] is True
class SimpleContactForm(GroupFormMixin, Form):
- email = EmailField(
+ email = EmailField(
label=_("Your email"),
help_text=_("Your email address"),
max_length=254,
- required=False
+ required=False,
)
comments = CharField(
label=_("Comments"),
- help_text=_("Your message for the moderation team (comments, clarifications, requests...)"),
+ help_text=_(
+ "Your message for the moderation team (comments, clarifications, requests...)"
+ ),
widget=Textarea,
max_length=2048,
- required=False
+ required=False,
)
def __init__(self, *args, **kwargs):
@@ -153,21 +177,23 @@ class SimpleContactForm(GroupFormMixin, Form):
super().__init__(*args, **kwargs)
if not is_authenticated:
- self.add_group('communication',
- _('Receive notification of publication or leave a message for moderation'),
- maskable=True,
- default_masked=True)
- self.fields["email"].group_id = 'communication'
- self.fields["comments"].group_id = 'communication'
+ self.add_group(
+ "communication",
+ _(
+ "Receive notification of publication or leave a message for moderation"
+ ),
+ maskable=True,
+ default_masked=True,
+ )
+ self.fields["email"].group_id = "communication"
+ self.fields["comments"].group_id = "communication"
else:
del self.fields["email"]
del self.fields["comments"]
-
-
class URLSubmissionForm(GroupFormMixin, Form):
- required_css_class = 'required'
+ required_css_class = "required"
url = URLField(max_length=512)
category = ModelChoiceField(
@@ -177,28 +203,27 @@ class URLSubmissionForm(GroupFormMixin, Form):
required=False,
)
tags = MultipleChoiceField(
- label=_("Tags"),
- initial=None,
- choices=[],
- required=False
+ label=_("Tags"), initial=None, choices=[], required=False
)
def __init__(self, *args, **kwargs):
- is_authenticated = kwargs.pop("is_authenticated", False)
+ kwargs.pop("is_authenticated", False)
super().__init__(*args, **kwargs)
self.fields["tags"].choices = Tag.get_tag_groups(all=True)
- self.add_group('event', _('Event'))
- self.fields["url"].group_id = 'event'
- self.fields["category"].group_id = 'event'
- self.fields["tags"].group_id = 'event'
+ self.add_group("event", _("Event"))
+ self.fields["url"].group_id = "event"
+ self.fields["category"].group_id = "event"
+ self.fields["tags"].group_id = "event"
class URLSubmissionFormWithContact(SimpleContactForm, URLSubmissionForm):
pass
+
URLSubmissionFormSet = formset_factory(URLSubmissionForm, extra=9, min_num=1)
+
class DynamicArrayWidgetURLs(DynamicArrayWidget):
template_name = "agenda_culturel/widgets/widget-urls.html"
@@ -208,13 +233,10 @@ class DynamicArrayWidgetTags(DynamicArrayWidget):
class RecurrentImportForm(ModelForm):
- required_css_class = 'required'
+ required_css_class = "required"
defaultTags = MultipleChoiceField(
- label=_("Tags"),
- initial=None,
- choices=[],
- required=False
+ label=_("Tags"), initial=None, choices=[], required=False
)
class Meta:
@@ -227,7 +249,7 @@ class RecurrentImportForm(ModelForm):
class CategorisationRuleImportForm(ModelForm):
- required_css_class = 'required'
+ required_css_class = "required"
class Meta:
model = CategorisationRule
@@ -235,20 +257,16 @@ class CategorisationRuleImportForm(ModelForm):
class EventForm(GroupFormMixin, ModelForm):
- required_css_class = 'required'
+ required_css_class = "required"
old_local_image = CharField(widget=HiddenInput(), required=False)
simple_cloning = CharField(widget=HiddenInput(), required=False)
cloning = CharField(widget=HiddenInput(), required=False)
tags = MultipleChoiceField(
- label=_("Tags"),
- initial=None,
- choices=[],
- required=False
+ label=_("Tags"), initial=None, choices=[], required=False
)
-
class Meta:
model = Event
exclude = [
@@ -260,7 +278,7 @@ class EventForm(GroupFormMixin, ModelForm):
"moderated_by_user",
"modified_by_user",
"created_by_user",
- "imported_by_user"
+ "imported_by_user",
]
widgets = {
"start_day": TextInput(
@@ -292,62 +310,68 @@ class EventForm(GroupFormMixin, ModelForm):
if not is_authenticated:
del self.fields["status"]
del self.fields["organisers"]
- self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
- self.fields['category'].empty_label = None
- self.fields['category'].initial = Category.get_default_category()
- self.fields['tags'].choices = Tag.get_tag_groups(all=True)
+ self.fields["category"].queryset = self.fields["category"].queryset.order_by(
+ "name"
+ )
+ self.fields["category"].empty_label = None
+ self.fields["category"].initial = Category.get_default_category()
+ self.fields["tags"].choices = Tag.get_tag_groups(all=True)
# set groups
- self.add_group('main', _('Main fields'))
- self.fields['title'].group_id = 'main'
+ self.add_group("main", _("Main fields"))
+ self.fields["title"].group_id = "main"
- self.add_group('start', _('Start of event'))
- self.fields['start_day'].group_id = 'start'
- self.fields['start_time'].group_id = 'start'
+ self.add_group("start", _("Start of event"))
+ self.fields["start_day"].group_id = "start"
+ self.fields["start_time"].group_id = "start"
- self.add_group('end', _('End of event'))
- self.fields['end_day'].group_id = 'end'
- self.fields['end_time'].group_id = 'end'
+ self.add_group("end", _("End of event"))
+ self.fields["end_day"].group_id = "end"
+ self.fields["end_time"].group_id = "end"
- self.add_group('recurrences',
- _('This is a recurring event'),
- maskable=True,
- default_masked=not (self.instance and
- self.instance.recurrences and
- self.instance.recurrences.rrules and
- len(self.instance.recurrences.rrules) > 0))
+ self.add_group(
+ "recurrences",
+ _("This is a recurring event"),
+ maskable=True,
+ default_masked=not (
+ self.instance
+ and self.instance.recurrences
+ and self.instance.recurrences.rrules
+ and len(self.instance.recurrences.rrules) > 0
+ ),
+ )
- self.fields['recurrences'].group_id = 'recurrences'
+ self.fields["recurrences"].group_id = "recurrences"
- self.add_group('details', _('Details'))
- self.fields['description'].group_id = 'details'
+ self.add_group("details", _("Details"))
+ self.fields["description"].group_id = "details"
if is_authenticated:
- self.fields['organisers'].group_id = 'details'
+ self.fields["organisers"].group_id = "details"
- self.add_group('location', _('Location'))
- self.fields['location'].group_id = 'location'
- self.fields['exact_location'].group_id = 'location'
+ self.add_group("location", _("Location"))
+ self.fields["location"].group_id = "location"
+ self.fields["exact_location"].group_id = "location"
- self.add_group('illustration', _('Illustration'))
- self.fields['local_image'].group_id = 'illustration'
- self.fields['image_alt'].group_id = 'illustration'
+ self.add_group("illustration", _("Illustration"))
+ self.fields["local_image"].group_id = "illustration"
+ self.fields["image_alt"].group_id = "illustration"
- self.add_group('urls', _('URLs'))
- self.fields["reference_urls"].group_id = 'urls'
+ self.add_group("urls", _("URLs"))
+ self.fields["reference_urls"].group_id = "urls"
if is_authenticated:
- self.add_group('meta-admin', _('Meta information'))
- self.fields['category'].group_id = 'meta-admin'
- self.fields['tags'].group_id = 'meta-admin'
- self.fields['status'].group_id = 'meta-admin'
+ self.add_group("meta-admin", _("Meta information"))
+ self.fields["category"].group_id = "meta-admin"
+ self.fields["tags"].group_id = "meta-admin"
+ self.fields["status"].group_id = "meta-admin"
else:
- self.add_group('meta', _('Meta information'))
- self.fields['category'].group_id = 'meta'
- self.fields['tags'].group_id = 'meta'
+ self.add_group("meta", _("Meta information"))
+ self.fields["category"].group_id = "meta"
+ self.fields["tags"].group_id = "meta"
def is_clone_from_url(self):
return self.cloning
-
+
def is_simple_clone_from_url(self):
return self.simple_cloning
@@ -381,58 +405,61 @@ class EventForm(GroupFormMixin, ModelForm):
super().clean()
# when cloning an existing event, we need to copy the local image
- if ((not 'local_image' in self.cleaned_data) or (self.cleaned_data['local_image'] is None)) and \
- not self.cleaned_data['old_local_image'] is None and \
- self.cleaned_data['old_local_image'] != "":
- basename = self.cleaned_data['old_local_image']
+ if (
+ (
+ ("local_image" not in self.cleaned_data)
+ or (self.cleaned_data["local_image"] is None)
+ )
+ and self.cleaned_data["old_local_image"] is not None
+ and self.cleaned_data["old_local_image"] != ""
+ ):
+ basename = self.cleaned_data["old_local_image"]
old = settings.MEDIA_ROOT + "/" + basename
if os.path.isfile(old):
- self.cleaned_data['local_image'] = File(name=basename, file=open(old, "rb"))
+ self.cleaned_data["local_image"] = File(
+ name=basename, file=open(old, "rb")
+ )
class EventFormWithContact(SimpleContactForm, EventForm):
pass
+
class MultipleChoiceFieldAcceptAll(MultipleChoiceField):
def validate(self, value):
pass
class EventModerateForm(ModelForm):
- required_css_class = 'required'
+ required_css_class = "required"
tags = MultipleChoiceField(
- label=_("Tags"),
- help_text=_('Select tags from existing ones.'),
- required=False
+ label=_("Tags"), help_text=_("Select tags from existing ones."), required=False
)
new_tags = MultipleChoiceFieldAcceptAll(
label=_("New tags"),
- help_text=_('Create new labels (sparingly). Note: by starting your tag with the characters “TW:”, you''ll create a “trigger warning” tag, and the associated events will be announced as such.'),
+        help_text=_(
+            "Create new labels (sparingly). Note: by starting your tag with the characters “TW:”, you'"
+            "ll create a “trigger warning” tag, and the associated events will be announced as such."
+        ),
widget=DynamicArrayWidget(),
- required=False
+ required=False,
)
class Meta:
model = Event
- fields = [
- "status",
- "category",
- "organisers",
- "exact_location",
- "tags"
- ]
- widgets = {
- "status": RadioSelect
- }
+ fields = ["status", "category", "organisers", "exact_location", "tags"]
+ widgets = {"status": RadioSelect}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.fields['category'].queryset = self.fields['category'].queryset.order_by('name')
- self.fields['category'].empty_label = None
- self.fields['category'].initial = Category.get_default_category()
- self.fields['tags'].choices = Tag.get_tag_groups(all=True)
+ self.fields["category"].queryset = self.fields["category"].queryset.order_by(
+ "name"
+ )
+ self.fields["category"].empty_label = None
+ self.fields["category"].initial = Category.get_default_category()
+ self.fields["tags"].choices = Tag.get_tag_groups(all=True)
def clean_new_tags(self):
return list(set(self.cleaned_data.get("new_tags")))
@@ -440,17 +467,17 @@ class EventModerateForm(ModelForm):
def clean(self):
super().clean()
- if self.cleaned_data['tags'] is None:
- self.cleaned_data['tags'] = []
+ if self.cleaned_data["tags"] is None:
+ self.cleaned_data["tags"] = []
- if not self.cleaned_data.get('new_tags') is None:
- self.cleaned_data['tags'] += self.cleaned_data.get('new_tags')
-
- self.cleaned_data['tags'] = list(set(self.cleaned_data['tags']))
+ if self.cleaned_data.get("new_tags") is not None:
+ self.cleaned_data["tags"] += self.cleaned_data.get("new_tags")
+
+ self.cleaned_data["tags"] = list(set(self.cleaned_data["tags"]))
class BatchImportationForm(Form):
- required_css_class = 'required'
+ required_css_class = "required"
json = CharField(
label="JSON",
@@ -461,14 +488,14 @@ class BatchImportationForm(Form):
class FixDuplicates(Form):
- required_css_class = 'required'
+ required_css_class = "required"
action = ChoiceField()
def __init__(self, *args, **kwargs):
edup = kwargs.pop("edup", None)
events = edup.get_duplicated()
- nb_events = len(events)
+ len(events)
super().__init__(*args, **kwargs)
choices = []
@@ -485,7 +512,7 @@ class FixDuplicates(Form):
choices += [
(
"Select-" + str(e.pk),
- _("Select {} as representative version.").format(auc[i] + msg)
+ _("Select {} as representative version.").format(auc[i] + msg),
)
]
@@ -494,11 +521,12 @@ class FixDuplicates(Form):
choices += [
(
"Update-" + str(e.pk),
- _("Update {} using some fields from other versions (interactive mode).").format(auc[i])
+ _(
+ "Update {} using some fields from other versions (interactive mode)."
+ ).format(auc[i]),
)
]
-
extra = ""
if edup.has_local_version():
extra = _(" Warning: a version is already locally modified.")
@@ -511,9 +539,7 @@ class FixDuplicates(Form):
for i, e in enumerate(events):
if e.status != Event.STATUS.TRASH:
choices += [
- (
- "Remove-" + str(e.pk),
- _("Make {} independent.").format(auc[i]))
+ ("Remove-" + str(e.pk), _("Make {} independent.").format(auc[i]))
]
choices += [("NotDuplicates", _("Make all versions independent."))]
@@ -533,7 +559,11 @@ class FixDuplicates(Form):
return self.cleaned_data["action"].startswith("Remove")
def get_selected_event_code(self):
- if self.is_action_select() or self.is_action_remove() or self.is_action_update():
+ if (
+ self.is_action_select()
+ or self.is_action_remove()
+ or self.is_action_update()
+ ):
return int(self.cleaned_data["action"].split("-")[-1])
else:
return None
@@ -547,21 +577,27 @@ class FixDuplicates(Form):
class SelectEventInList(Form):
- required_css_class = 'required'
+ required_css_class = "required"
- event = ChoiceField(label=_('Event'))
+ event = ChoiceField(label=_("Event"))
def __init__(self, *args, **kwargs):
events = kwargs.pop("events", None)
super().__init__(*args, **kwargs)
self.fields["event"].choices = [
- (e.pk, (e.start_time.strftime('%H:%M') + " : " if e.start_time else "") + e.title + ((", " + e.location) if e.location else "")) for e in events
+ (
+ e.pk,
+ (e.start_time.strftime("%H:%M") + " : " if e.start_time else "")
+ + e.title
+ + ((", " + e.location) if e.location else ""),
+ )
+ for e in events
]
class MergeDuplicates(Form):
- required_css_class = 'required'
+ required_css_class = "required"
checkboxes_fields = ["reference_urls", "description", "tags"]
@@ -569,20 +605,23 @@ class MergeDuplicates(Form):
self.duplicates = kwargs.pop("duplicates", None)
self.event = kwargs.pop("event", None)
self.events = list(self.duplicates.get_duplicated())
- nb_events = len(self.events)
+ len(self.events)
super().__init__(*args, **kwargs)
-
if self.event:
choices = [
- ("event_" + str(e.pk), _("Value of version {}").format(e.pk)) if e != self.event else
- ("event_" + str(e.pk), _("Value of the selected version"))
+ (
+ ("event_" + str(e.pk), _("Value of version {}").format(e.pk))
+ if e != self.event
+ else ("event_" + str(e.pk), _("Value of the selected version"))
+ )
for e in self.events
]
initial = "event_" + str(self.event.pk)
else:
choices = [
- ("event_" + str(e.pk), _("Value of version {}").format(e.pk)) for e in self.events
+ ("event_" + str(e.pk), _("Value of version {}").format(e.pk))
+ for e in self.events
]
initial = choices[0][0]
for f in self.duplicates.get_items_comparison():
@@ -605,29 +644,41 @@ class MergeDuplicates(Form):
result += (
'
"
if key == "image":
result += str(field_to_html(ev.local_image, "local_image")) + "
"
result += "'
+ result = (
+ '
'
+ super().as_p()
- + '''
''')
-
+
"""
+ )
+
return mark_safe(result)
def apply(self):
return self.cleaned_data.get("apply_to_all")
+
class MessageForm(ModelForm):
class Meta:
model = Message
fields = ["subject", "name", "email", "message", "related_event"]
- widgets = {"related_event": HiddenInput(), "user": HiddenInput() }
+ widgets = {"related_event": HiddenInput(), "user": HiddenInput()}
def __init__(self, *args, **kwargs):
self.event = kwargs.pop("event", False)
self.internal = kwargs.pop("internal", False)
super().__init__(*args, **kwargs)
- self.fields['related_event'].required = False
+ self.fields["related_event"].required = False
if self.internal:
self.fields.pop("name")
self.fields.pop("email")
+
class MessageEventForm(ModelForm):
class Meta:
@@ -867,4 +919,4 @@ class MessageEventForm(ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
- self.fields["message"].label = _("Add a comment")
\ No newline at end of file
+ self.fields["message"].label = _("Add a comment")
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py b/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py
index e5c26e4..8b6014c 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/amisdutempsdescerises.py
@@ -1,9 +1,10 @@
-from ..extractor import *
+from ..extractor import Extractor
import json
from bs4 import BeautifulSoup
-from urllib.parse import urlparse, unquote
+from urllib.parse import urlparse
import pytz
import html
+from datetime import datetime
# A class dedicated to get events from les amis du temps des cerises
@@ -13,8 +14,7 @@ class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.data = b'------toto\r\nContent-Disposition: form-data; name="p"\r\n\r\nfutur\r\n------toto--\r\n'
- self.content_type = 'multipart/form-data; boundary=----toto'
-
+ self.content_type = "multipart/form-data; boundary=----toto"
def extract(
self, content, url, url_human=None, default_values=None, published=False
@@ -30,28 +30,28 @@ class CExtractor(Extractor):
events = json.loads(content)
for e in events:
tags = []
- start_day = e["ev_date"].split(' ')[0]
+ start_day = e["ev_date"].split(" ")[0]
start_time = e["ev_time"]
title = html.unescape(e["ev_titre"])
- if "ev_sstitre" in e and e["ev_sstitre"] != '':
- title = title + ' - ' + html.unescape(e["ev_sstitre"])
+ if "ev_sstitre" in e and e["ev_sstitre"] != "":
+ title = title + " - " + html.unescape(e["ev_sstitre"])
soup = BeautifulSoup(e["ev_info"], "html.parser")
description = soup.text
location = html.unescape(e["li_nom"]) if "li_nom" in e else None
- if "ev_canceled" in e and e["ev_canceled"] != '0':
+ if "ev_canceled" in e and e["ev_canceled"] != "0":
tags += ["annulé"]
image = None
- if "ev_img" in e and e["ev_img"] != '':
+ if "ev_img" in e and e["ev_img"] != "":
image = images_basename + e["ev_img"]
-
+
naive_dt = datetime.strptime(e["ev_date"], "%Y-%m-%d %H:%M:%S")
from_dt = from_timezone.localize(naive_dt)
dt = to_timezone.normalize(from_dt)
ts = int(datetime.timestamp(dt)) * 1000
-
+
event_url = root_url + "#" + str(ts)
self.add_event(
@@ -67,6 +67,7 @@ class CExtractor(Extractor):
url_human=event_url,
start_time=start_time,
published=published,
- image=image )
-
+ image=image,
+ )
+
return self.get_structure()
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py b/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py
index cf17be7..695b14b 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/arachnee.py
@@ -1,5 +1,9 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractorNoPause
+from ..extractor import Extractor
from bs4 import BeautifulSoup
+import re
+from datetime import datetime, timedelta, date
+
# A class dedicated to get events from Arachnée Concert
# URL: https://www.arachnee-concerts.com/agenda-des-concerts/
@@ -18,27 +22,31 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
only_future=True,
- ignore_404=True
+ ignore_404=True,
):
match = re.match(r".*\&theatres=([^&]*)&.*", url)
if match:
self.theater = match[1]
- return super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
+ return super().extract(
+ content, url, url_human, default_values, published, only_future, ignore_404
+ )
def build_event_url_list(self, content, infuture_days=180):
-
+
soup = BeautifulSoup(content, "html.parser")
containers = soup.select("ul.event_container>li")
if containers:
for c in containers:
d = Extractor.parse_french_date(c.select_one(".date").text)
- l = c.select_one(".event_auditory").text
- if (self.theater is None or (l.startswith(self.theater))) and d < datetime.date.today() + timedelta(days=infuture_days):
+ la = c.select_one(".event_auditory").text
+ if (
+ self.theater is None or (la.startswith(self.theater))
+ ) and d < date.today() + timedelta(days=infuture_days):
t = Extractor.parse_french_time(c.select_one(".time").text)
e_url = c.select_one(".info a")["href"]
- if not e_url in self.possible_dates:
+ if e_url not in self.possible_dates:
self.possible_dates[e_url] = []
self.possible_dates[e_url].append((str(d) + " " + str(t)))
self.add_event_url(e_url)
@@ -53,10 +61,18 @@ class CExtractor(TwoStepsExtractorNoPause):
):
soup = BeautifulSoup(event_content, "html.parser")
- title = ", ".join([x.text for x in [soup.select_one(y) for y in [".page_title", ".artiste-subtitle"]] if x])
-
+ title = ", ".join(
+ [
+ x.text
+ for x in [
+ soup.select_one(y) for y in [".page_title", ".artiste-subtitle"]
+ ]
+ if x
+ ]
+ )
+
image = soup.select_one(".entry-image .image_wrapper img")
- if not image is None:
+ if image is not None:
image = image["src"]
descs = soup.select(".entry-content p")
@@ -74,10 +90,22 @@ class CExtractor(TwoStepsExtractorNoPause):
elif first_cat in ["theatre", "humour / one man show"]:
category = "Spectacles"
tags.append("🎭 théâtre")
- elif first_cat in ["chanson francaise", "musique du monde", "pop / rock", "rap", "rnb", "raggae", "variete"]:
+ elif first_cat in [
+ "chanson francaise",
+ "musique du monde",
+ "pop / rock",
+ "rap",
+ "rnb",
+ "raggae",
+ "variete",
+ ]:
category = "Fêtes & Concerts"
tags.append("🎵 concert")
- elif first_cat in ["comedie musicale", "humour / one man show", "spectacle equestre"]:
+ elif first_cat in [
+ "comedie musicale",
+ "humour / one man show",
+ "spectacle equestre",
+ ]:
category = "Spectacles"
elif first_cat in ["spectacle pour enfant"]:
tags = ["🎈 jeune public"]
@@ -87,12 +115,12 @@ class CExtractor(TwoStepsExtractorNoPause):
dates = soup.select("#event_ticket_content>ul>li")
for d in dates:
- dt = datetime.datetime.fromisoformat(d.select_one(".date")["content"])
+ dt = datetime.fromisoformat(d.select_one(".date")["content"])
date = dt.date()
time = dt.time()
if str(date) + " " + str(time) in self.possible_dates[event_url]:
location = d.select_one(".event_auditory").text
-
+
self.add_event_with_props(
default_values,
event_url,
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py b/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py
index 24e78fb..452a1c3 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/billetterie_cf.py
@@ -1,6 +1,10 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractor
+from ..extractor import Extractor
from bs4 import BeautifulSoup
-from datetime import timedelta
+import re
+from datetime import datetime, timedelta
+from urllib.parse import urlparse
+
# A class dedicated to get events from La Cour des 3 Coquins and Graines de spectacle
# URL: https://billetterie-c3c.clermont-ferrand.fr//
@@ -14,15 +18,26 @@ class CExtractor(TwoStepsExtractor):
default_values=None,
published=False,
only_future=True,
- ignore_404=True):
+ ignore_404=True,
+ ):
self.root_address = "https://" + urlparse(url).netloc + "/"
- return super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
+ return super().extract(
+ content, url, url_human, default_values, published, only_future, ignore_404
+ )
def category_agenda(self, category):
if not category:
return None
- mapping = {"Théâtre": "Spectacles", "Concert": "Fêtes & Concerts", "Projection": "Cinéma"}
- mapping_tag = {"Théâtre": "🎭 théâtre", "Concert": "🎵 concert", "Projection": None}
+ mapping = {
+ "Théâtre": "Spectacles",
+ "Concert": "Fêtes & Concerts",
+ "Projection": "Cinéma",
+ }
+ mapping_tag = {
+ "Théâtre": "🎭 théâtre",
+ "Concert": "🎵 concert",
+ "Projection": None,
+ }
if category in mapping:
return mapping[category], mapping_tag[category]
else:
@@ -58,14 +73,14 @@ class CExtractor(TwoStepsExtractor):
image = image["src"]
else:
image = None
-
+
description = soup.select_one(".presentation").get_text()
duration = soup.select_one("#criteres .DUREE-V .valeur-critere li")
- if not duration is None:
+ if duration is not None:
duration = Extractor.parse_french_time(duration.text)
location = soup.select_one("#criteres .LIEU-V .valeur-critere li")
- if not location is None:
+ if location is not None:
location = location.text
categories = []
@@ -84,47 +99,74 @@ class CExtractor(TwoStepsExtractor):
# TODO: parser les dates, récupérer les heures ()
dates = [o.get("value") for o in soup.select("select.datedleb_resa option")]
-
- patternCodeSite = re.compile(r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL)
- patternCodeObject = re.compile(r'.*gsw_vars\["CODEPRESTATION"\] = "(.*?)";.*', flags=re.DOTALL)
- patternCodeMoteur = re.compile(r'.*Resa.init_moteur_resa\(\'([0-9]+)\'\);.*', flags=re.DOTALL)
- scripts = soup.find_all('script')
+
+ patternCodeSite = re.compile(
+ r'.*gsw_vars\["CODEPRESTATAIRE"\] = "(.*?)";.*', flags=re.DOTALL
+ )
+ patternCodeObject = re.compile(
+ r'.*gsw_vars\["CODEPRESTATION"\] = "(.*?)";.*', flags=re.DOTALL
+ )
+ patternCodeMoteur = re.compile(
+ r".*Resa.init_moteur_resa\(\'([0-9]+)\'\);.*", flags=re.DOTALL
+ )
+ scripts = soup.find_all("script")
codeSite = ""
idObject = ""
moteur = ""
for script in scripts:
- if(patternCodeSite.match(str(script.string))):
+ if patternCodeSite.match(str(script.string)):
data = patternCodeSite.match(script.string)
codeSite = data.groups()[0]
- if(patternCodeObject.match(str(script.string))):
+ if patternCodeObject.match(str(script.string)):
data = patternCodeObject.match(script.string)
idObject = data.groups()[0]
- if(patternCodeMoteur.match(str(script.string))):
+ if patternCodeMoteur.match(str(script.string)):
data = patternCodeMoteur.match(script.string)
moteur = data.groups()[0]
-
pause = self.downloader.pause
self.downloader.pause = False
# get exact schedule need two supplementary requests
datetimes = []
- if codeSite != "" and idObject != "" and moteur != "":
+ if codeSite != "" and idObject != "" and moteur != "":
for date in dates:
# the first page is required such that the server knows the selected date
- page1 = self.downloader.get_content(self.root_address + "/booking?action=searchAjax&cid=" + moteur + "&afficheDirectDispo=" + date + "&type_prestataire=V&cle_fiche=PRESTATION-V-" + codeSite + "-" + idObject + "&datedeb=" + date)
+ self.downloader.get_content(
+ self.root_address
+ + "/booking?action=searchAjax&cid="
+ + moteur
+ + "&afficheDirectDispo="
+ + date
+ + "&type_prestataire=V&cle_fiche=PRESTATION-V-"
+ + codeSite
+ + "-"
+ + idObject
+ + "&datedeb="
+ + date
+ )
# then we get the form with hours
- page2 = self.downloader.get_content(self.root_address + "/booking?action=detailTarifsPrestationAjax&prestation=V-" + codeSite + "-" + idObject)
+ page2 = self.downloader.get_content(
+ self.root_address
+ + "/booking?action=detailTarifsPrestationAjax&prestation=V-"
+ + codeSite
+ + "-"
+ + idObject
+ )
soup2 = BeautifulSoup(page2, "html.parser")
times = [o.text for o in soup2.select("#quart_en_cours_spec option")]
for t in times:
startdate = Extractor.parse_french_date(date)
starttime = Extractor.parse_french_time(t)
- start = datetime.datetime.combine(startdate, starttime)
+ start = datetime.combine(startdate, starttime)
enddate = None
endtime = None
if duration is not None:
- end = start + timedelta(hours=duration.hour, minutes=duration.minute, seconds=duration.second)
+ end = start + timedelta(
+ hours=duration.hour,
+ minutes=duration.minute,
+ seconds=duration.second,
+ )
enddate = end.date()
endtime = end.time()
datetimes.append((startdate, starttime, enddate, endtime))
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py b/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py
index ab6747e..12719a5 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/lacomedie.py
@@ -1,6 +1,8 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractor
import json5
from bs4 import BeautifulSoup
+from datetime import datetime, date
+
# A class dedicated to get events from La Coopérative de Mai:
# URL: https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes
@@ -10,7 +12,17 @@ class CExtractor(TwoStepsExtractor):
url_referer = "https://lacomediedeclermont.com/saison24-25/"
def is_to_import_from_url(self, url):
- if any(keyword in url for keyword in ["podcast", "on-debriefe", "popcorn", "rencontreautour","rencontre-autour","les-cles-du-spectacle"]):
+ if any(
+ keyword in url
+ for keyword in [
+ "podcast",
+ "on-debriefe",
+ "popcorn",
+ "rencontreautour",
+ "rencontre-autour",
+ "les-cles-du-spectacle",
+ ]
+ ):
return False
else:
return True
@@ -40,11 +52,11 @@ class CExtractor(TwoStepsExtractor):
url = self.url.split("?")[0]
for d in list(set(dates)):
- if not self.only_future or self.now <= datetime.date.fromisoformat(d):
+ if not self.only_future or self.now <= date.fromisoformat(d):
events = self.downloader.get_content(
url,
post={"action": "load_evenements_jour", "jour": d},
- referer="https://lacomediedeclermont.com/saison24-25/"
+ referer="https://lacomediedeclermont.com/saison24-25/",
)
if events:
events = json5.loads(events)
@@ -102,7 +114,6 @@ class CExtractor(TwoStepsExtractor):
else:
image = None
-
description = soup.select("#descspec")
if description and len(description) > 0:
description = description[0].get_text().replace("Lire plus...", "")
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py b/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py
index 71f672d..dd13557 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/lacoope.py
@@ -1,9 +1,10 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractor
from ..generic_extractors.ggcal_link import GGCalendar
import re
import json5
from bs4 import BeautifulSoup
+
# A class dedicated to get events from La Coopérative de Mai:
# URL: https://www.lacoope.org/concerts-calendrier/
class CExtractor(TwoStepsExtractor):
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py b/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py
index c9bb1db..6655548 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/lapucealoreille.py
@@ -1,7 +1,8 @@
-from ..twosteps_extractor import *
-import re
+from ..twosteps_extractor import TwoStepsExtractor
+from ..extractor import Extractor
from bs4 import BeautifulSoup
+
# A class dedicated to get events from La puce à l'oreille
# URL: https://www.lapucealoreille63.fr/
class CExtractor(TwoStepsExtractor):
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py b/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py
index ceb62df..710eb16 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/laraymonde.py
@@ -1,6 +1,6 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractorNoPause
from bs4 import BeautifulSoup
-from datetime import datetime
+
# A class dedicated to get events from Raymond Bar
# URL: https://www.raymondbar.net/
@@ -10,15 +10,13 @@ class CExtractor(TwoStepsExtractorNoPause):
super().__init__()
def build_event_url_list(self, content, infuture_days=180):
-
+
soup = BeautifulSoup(content, "html.parser")
links = soup.select(".showsList .showMore")
if links:
- for l in links:
- print(l["href"])
- self.add_event_url(l["href"])
-
+ for lk in links:
+ self.add_event_url(lk["href"])
def add_event_from_content(
self,
@@ -29,19 +27,19 @@ class CExtractor(TwoStepsExtractorNoPause):
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
-
+
title = soup.select_one(".showDesc h4 a.summary").text
start_day = soup.select_one(".showDate .value-title")
start_time = None
- if not start_day is None:
+ if start_day is not None:
start_day = start_day["title"]
- if not start_day is None:
+ if start_day is not None:
start_day = start_day.split("T")[0]
-
- description = soup.select_one('.showDetails.description').text
- image = soup.select('.showDetails.description img')
- if not image is None:
+
+ description = soup.select_one(".showDetails.description").text
+ image = soup.select(".showDetails.description img")
+ if image is not None:
image_alt = image[-1]["alt"]
image = image[-1]["src"]
@@ -49,21 +47,21 @@ class CExtractor(TwoStepsExtractorNoPause):
title += " - Attention: l'heure n'a pu être extraite"
self.add_event_with_props(
- default_values,
- event_url,
- title,
- None,
- start_day,
- None,
- description,
- [],
- recurrences=None,
- uuids=[event_url],
- url_human=event_url,
- start_time=start_time,
- end_day=None,
- end_time=None,
- published=published,
- image=image,
- image_alt=image_alt
- )
\ No newline at end of file
+ default_values,
+ event_url,
+ title,
+ None,
+ start_day,
+ None,
+ description,
+ [],
+ recurrences=None,
+ uuids=[event_url],
+ url_human=event_url,
+ start_time=start_time,
+ end_day=None,
+ end_time=None,
+ published=published,
+ image=image,
+ image_alt=image_alt,
+ )
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py b/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py
index c385662..4d910b4 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/lefotomat.py
@@ -1,6 +1,8 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractor
+from ..extractor import Extractor
from bs4 import BeautifulSoup
+
# A class dedicated to get events from Le Fotomat'
# URL: https://www.lefotomat.com/
class CExtractor(TwoStepsExtractor):
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/lerio.py b/src/agenda_culturel/import_tasks/custom_extractors/lerio.py
index 9420dea..3fb9c2f 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/lerio.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/lerio.py
@@ -1,7 +1,9 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractorNoPause
+from ..extractor import Extractor
from bs4 import BeautifulSoup
from datetime import datetime
+
# A class dedicated to get events from Cinéma Le Rio (Clermont-Ferrand)
# URL: https://www.cinemalerio.com/evenements/
class CExtractor(TwoStepsExtractorNoPause):
@@ -12,13 +14,13 @@ class CExtractor(TwoStepsExtractorNoPause):
self.theater = None
def build_event_url_list(self, content, infuture_days=180):
-
+
soup = BeautifulSoup(content, "html.parser")
links = soup.select("td.seance_link a")
if links:
- for l in links:
- self.add_event_url(l["href"])
+ for lk in links:
+ self.add_event_url(lk["href"])
def to_text_select_one(soup, filter):
e = soup.select_one(filter)
@@ -37,7 +39,7 @@ class CExtractor(TwoStepsExtractorNoPause):
):
soup = BeautifulSoup(event_content, "html.parser")
-
+
title = soup.select_one("h1").text
alerte_date = CExtractor.to_text_select_one(soup, ".alerte_date")
@@ -45,9 +47,9 @@ class CExtractor(TwoStepsExtractorNoPause):
return
dh = alerte_date.split("à")
# if date is not found, we skip
- if len(dh) != 2:
+ if len(dh) != 2:
return
-
+
date = Extractor.parse_french_date(dh[0], default_year=datetime.now().year)
time = Extractor.parse_french_time(dh[1])
@@ -56,35 +58,43 @@ class CExtractor(TwoStepsExtractorNoPause):
special = CExtractor.to_text_select_one(soup, ".alerte_text")
# it's not a specific event: we skip it
- special_lines = None if special is None else special.split('\n')
- if special is None or len(special_lines) == 0 or \
- (len(special_lines) == 1 and special_lines[0].strip().startswith('En partenariat')):
+ special_lines = None if special is None else special.split("\n")
+ if (
+ special is None
+ or len(special_lines) == 0
+ or (
+ len(special_lines) == 1
+ and special_lines[0].strip().startswith("En partenariat")
+ )
+ ):
return
- description = "\n\n".join([x for x in [synopsis, special_titre, special] if not x is None])
+ description = "\n\n".join(
+ [x for x in [synopsis, special_titre, special] if x is not None]
+ )
image = soup.select_one(".col1 img")
image_alt = None
- if not image is None:
+ if image is not None:
image_alt = image["alt"]
image = image["src"]
self.add_event_with_props(
- default_values,
- event_url,
- title,
- None,
- date,
- None,
- description,
- [],
- recurrences=None,
- uuids=[event_url],
- url_human=event_url,
- start_time=time,
- end_day=None,
- end_time=None,
- published=published,
- image=image,
- image_alt=image_alt
- )
\ No newline at end of file
+ default_values,
+ event_url,
+ title,
+ None,
+ date,
+ None,
+ description,
+ [],
+ recurrences=None,
+ uuids=[event_url],
+ url_human=event_url,
+ start_time=time,
+ end_day=None,
+ end_time=None,
+ published=published,
+ image=image,
+ image_alt=image_alt,
+ )
diff --git a/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py b/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py
index 038cee8..e7fce3d 100644
--- a/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py
+++ b/src/agenda_culturel/import_tasks/custom_extractors/mille_formes.py
@@ -1,6 +1,9 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractorNoPause
+from ..extractor import Extractor
from bs4 import BeautifulSoup
-from datetime import datetime, date
+from datetime import date
+from urllib.parse import urlparse
+
# A class dedicated to get events from Mille formes
# URL: https://www.milleformes.fr/programme
@@ -14,50 +17,54 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
only_future=True,
- ignore_404=True):
+ ignore_404=True,
+ ):
self.root_address = "https://" + urlparse(url).netloc + "/"
self.today = date.today()
- return super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
-
+ return super().extract(
+ content, url, url_human, default_values, published, only_future, ignore_404
+ )
def parse_category(self, cat):
cat = cat.replace("\n", "").strip()
if "exposition" in cat or "dispositif artistique interactif" in cat:
- result = 'Visites & Expositions'
+ result = "Visites & Expositions"
elif "atelier" in cat:
- result = 'Animations & Ateliers'
+ result = "Animations & Ateliers"
elif cat in ["buffet"]:
- result = 'Rendez-vous locaux'
+ result = "Rendez-vous locaux"
elif "ciné" in cat:
- result = 'Cinéma'
+ result = "Cinéma"
elif "concert" in cat:
- result = 'Fêtes & Concerts'
+ result = "Fêtes & Concerts"
elif "rencontre" in cat:
- result = 'Rencontres & Débats'
+ result = "Rencontres & Débats"
elif "spectacle" in cat:
- result = 'Spectacles'
+ result = "Spectacles"
else:
- result = 'Sans catégorie'
+ result = "Sans catégorie"
return result
# this method is not perfect, but dates and hours are not structured
def parse_dates(self, date):
- dl = date.replace(' à ', '\n').split('\n')
+ dl = date.replace(" à ", "\n").split("\n")
result = []
for d in dl:
# only lines with a digit
if sum(c.isdigit() for c in d) != 0:
# split subparts
- for d2 in d.replace(' et ', ', ').split(', '):
+ for d2 in d.replace(" et ", ", ").split(", "):
d2 = d2.strip()
- dd = Extractor.parse_french_date(d2, default_year_by_proximity=self.today)
+ dd = Extractor.parse_french_date(
+ d2, default_year_by_proximity=self.today
+ )
if dd is None:
hh = Extractor.parse_french_time(d2)
for i, r in enumerate(result):
result[i][1].append(hh)
- else:
+ else:
result.append([dd, []])
if "De" in date and " à " in date:
@@ -67,12 +74,11 @@ class CExtractor(TwoStepsExtractorNoPause):
return result
def build_event_url_list(self, content, infuture_days=180):
-
- soup = BeautifulSoup(content, "html.parser")
- links = soup.select('.cell a.evenement')
- for l in links:
- self.add_event_url(self.root_address + l["href"])
+ soup = BeautifulSoup(content, "html.parser")
+ links = soup.select(".cell a.evenement")
+ for lk in links:
+ self.add_event_url(self.root_address + lk["href"])
def add_event_from_content(
self,
@@ -83,39 +89,44 @@ class CExtractor(TwoStepsExtractorNoPause):
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
- title = soup.select_one('h1').text.replace("\n", "").strip().title()
+ title = soup.select_one("h1").text.replace("\n", "").strip().title()
- image = soup.select_one('.slide img')
+ image = soup.select_one(".slide img")
if image is None:
- image_alt = ''
+ image_alt = ""
else:
image_alt = image["alt"]
image = self.root_address + image["src"]
-
- soustitre = soup.select_one('.sous-titre')
- if not soustitre is None:
+
+ soustitre = soup.select_one(".sous-titre")
+ if soustitre is not None:
soustitre = soustitre.text.strip()
- description = soup.select_one('.texte-full').text.strip()
- infos = soup.select_one('.champ .infos')
- if not infos is None:
+ description = soup.select_one(".texte-full").text.strip()
+ infos = soup.select_one(".champ .infos")
+ if infos is not None:
infos = infos.text
- location = soup.select_one('.champ .taxo.espace').text.strip()
+ location = soup.select_one(".champ .taxo.espace").text.strip()
- age = soup.select_one('.champ.taxo-age').text
- category = self.parse_category(soup.select_one('.champ.categorie').text)
+ soup.select_one(".champ.taxo-age").text
+ category = self.parse_category(soup.select_one(".champ.categorie").text)
+ date = soup.select_one(".champ.date-libre").text
- date = soup.select_one('.champ.date-libre').text
+ description = "\n\n".join(
+ [x for x in [soustitre, description, date, infos] if x is not None]
+ )
- description = '\n\n'.join([x for x in [soustitre, description, date, infos] if not x is None])
-
- if " au " in date or date.startswith("Du") or date.lower().strip() == "en continu" or date.startswith("Les"):
+ if (
+ " au " in date
+ or date.startswith("Du")
+ or date.lower().strip() == "en continu"
+ or date.startswith("Les")
+ ):
return
-
+
dates = self.parse_dates(date)
- end_day = None
for d in dates:
if len(d) >= 2:
@@ -124,70 +135,81 @@ class CExtractor(TwoStepsExtractorNoPause):
if len(d) == 3 and len(d[1]) == 2:
start_time = d[1][0]
end_time = d[1][1]
- uuid = event_url + "?date=" + str(start_day) + "&hour=" + str(start_time)
+ uuid = (
+ event_url
+ + "?date="
+ + str(start_day)
+ + "&hour="
+ + str(start_time)
+ )
self.add_event_with_props(
- default_values,
- event_url,
- title,
- category,
- start_day,
- location,
- description,
- [],
- recurrences=None,
- uuids=[uuid],
- url_human=event_url,
- start_time=start_time,
- end_day=start_day,
- end_time=end_time,
- published=published,
- image=image,
- image_alt=image_alt
- )
+ default_values,
+ event_url,
+ title,
+ category,
+ start_day,
+ location,
+ description,
+ [],
+ recurrences=None,
+ uuids=[uuid],
+ url_human=event_url,
+ start_time=start_time,
+ end_day=start_day,
+ end_time=end_time,
+ published=published,
+ image=image,
+ image_alt=image_alt,
+ )
else:
end_time = None
if len(d[1]) == 0:
start_time = None
uuid = event_url + "?date=" + str(start_day)
self.add_event_with_props(
- default_values,
- event_url,
- title,
- category,
- start_day,
- location,
- description,
- [],
- recurrences=None,
- uuids=[uuid],
- url_human=event_url,
- start_time=start_time,
- end_day=start_day,
- end_time=end_time,
- published=published,
- image=image,
- image_alt=image_alt
- )
+ default_values,
+ event_url,
+ title,
+ category,
+ start_day,
+ location,
+ description,
+ [],
+ recurrences=None,
+ uuids=[uuid],
+ url_human=event_url,
+ start_time=start_time,
+ end_day=start_day,
+ end_time=end_time,
+ published=published,
+ image=image,
+ image_alt=image_alt,
+ )
for t in d[1]:
start_time = t
- uuid = event_url + "?date=" + str(start_day) + "&hour=" + str(start_time)
+ uuid = (
+ event_url
+ + "?date="
+ + str(start_day)
+ + "&hour="
+ + str(start_time)
+ )
self.add_event_with_props(
- default_values,
- event_url,
- title,
- category,
- start_day,
- location,
- description,
- [],
- recurrences=None,
- uuids=[uuid],
- url_human=event_url,
- start_time=start_time,
- end_day=start_day,
- end_time=end_time,
- published=published,
- image=image,
- image_alt=image_alt
- )
-
+ default_values,
+ event_url,
+ title,
+ category,
+ start_day,
+ location,
+ description,
+ [],
+ recurrences=None,
+ uuids=[uuid],
+ url_human=event_url,
+ start_time=start_time,
+ end_day=start_day,
+ end_time=end_time,
+ published=published,
+ image=image,
+ image_alt=image_alt,
+ )
diff --git a/src/agenda_culturel/import_tasks/downloader.py b/src/agenda_culturel/import_tasks/downloader.py
index 65b29ea..6d8c17f 100644
--- a/src/agenda_culturel/import_tasks/downloader.py
+++ b/src/agenda_culturel/import_tasks/downloader.py
@@ -5,10 +5,17 @@ import os
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
-from selenium.common.exceptions import *
+from selenium.common.exceptions import (
+ StaleElementReferenceException,
+ NoSuchElementException,
+ TimeoutException,
+ WebDriverException,
+ SessionNotCreatedException,
+)
from abc import ABC, abstractmethod
import time
+
class Downloader(ABC):
def __init__(self):
self.support_2nd_extract = False
@@ -17,13 +24,17 @@ class Downloader(ABC):
def download(self, url, post=None):
pass
- def get_content(self, url, cache=None, referer=None, post=None, content_type=None, data=None):
+ def get_content(
+ self, url, cache=None, referer=None, post=None, content_type=None, data=None
+ ):
if cache and os.path.exists(cache):
print("Loading cache ({})".format(cache))
with open(cache) as f:
content = "\n".join(f.readlines())
else:
- content = self.download(url, referer=referer, post=post, content_type=content_type, data=data)
+ content = self.download(
+ url, referer=referer, post=post, content_type=content_type, data=data
+ )
if cache:
print("Saving cache ({})".format(cache))
@@ -64,7 +75,7 @@ class SimpleDownloader(Downloader):
except Exception as e:
print(e)
- raise Exception("Error during download: " + str(e)[:64] + '...')
+ raise Exception("Error during download: " + str(e)[:64] + "...")
class ChromiumHeadlessDownloader(Downloader):
@@ -88,10 +99,11 @@ class ChromiumHeadlessDownloader(Downloader):
if noimage:
self.options.add_experimental_option(
- "prefs", {
+ "prefs",
+ {
# block image loading
"profile.managed_default_content_settings.images": 2,
- }
+ },
)
self.service = Service("/usr/bin/chromedriver")
@@ -107,21 +119,25 @@ class ChromiumHeadlessDownloader(Downloader):
if self.pause:
time.sleep(2)
self.driver.save_screenshot(path_image)
- except:
- print(f">> Exception: {URL}")
+ except Exception:
+ print(f">> Exception: {url}")
return False
-
+
return True
def download(self, url, referer=None, post=None, content_type=None, data=None):
if post:
raise Exception("POST method with Chromium headless not yet implemented")
if referer:
- raise Exception("Referer parameter with Chromium headless not yet implemented")
+ raise Exception(
+ "Referer parameter with Chromium headless not yet implemented"
+ )
if data:
raise Exception("Data content with Chromium headless not yet implemented")
if content_type:
- raise Exception("Content-type parameter with Chromium headless not yet implemented")
+ raise Exception(
+ "Content-type parameter with Chromium headless not yet implemented"
+ )
print("Download {}".format(url))
try:
@@ -130,27 +146,25 @@ class ChromiumHeadlessDownloader(Downloader):
time.sleep(2)
doc = self.driver.page_source
-
except StaleElementReferenceException as e:
print(f">> {type(e).__name__}: {e.args}")
- raise Exception("Error during download: " + str(e)[:64] + '...')
+ raise Exception("Error during download: " + str(e)[:64] + "...")
except NoSuchElementException as e:
print(f">> {type(e).__name__}: {e.args}")
- raise Exception("Error during download: " + str(e)[:64] + '...')
+ raise Exception("Error during download: " + str(e)[:64] + "...")
except TimeoutException as e:
print(f">> {type(e).__name__}: {e.args}")
- raise Exception("Error during download: " + str(e)[:64] + '...')
+ raise Exception("Error during download: " + str(e)[:64] + "...")
except WebDriverException as e:
print(f">> {type(e).__name__}: {e.args}")
- raise Exception("Error during download: " + str(e)[:64] + '...')
+ raise Exception("Error during download: " + str(e)[:64] + "...")
except SessionNotCreatedException as e:
print(f">> {type(e).__name__}: {e.args}")
- raise Exception("Error during download: " + str(e)[:64] + '...')
+ raise Exception("Error during download: " + str(e)[:64] + "...")
except Exception as e:
- print(f">> {type(e).__name__} line {e.__traceback__.tb_lineno} of {__file__}: {e.args}")
- raise Exception("Error during download: " + str(e)[:64] + '...')
- except:
- print(f">> General Exception: {URL}")
- raise Exception("Error during download: " + str(e)[:64] + '...')
+ print(
+ f">> {type(e).__name__} line {e.__traceback__.tb_lineno} of {__file__}: {e.args}"
+ )
+ raise Exception("Error during download: " + str(e)[:64] + "...")
return doc
diff --git a/src/agenda_culturel/import_tasks/extractor.py b/src/agenda_culturel/import_tasks/extractor.py
index 3a71410..173fce5 100644
--- a/src/agenda_culturel/import_tasks/extractor.py
+++ b/src/agenda_culturel/import_tasks/extractor.py
@@ -4,7 +4,7 @@ from datetime import datetime, time, date, timedelta
import re
import unicodedata
from django.utils import timezone
-import logging
+from django.utils.translation import gettext_lazy as _
class Extractor(ABC):
@@ -13,7 +13,7 @@ class Extractor(ABC):
NO_START_DATE = 2
NOT_FOUND = 3
- url_referer=None
+ url_referer = None
def __init__(self):
self.header = {}
@@ -25,12 +25,12 @@ class Extractor(ABC):
# avoid the importer to use the downloader on the url
# (used for extractors that are self-sufficient)
self.no_downloader = False
-
+
# parameters used by the downloader to get the content
self.referer = ""
self.data = None
self.content_type = None
-
+
def prepare_2nd_extract(self):
pass
@@ -106,16 +106,18 @@ class Extractor(ABC):
return None
try:
day = int(day)
- if not year is None:
+ if year is not None:
year = int(year)
- except:
+ except Exception:
return None
if day >= 32:
return None
# by proximity
- if year is None and not default_year_by_proximity is None:
- dates = [date(default_year_by_proximity.year + x, month, day) for x in [-1, 0, 1]]
+ if year is None and default_year_by_proximity is not None:
+ dates = [
+ date(default_year_by_proximity.year + x, month, day) for x in [-1, 0, 1]
+ ]
dates = [(abs((d - default_year_by_proximity).days), d) for d in dates]
d = min(dates, key=lambda x: x[0])
return d[1]
@@ -162,7 +164,7 @@ class Extractor(ABC):
h = int(h)
m = int(m)
s = int(s)
- except:
+ except Exception:
return None
if h >= 24 or m >= 60 or s >= 60:
return None
@@ -177,10 +179,6 @@ class Extractor(ABC):
def set_downloader(self, downloader):
self.downloader = downloader
- @abstractmethod
- def clean_url(url):
- return url
-
def is_known_url(url):
return False
@@ -210,14 +208,14 @@ class Extractor(ABC):
published=False,
image=None,
image_alt=None,
- not_found=False
+ not_found=False,
):
- comments = ''
+ comments = ""
warnings = []
if title is None:
print("WARNING: cannot publish an event without name")
published = False
- title = _('Unknown title')
+ title = _("Unknown title")
warnings.append(Extractor.Warning.NO_TITLE)
if start_day is None:
print("WARNING: cannot publish an event without start day")
@@ -233,10 +231,18 @@ class Extractor(ABC):
event = {
"title": title,
- "category": category if category else self.default_value_if_exists(default_values, "category"),
+ "category": (
+ category
+ if category
+ else self.default_value_if_exists(default_values, "category")
+ ),
"start_day": start_day,
"uuids": uuids,
- "location": location if location else self.default_value_if_exists(default_values, "location"),
+ "location": (
+ location
+ if location
+ else self.default_value_if_exists(default_values, "location")
+ ),
"organisers": self.default_value_if_exists(default_values, "organisers"),
"description": description,
"tags": tags + tags_default,
@@ -250,7 +256,7 @@ class Extractor(ABC):
if event["comments"] is None:
event["comments"] = comments
else:
- event["comments"] += '\n' + comments
+ event["comments"] += "\n" + comments
# TODO: pourquoi url_human et non reference_url
if url_human is not None:
@@ -295,12 +301,23 @@ class Extractor(ABC):
def get_default_extractors(single_event=False):
from .generic_extractors.ical import ICALExtractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
- from .generic_extractors.ggcal_link import CExtractor as GoogleCalendarLinkEventExtractor
+ from .generic_extractors.ggcal_link import (
+ CExtractor as GoogleCalendarLinkEventExtractor,
+ )
if single_event:
- return [FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()]
+ return [
+ FacebookEventExtractor(),
+ GoogleCalendarLinkEventExtractor(),
+ EventNotFoundExtractor(),
+ ]
else:
- return [ICALExtractor(), FacebookEventExtractor(), GoogleCalendarLinkEventExtractor(), EventNotFoundExtractor()]
+ return [
+ ICALExtractor(),
+ FacebookEventExtractor(),
+ GoogleCalendarLinkEventExtractor(),
+ EventNotFoundExtractor(),
+ ]
# A class that only produce a not found event
@@ -312,14 +329,22 @@ class EventNotFoundExtractor(Extractor):
self.set_header(url)
self.clear_events()
- self.add_event(default_values, "événement sans titre depuis " + url,
- None, timezone.now().date(), None,
- "l'import a échoué, la saisie doit se faire manuellement à partir de l'url source " + url,
- [], [url], published=False, url_human=url, not_found=True)
+ self.add_event(
+ default_values,
+ "événement sans titre depuis " + url,
+ None,
+ timezone.now().date(),
+ None,
+ "l'import a échoué, la saisie doit se faire manuellement à partir de l'url source "
+ + url,
+ [],
+ [url],
+ published=False,
+ url_human=url,
+ not_found=True,
+ )
return self.get_structure()
-
def clean_url(url):
return url
-
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py b/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py
index ec5adf1..a761eac 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/apidae_tourisme.py
@@ -1,27 +1,33 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractorNoPause
+from ..extractor import Extractor
from bs4 import BeautifulSoup
-from datetime import datetime
+
# A class dedicated to get events from apidae-tourisme widgets
class CExtractor(TwoStepsExtractorNoPause):
-
def build_event_url_list(self, content, infuture_days=180):
-
+
# Get line starting with wrapper.querySelector(".results_agenda").innerHTML = "
# split using "=" and keep the end
# strip it, and remove the first character (") and the two last ones (";)
# remove the escapes and parse the contained html
for line in content.split("\n"):
- if line.startswith('wrapper.querySelector(".results_agenda").innerHTML = "'):
- html = ('"'.join(line.split('"')[3:])).replace('\\"', '"').replace('\\n', "\n").replace('\\/', '/')
+ if line.startswith(
+ 'wrapper.querySelector(".results_agenda").innerHTML = "'
+ ):
+ html = (
+ ('"'.join(line.split('"')[3:]))
+ .replace('\\"', '"')
+ .replace("\\n", "\n")
+ .replace("\\/", "/")
+ )
soup = BeautifulSoup(html, "html.parser")
- links = soup.select('a.widgit_result')
- for l in links:
- self.add_event_url(l["data-w-href"])
+ links = soup.select("a.widgit_result")
+ for lk in links:
+ self.add_event_url(lk["data-w-href"])
break
-
def add_event_from_content(
self,
event_content,
@@ -38,17 +44,22 @@ class CExtractor(TwoStepsExtractorNoPause):
# check for content
for line in event_content.split("\n"):
- if line.startswith('detailsWrapper.innerHTML ='):
- html = ('"'.join(line.split('"')[1:])).replace('\\"', '"').replace('\\n', "\n").replace('\\/', '/')
+ if line.startswith("detailsWrapper.innerHTML ="):
+ html = (
+ ('"'.join(line.split('"')[1:]))
+ .replace('\\"', '"')
+ .replace("\\n", "\n")
+ .replace("\\/", "/")
+ )
soup = BeautifulSoup(html, "html.parser")
- title = soup.select_one('h2.widgit_title').text.strip()
- image = soup.select_one('img')
+ title = soup.select_one("h2.widgit_title").text.strip()
+ image = soup.select_one("img")
image_alt = image["alt"]
image = image["src"]
- description = soup.select('div.desc')
- description = '\n'.join([d.text for d in description])
- openings = soup.select_one('.openings .mts').text.strip().split("\n")[0]
+ description = soup.select("div.desc")
+ description = "\n".join([d.text for d in description])
+ openings = soup.select_one(".openings .mts").text.strip().split("\n")[0]
start_time = None
end_time = None
if "tous les" in openings:
@@ -61,43 +72,43 @@ class CExtractor(TwoStepsExtractorNoPause):
start_time = Extractor.parse_french_time(hours[0])
if len(hours) > 1:
end_time = Extractor.parse_french_time(hours[1])
-
+
contact = soup.select_one(".contact")
sa = False
location = []
for c in contact.children:
- if c.name == 'h2' and c.text.strip() == "Adresse":
+ if c.name == "h2" and c.text.strip() == "Adresse":
sa = True
else:
- if c.name == 'h2' and sa:
+ if c.name == "h2" and sa:
break
- if c.name == 'p' and sa:
+ if c.name == "p" and sa:
e = c.text.strip()
if e != "":
location.append(e)
- location = ', '.join(location)
+ location = ", ".join(location)
- websites = soup.select("a.website")
+ soup.select("a.website")
event_url = url_human + "#" + ref
self.add_event_with_props(
- default_values,
- event_url,
- title,
- None,
- start_day,
- location,
- description,
- [],
- recurrences=None,
- uuids=[event_url],
- url_human=event_url,
- start_time=start_time,
- end_day=start_day,
- end_time=end_time,
- published=published,
- image=image,
- image_alt=image_alt
- )
+ default_values,
+ event_url,
+ title,
+ None,
+ start_day,
+ location,
+ description,
+ [],
+ recurrences=None,
+ uuids=[event_url],
+ url_human=event_url,
+ start_time=start_time,
+ end_day=start_day,
+ end_time=end_time,
+ published=published,
+ image=image,
+ image_alt=image_alt,
+ )
return
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py b/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py
index cddbc76..4016094 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/fbevent.py
@@ -3,15 +3,16 @@ from bs4 import BeautifulSoup
from urllib.parse import urlparse
import time as t
from django.utils.translation import gettext_lazy as _
+import re
-
-from ..extractor import *
+from ..extractor import Extractor
import json
import logging
logger = logging.getLogger(__name__)
+
class SimpleFacebookEvent:
def __init__(self, data):
self.elements = {}
@@ -20,9 +21,8 @@ class SimpleFacebookEvent:
self.elements[key] = data[key] if key in data else None
if "parent_event" in data:
- self.parent = SimpleFacebookEvent(
- data["parent_event"]
- )
+ self.parent = SimpleFacebookEvent(data["parent_event"])
+
class FacebookEvent:
name = "event"
@@ -48,17 +48,17 @@ class FacebookEvent:
# each pair in the associated list is a key of our model and a path within FB data to
# get the corresponding field
rules = {
- "event_description": [("description", ["text"])],
+ "event_description": [("description", ["text"])],
"cover_media_renderer": [
("image_alt", ["cover_photo", "photo", "accessibility_caption"]),
("image", ["cover_photo", "photo", "full_image", "uri"]),
("image", ["cover_media", 0, "full_image", "uri"]),
("image_alt", ["cover_media", 0, "accessibility_caption"]),
- ],
- "event_creator":
- [("event_creator_name", ["name"]),
- ("event_creator_url", ["url"]),
- ],
+ ],
+ "event_creator": [
+ ("event_creator_name", ["name"]),
+ ("event_creator_url", ["url"]),
+ ],
"event_place": [("event_place_name", ["name"])],
}
@@ -82,9 +82,7 @@ class FacebookEvent:
def get_element_date(self, key):
v = self.get_element(key)
- return (
- datetime.fromtimestamp(v).date() if v is not None and v != 0 else None
- )
+ return datetime.fromtimestamp(v).date() if v is not None and v != 0 else None
def get_element_time(self, key):
v = self.get_element(key)
@@ -111,7 +109,11 @@ class FacebookEvent:
error = False
c = event[k]
for ki in rule:
- if c is not None and ki in c or (isinstance(c, list) and ki < len(c)):
+ if (
+ c is not None
+ and ki in c
+ or (isinstance(c, list) and ki < len(c))
+ ):
c = c[ki]
else:
error = True
@@ -127,9 +129,7 @@ class FacebookEvent:
)
def get_neighbor_events(self, data):
- self.neighbor_events = [
- SimpleFacebookEvent(d) for d in data
- ]
+ self.neighbor_events = [SimpleFacebookEvent(d) for d in data]
def __str__(self):
return (
@@ -148,9 +148,7 @@ class FacebookEvent:
id = self.elements["id"]
for ne in self.neighbor_events:
if ne.elements["id"] == id:
- self.elements["end_timestamp"] = ne.elements[
- "end_timestamp"
- ]
+ self.elements["end_timestamp"] = ne.elements["end_timestamp"]
if (
"end_timestamp" not in self.elements
@@ -185,9 +183,7 @@ class FacebookEvent:
)
elif isinstance(array, list):
for e in array:
- event = FacebookEvent.find_event_fragment_in_array(
- e, event, False
- )
+ event = FacebookEvent.find_event_fragment_in_array(e, event, False)
if event is not None and first:
event.consolidate_current_event()
@@ -213,7 +209,6 @@ class FacebookEvent:
def get_parent_id(self):
return self.get_element("parent_if_exists_or_self")["id"]
-
def build_events(self, url):
if self.neighbor_events is None or len(self.neighbor_events) == 0:
@@ -231,13 +226,14 @@ class FacebookEvent:
result.append(clone.build_event(url_base + nb_e.elements["id"] + "/"))
return result
+
class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.has_2nd_method = True
- def prepare_2nd_extract_dler(downloader):
+ def prepare_2nd_extract_dler(downloader):
if downloader.support_2nd_extract:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
@@ -245,30 +241,54 @@ class CExtractor(Extractor):
path = './/div[not(@aria-hidden)]/div[@aria-label="Allow all cookies"]'
try:
- element = WebDriverWait(downloader.driver, 15).until(EC.visibility_of_element_located((By.XPATH, path)))
+ WebDriverWait(downloader.driver, 15).until(
+ EC.visibility_of_element_located((By.XPATH, path))
+ )
except Exception as e:
- raise Exception(_("Error while waiting for the cookie button to be visible: " + e.__class__.__name__ + ' ' + str(e)))
+ raise Exception(
+ _(
+ "Error while waiting for the cookie button to be visible: "
+ + e.__class__.__name__
+ + " "
+ + str(e)
+ )
+ )
try:
button = downloader.driver.find_element(By.XPATH, path)
except Exception as e:
- raise Exception(_("Error while getting the cookie button to be visible: " + e.__class__.__name__ + ' ' + str(e)))
+ raise Exception(
+ _(
+ "Error while getting the cookie button to be visible: "
+ + e.__class__.__name__
+ + " "
+ + str(e)
+ )
+ )
try:
button.click()
except Exception as e:
- raise Exception(_("Error while clicking on the cookie button to be visible: " + e.__class__.__name__ + ' ' + str(e)))
+ raise Exception(
+ _(
+ "Error while clicking on the cookie button to be visible: "
+ + e.__class__.__name__
+ + " "
+ + str(e)
+ )
+ )
t.sleep(5)
def prepare_2nd_extract(self):
CExtractor.prepare_2nd_extract_dler(self.downloader)
-
def clean_url(url):
if CExtractor.is_known_url(url, False):
u = urlparse(url)
result = "https://www.facebook.com" + u.path
# remove name in the url
- match = re.match(r"(.*/events)/s/([a-zA-Z-][a-zA-Z-0-9-]+)/([0-9/]*)", result)
+ match = re.match(
+ r"(.*/events)/s/([a-zA-Z-][a-zA-Z-0-9-]+)/([0-9/]*)", result
+ )
if match:
result = match[1] + "/" + match[3]
@@ -279,7 +299,6 @@ class CExtractor(Extractor):
else:
return url
-
def is_known_url(url, include_links=True):
u = urlparse(url)
url_list = ["facebook.com", "www.facebook.com", "m.facebook.com"]
@@ -298,14 +317,12 @@ class CExtractor(Extractor):
for json_script in soup.find_all("script", type="application/json"):
json_txt = json_script.get_text()
json_struct = json.loads(json_txt)
- fevent = FacebookEvent.find_event_fragment_in_array(
- json_struct, fevent
- )
+ fevent = FacebookEvent.find_event_fragment_in_array(json_struct, fevent)
if fevent is not None:
self.set_header(url)
for event in fevent.build_events(url):
- logger.warning("published: " + str(published))
+ logger.info("published: " + str(published))
event["published"] = published
if default_values and "category" in default_values:
@@ -314,8 +331,6 @@ class CExtractor(Extractor):
return self.get_structure()
else:
logger.warning("cannot find any event in page")
- raise Exception(
- _("Cannot get Facebook event from {}").format(url)
- )
+ raise Exception(_("Cannot get Facebook event from {}").format(url))
return None
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py b/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py
index f806060..ebeb46b 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/fbevents.py
@@ -1,6 +1,5 @@
-from ..twosteps_extractor import *
-from .fbevent import FacebookEvent
-import json5
+from ..twosteps_extractor import TwoStepsExtractor
+from .fbevent import FacebookEvent, CExtractor as FacebookEventExtractor
from bs4 import BeautifulSoup
import json
import os
@@ -22,10 +21,15 @@ class CExtractor(TwoStepsExtractor):
self.has_2nd_method_in_list = True
def find_event_id_fragment_in_array(self, array):
- found = False
if isinstance(array, dict):
- if "__typename" in array and array["__typename"] == "Event" and "id" in array:
- self.add_event_url("https://www.facebook.com/events/" + array["id"] + "/")
+ if (
+ "__typename" in array
+ and array["__typename"] == "Event"
+ and "id" in array
+ ):
+ self.add_event_url(
+ "https://www.facebook.com/events/" + array["id"] + "/"
+ )
self.found = True
else:
for k in array:
@@ -36,7 +40,6 @@ class CExtractor(TwoStepsExtractor):
for e in array:
self.find_event_id_fragment_in_array(e)
-
def find_in_js(self, soup):
for json_script in soup.find_all("script", type="application/json"):
@@ -44,11 +47,9 @@ class CExtractor(TwoStepsExtractor):
json_struct = json.loads(json_txt)
self.find_event_id_fragment_in_array(json_struct)
-
def prepare_2nd_extract_in_list(self):
FacebookEventExtractor.prepare_2nd_extract_dler(self.downloader)
-
def build_event_url_list(self, content):
soup = BeautifulSoup(content, "html.parser")
@@ -57,23 +58,27 @@ class CExtractor(TwoStepsExtractor):
self.found = False
links = soup.find_all("a")
for link in links:
- href = link.get('href')
- if not href is None and href.startswith('https://www.facebook.com/events/'):
- self.add_event_url(href.split('?')[0])
+ href = link.get("href")
+ if href is not None and href.startswith("https://www.facebook.com/events/"):
+ self.add_event_url(href.split("?")[0])
self.found = True
self.has_page_items = False
self.find_in_js(soup)
-
if not self.found:
- logger.warning("cannot find any event link in events page. Save content page")
+ logger.warning(
+ "cannot find any event link in events page. Save content page"
+ )
if debug:
CExtractor.dump_content_for_debug(content, self.url)
if not self.has_page_items:
- raise Exception(_("the page was not yet populated with events, so the loading time was probably too short"))
-
+ raise Exception(
+ _(
+ "the page was not yet populated with events, so the loading time was probably too short"
+ )
+ )
def dump_content_for_debug(content, url):
directory = "errors/"
@@ -85,9 +90,6 @@ class CExtractor(TwoStepsExtractor):
text_file.write("\n\n")
text_file.write(content)
-
-
-
def add_event_from_content(
self,
event_content,
@@ -103,22 +105,19 @@ class CExtractor(TwoStepsExtractor):
for json_script in soup.find_all("script", type="application/json"):
json_txt = json_script.get_text()
json_struct = json.loads(json_txt)
- fevent = FacebookEvent.find_event_fragment_in_array(
- json_struct, fevent
- )
+ fevent = FacebookEvent.find_event_fragment_in_array(json_struct, fevent)
if fevent is not None:
for event in fevent.build_events(event_url):
event["published"] = published
# only add the event if its unknown
- if len([e for e in self.events if event["uuids"][0] in e["uuids"]]) == 0:
+ if (
+ len([e for e in self.events if event["uuids"][0] in e["uuids"]])
+ == 0
+ ):
self.add_event(default_values, **event)
else:
if debug:
CExtractor.dump_content_for_debug(event_content, event_url)
- raise Exception(
- _("Cannot get Facebook event from {}").format(event_url)
- )
-
-
+ raise Exception(_("Cannot get Facebook event from {}").format(event_url))
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py b/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py
index 4eb21c9..aba9edb 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/ggcal_link.py
@@ -1,16 +1,15 @@
from datetime import datetime
from bs4 import BeautifulSoup
-from urllib.parse import urlparse
-
-from ..extractor import *
-from ..twosteps_extractor import *
-
-import json
+from urllib.parse import urlparse, parse_qs
+import dateutil.parser
+from ..extractor import Extractor
+import bbcode
import logging
logger = logging.getLogger(__name__)
+
class GGCalendar:
def __init__(self, url):
self.url = url
@@ -20,8 +19,8 @@ class GGCalendar:
result = {}
for k, v in params.items():
- if k.startswith('e[0]'):
- result[k.replace('e[0][', '')[:-1]] = v
+ if k.startswith("e[0]"):
+ result[k.replace("e[0][", "")[:-1]] = v
else:
result[k] = v
@@ -37,29 +36,37 @@ class GGCalendar:
params = GGCalendar.filter_keys(params)
self.location = params["location"][0] if "location" in params else ""
- self.title = params["text"][0] if "text" in params else params["title"][0] if "title" in params else ""
- self.description = params["description"][0] if "description" in params else params["details"][0] if "details" in params else ""
+ self.title = (
+ params["text"][0]
+ if "text" in params
+ else params["title"][0] if "title" in params else ""
+ )
+ self.description = (
+ params["description"][0]
+ if "description" in params
+ else params["details"][0] if "details" in params else ""
+ )
if self.description != "":
self.description = BeautifulSoup(self.description, "html.parser").text
if "dates" in params:
dates = [x.replace(" ", "+") for x in params["dates"][0].split("/")]
if len(dates) > 0:
- date = parser.parse(dates[0])
+ date = dateutil.parser.parse(dates[0])
self.start_day = date.date()
self.start_time = date.time()
if len(dates) == 2:
- date = parser.parse(dates[1])
+ date = dateutil.parser.parse(dates[1])
self.end_day = date.date()
self.end_time = date.time()
else:
self.end_day = None
self.end_time = None
elif "date_start" in params:
- date = parser.parse(params["date_start"][0])
+ date = dateutil.parser.parse(params["date_start"][0])
self.start_day = date.date()
self.start_time = date.time()
if "date_end" in params:
- dateend = parser.parse(params["date_end"][0])
+ dateend = dateutil.parser.parse(params["date_end"][0])
if dateend != date:
self.end_day = dateend.date()
self.end_time = dateend.time()
@@ -80,19 +87,21 @@ class GGCalendar:
self.end_time = None
-
class CExtractor(Extractor):
def __init__(self):
super().__init__()
- self.possible_urls = ["https://calendar.google.com/calendar/", "https://addtocalendar.com/", "https://www.google.com/calendar/event"]
-
+ self.possible_urls = [
+ "https://calendar.google.com/calendar/",
+ "https://addtocalendar.com/",
+ "https://www.google.com/calendar/event",
+ ]
def guess_image(self, soup, url):
image = soup.find("meta", property="og:image")
if image is None:
- for img in soup.select('img'):
- if img.find_parent(name='nav'):
+ for img in soup.select("img"):
+ if img.find_parent(name="nav"):
continue
image = img["src"]
break
@@ -105,7 +114,6 @@ class CExtractor(Extractor):
return image
-
def extract(
self, content, url, url_human=None, default_values=None, published=False
):
@@ -121,7 +129,7 @@ class CExtractor(Extractor):
if gg_cal.is_valid_event():
start_day = gg_cal.start_day
start_time = gg_cal.start_time
- description = gg_cal.description.replace(' ', '')
+ description = gg_cal.description.replace(" ", "")
end_day = gg_cal.end_day
end_time = gg_cal.end_time
location = gg_cal.location
@@ -154,5 +162,4 @@ class CExtractor(Extractor):
break
-
- return self.get_structure()
\ No newline at end of file
+ return self.get_structure()
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/ical.py b/src/agenda_culturel/import_tasks/generic_extractors/ical.py
index 9b85211..f02d0ec 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/ical.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/ical.py
@@ -8,7 +8,7 @@ from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning
import pytz
-from ..extractor import *
+from ..extractor import Extractor
from celery.utils.log import get_task_logger
@@ -21,7 +21,6 @@ class ICALExtractor(Extractor):
self.naive_timezone = False
self.to_timezone = pytz.timezone("Europe/Paris")
-
def get_item_from_vevent(self, event, name, raw=False):
try:
r = event.decoded(name)
@@ -29,21 +28,21 @@ class ICALExtractor(Extractor):
return r
else:
return r.decode()
- except:
+ except Exception:
return None
def guess_image_from_vevent(self, event):
- item = self.get_item_from_vevent(event, 'ATTACH', raw=True)
+ item = self.get_item_from_vevent(event, "ATTACH", raw=True)
if item is None:
return None
# it seems that FMTTYPE is not available through python-icalendar
if isinstance(item, list):
for i in item:
- if '.jpg' in str(i).lower():
+ if ".jpg" in str(i).lower():
return str(i)
else:
- if '.jpg' in str(item).lower():
+ if ".jpg" in str(item).lower():
return str(item)
return None
@@ -98,7 +97,7 @@ class ICALExtractor(Extractor):
end_day = end_day + timedelta(days=-1)
location = self.get_item_from_vevent(event, "LOCATION")
- if (not location is None) and location.replace(" ", "") == "":
+ if (location is not None) and location.replace(" ", "") == "":
location = None
description = self.get_item_from_vevent(event, "DESCRIPTION")
@@ -184,7 +183,7 @@ class ICALExtractor(Extractor):
end_time=end_time,
last_modified=last_modified,
published=published,
- image=image
+ image=image,
)
return self.get_structure()
@@ -293,7 +292,8 @@ class ICALNoVCExtractor(ICALExtractor):
image_alt,
)
+
class ICALNaiveTimezone(ICALExtractor):
def __init__(self):
super().__init__()
- self.naive_timezone = True
\ No newline at end of file
+ self.naive_timezone = True
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py b/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py
index ac184bd..87008ef 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/iguana_agenda.py
@@ -1,8 +1,9 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractorNoPause
+from ..extractor import Extractor
from bs4 import BeautifulSoup
-from datetime import datetime
from urllib.parse import urlparse
+
# A class dedicated to get events from Raymond Bar
# URL: https://www.raymondbar.net/
class CExtractor(TwoStepsExtractorNoPause):
@@ -24,7 +25,6 @@ class CExtractor(TwoStepsExtractorNoPause):
return "Sans catégorie"
-
def guess_tags_from_category(self, category):
tags = []
if "Lecture" in category:
@@ -35,26 +35,35 @@ class CExtractor(TwoStepsExtractorNoPause):
return tags
def build_event_url_list(self, content, infuture_days=180):
-
+
soup = BeautifulSoup(content, "html.parser")
- root_address_human = self.url_human.split('?')[0]
- root_address = self.url.split('Service')[0]
+ root_address_human = self.url_human.split("?")[0]
+ root_address = self.url.split("Service")[0]
items = soup.select("li.listItem")
if items:
for item in items:
elems = item["onclick"].split('"')
- v = elems[3].split('^')[1]
+ v = elems[3].split("^")[1]
contentItem = elems[1]
- multidate = item.select_one('.until.maindate').text != ''
+ multidate = item.select_one(".until.maindate").text != ""
if not multidate:
- url_human = root_address_human + '?p=*&v=' + v + "#contentitem=" + contentItem
- url = root_address + 'Service.PubItem.cls?action=get&instance=*&uuid=' + contentItem
+ url_human = (
+ root_address_human
+ + "?p=*&v="
+ + v
+ + "#contentitem="
+ + contentItem
+ )
+ url = (
+ root_address
+ + "Service.PubItem.cls?action=get&instance=*&uuid="
+ + contentItem
+ )
self.add_event_url(url)
self.add_event_url_human(url, url_human)
-
def add_event_from_content(
self,
event_content,
@@ -68,7 +77,6 @@ class CExtractor(TwoStepsExtractorNoPause):
soup = BeautifulSoup(event_content, "xml")
-
title = soup.select_one("Title").text
content = soup.select_one("Content").text
@@ -78,11 +86,15 @@ class CExtractor(TwoStepsExtractorNoPause):
description = soup.select_one(".rightcolumn .content").text
location = soup.select_one(".infos .location").text
public = soup.select_one(".infos .public").text
- start_day = Extractor.parse_french_date(soup.select_one(".infos .date .from").text)
- start_time = Extractor.parse_french_time(soup.select_one(".infos .date .time").text)
+ start_day = Extractor.parse_french_date(
+ soup.select_one(".infos .date .from").text
+ )
+ start_time = Extractor.parse_french_time(
+ soup.select_one(".infos .date .time").text
+ )
acces = soup.select_one(".infos .acces").text
category = soup.select_one(".rightcolumn .category").text
- infos = soup.select_one('.infos').text
+ infos = soup.select_one(".infos").text
description = description + "\n" + infos
@@ -94,21 +106,21 @@ class CExtractor(TwoStepsExtractorNoPause):
tags.append("💶 gratuit")
self.add_event_with_props(
- default_values,
- event_url,
- title,
- category,
- start_day,
- location,
- description,
- tags,
- recurrences=None,
- uuids=[event_url],
- url_human=event_url,
- start_time=start_time,
- end_day=None,
- end_time=None,
- published=published,
- image=image,
- image_alt=""
- )
\ No newline at end of file
+ default_values,
+ event_url,
+ title,
+ category,
+ start_day,
+ location,
+ description,
+ tags,
+ recurrences=None,
+ uuids=[event_url],
+ url_human=event_url,
+ start_time=start_time,
+ end_day=None,
+ end_time=None,
+ published=published,
+ image=image,
+ image_alt="",
+ )
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py b/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py
index 7202256..087b1c2 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/mobilizon.py
@@ -1,5 +1,4 @@
-from ..extractor import *
-import json
+from ..extractor import Extractor
import dateutil.parser
from datetime import datetime, timezone
import requests
@@ -10,6 +9,7 @@ import logging
logger = logging.getLogger(__name__)
+
# A class dedicated to get events from les amis du temps des cerises
# Website https://amisdutempsdescerises.org/
class CExtractor(Extractor):
@@ -21,22 +21,30 @@ class CExtractor(Extractor):
# Source code adapted from https://framagit.org/Marc-AntoineA/mobilizon-client-python
def _request(self, body, data):
- headers = {}
+ headers = {}
- response = requests.post(url=self._api_end_point, json={ "query": body, "variables": data }, headers=headers)
+ response = requests.post(
+ url=self._api_end_point,
+ json={"query": body, "variables": data},
+ headers=headers,
+ )
- if response.status_code == 200:
- response_json = response.json()
- if 'errors' in response_json:
- raise Exception(f'Errors while requesting { body }. { str(response_json["errors"]) }')
+ if response.status_code == 200:
+ response_json = response.json()
+ if "errors" in response_json:
+ raise Exception(
+ f'Errors while requesting { body }. { str(response_json["errors"]) }'
+ )
- return response_json['data']
- else:
- raise Exception(f'Error while requesting. Status code: { response.status_code }')
+ return response_json["data"]
+ else:
+ raise Exception(
+ f"Error while requesting. Status code: { response.status_code }"
+ )
def _oncoming_events_number(self):
- query = '''
+ query = """
query($preferredUsername: String!, $afterDatetime: DateTime) {
group(preferredUsername: $preferredUsername) {
organizedEvents(afterDatetime: $afterDatetime) {
@@ -44,19 +52,15 @@ query($preferredUsername: String!, $afterDatetime: DateTime) {
}
}
}
- '''
+ """
today = datetime.now(timezone.utc).isoformat()
- data = {
- 'preferredUsername': self._group_id,
- 'afterDatetime': today
- }
+ data = {"preferredUsername": self._group_id, "afterDatetime": today}
r = self._request(query, data)
- return r['group']['organizedEvents']['total']
-
+ return r["group"]["organizedEvents"]["total"]
def _oncoming_events(self):
def _oncoming_events_page(page):
- query = '''
+ query = """
query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
group(preferredUsername: $preferredUsername) {
organizedEvents(afterDatetime: $afterDatetime, page: $page) {
@@ -98,16 +102,16 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
}
}
}
- '''
+ """
today = datetime.now(timezone.utc).isoformat()
data = {
- 'preferredUsername': self._group_id,
- 'afterDatetime': today,
- 'page': page
+ "preferredUsername": self._group_id,
+ "afterDatetime": today,
+ "page": page,
}
r = self._request(query, data)
- return r['group']['organizedEvents']['elements']
+ return r["group"]["organizedEvents"]["elements"]
number_events = self._oncoming_events_number()
@@ -132,9 +136,9 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
# https://mobilizon.extinctionrebellion.fr/@xr_clermont_ferrand/events
# split url to identify server url and actor id
- elems = [x for x in url.split('/') if len(x) > 0 and x[0] == "@"]
+ elems = [x for x in url.split("/") if len(x) > 0 and x[0] == "@"]
if len(elems) == 1:
- params = elems[0].split('@')
+ params = elems[0].split("@")
if len(params) == 2:
self._api_end_point = "https://" + urlparse(url).netloc + "/api"
self._group_id = params[1]
@@ -144,24 +148,34 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
events = self._oncoming_events()
-
for e in events:
title = e["title"]
event_url = e["url"]
image = e["picture"]["url"]
- location = e["physicalAddress"]["description"] + ', ' + e["physicalAddress"]["locality"]
+ location = (
+ e["physicalAddress"]["description"]
+ + ", "
+ + e["physicalAddress"]["locality"]
+ )
soup = BeautifulSoup(e["description"], "html.parser")
-
+
description = soup.text
- start = dateutil.parser.isoparse(e["beginsOn"]).replace(tzinfo=timezone.utc).astimezone(tz=None)
- end = dateutil.parser.isoparse(e["endsOn"]).replace(tzinfo=timezone.utc).astimezone(tz=None)
+ start = (
+ dateutil.parser.isoparse(e["beginsOn"])
+ .replace(tzinfo=timezone.utc)
+ .astimezone(tz=None)
+ )
+ end = (
+ dateutil.parser.isoparse(e["endsOn"])
+ .replace(tzinfo=timezone.utc)
+ .astimezone(tz=None)
+ )
start_day = start.date()
start_time = start.time() if e["options"]["showStartTime"] else None
end_day = end.date()
end_time = end.time() if e["options"]["showEndTime"] else None
-
self.add_event(
default_values,
title,
@@ -177,6 +191,7 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
published=published,
image=image,
end_day=end_day,
- end_time=end_time)
-
+ end_time=end_time,
+ )
+
return self.get_structure()
diff --git a/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py b/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py
index 7ecd8a9..6db93e5 100644
--- a/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py
+++ b/src/agenda_culturel/import_tasks/generic_extractors/wordpress_mec.py
@@ -1,11 +1,12 @@
-from ..twosteps_extractor import *
+from ..twosteps_extractor import TwoStepsExtractor
+from ..extractor import Extractor
from bs4 import BeautifulSoup
# A class dedicated to get events from MEC Wordpress plugin
# URL: https://webnus.net/modern-events-calendar/
class CExtractor(TwoStepsExtractor):
-
+
def local2agendaCategory(self, category):
mapping = {
"Musique": "Fêtes & Concerts",
@@ -25,7 +26,7 @@ class CExtractor(TwoStepsExtractor):
"Atelier": "atelier",
"Projection": None,
}
-
+
if category in mapping:
return mapping[category], mapping_tag[category]
else:
@@ -40,7 +41,7 @@ class CExtractor(TwoStepsExtractor):
if len(link) == 1:
url = link[0]["href"]
title = link[0].get_text()
-
+
if self.add_event_url(url):
print(url, title)
self.add_event_title(url, title)
@@ -55,7 +56,6 @@ class CExtractor(TwoStepsExtractor):
if tag:
self.add_event_category(url, tag)
-
def add_event_from_content(
self,
event_content,
@@ -65,7 +65,7 @@ class CExtractor(TwoStepsExtractor):
published=False,
):
soup = BeautifulSoup(event_content, "xml")
-
+
start_day = soup.select(".mec-start-date-label")
if start_day and len(start_day) > 0:
start_day = Extractor.parse_french_date(start_day[0].get_text())
@@ -82,13 +82,15 @@ class CExtractor(TwoStepsExtractor):
else:
start_time = None
end_time = None
-
+
image = soup.select(".mec-events-event-image img")
if image:
image = image[0]["src"]
else:
image = None
- description = soup.select(".mec-event-content .mec-single-event-description")[0].get_text(separator=" ")
+ description = soup.select(".mec-event-content .mec-single-event-description")[
+ 0
+ ].get_text(separator=" ")
url_human = event_url
diff --git a/src/agenda_culturel/import_tasks/importer.py b/src/agenda_culturel/import_tasks/importer.py
index 150f0c9..d2a3e40 100644
--- a/src/agenda_culturel/import_tasks/importer.py
+++ b/src/agenda_culturel/import_tasks/importer.py
@@ -1,5 +1,5 @@
-from .downloader import *
-from .extractor import *
+from .downloader import SimpleDownloader
+from .extractor import Extractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
import logging
@@ -7,7 +7,6 @@ import logging
logger = logging.getLogger(__name__)
-
class URL2Events:
def __init__(
self, downloader=SimpleDownloader(), extractor=None, single_event=False
@@ -17,8 +16,13 @@ class URL2Events:
self.single_event = single_event
def process(
- self, url, url_human=None, cache=None, default_values=None, published=False,
- first=True
+ self,
+ url,
+ url_human=None,
+ cache=None,
+ default_values=None,
+ published=False,
+ first=True,
):
referer = ""
data = None
@@ -29,10 +33,12 @@ class URL2Events:
data = self.extractor.data
content_type = self.extractor.content_type
if self.extractor.no_downloader:
- content = ''
+ content = ""
if content is None:
- content = self.downloader.get_content(url, cache, referer=referer, content_type=content_type, data=data)
+ content = self.downloader.get_content(
+ url, cache, referer=referer, content_type=content_type, data=data
+ )
if content is None:
return None
@@ -45,16 +51,25 @@ class URL2Events:
else:
# if the extractor is not defined, use a list of default extractors
for e in Extractor.get_default_extractors(self.single_event):
- logger.warning('Extractor::' + type(e).__name__)
+ logger.info("Extractor::" + type(e).__name__)
e.set_downloader(self.downloader)
try:
- events = e.extract(content, url, url_human, default_values, published)
+ events = e.extract(
+ content, url, url_human, default_values, published
+ )
if events is not None:
if len(events) > 0:
return events
- except Exception as ex:
- if first and FacebookEventExtractor.is_known_url(url) and self.downloader.support_2nd_extract and e.has_2nd_method:
- logger.info('Using cookie trick on a facebook event')
+ except Exception:
+ if (
+ first
+ and FacebookEventExtractor.is_known_url(url)
+ and self.downloader.support_2nd_extract
+ and e.has_2nd_method
+ ):
+ logger.info("Using cookie trick on a facebook event")
e.prepare_2nd_extract()
- return self.process(url, url_human, cache, default_values, published, False)
+ return self.process(
+ url, url_human, cache, default_values, published, False
+ )
return None
diff --git a/src/agenda_culturel/import_tasks/twosteps_extractor.py b/src/agenda_culturel/import_tasks/twosteps_extractor.py
index 5300a4c..e6c05d0 100644
--- a/src/agenda_culturel/import_tasks/twosteps_extractor.py
+++ b/src/agenda_culturel/import_tasks/twosteps_extractor.py
@@ -1,19 +1,12 @@
from abc import abstractmethod
-from urllib.parse import urlparse
-from urllib.parse import parse_qs
-from bs4 import BeautifulSoup
-
import logging
+from .extractor import Extractor
+import datetime
logger = logging.getLogger(__name__)
-from .extractor import *
-from django.utils.translation import gettext_lazy as _
-from dateutil import parser
-import datetime
-
# A class to extract events from URL with two steps:
# - first build a list of urls where the events will be found
# - then for each document downloaded from these urls, build the events
@@ -43,7 +36,7 @@ class TwoStepsExtractor(Extractor):
def add_event_url_human(self, url, url_human):
self.add_event_property(url, "url_human", url_human)
-
+
def add_event_start_day(self, url, start_day):
self.add_event_property(url, "start_day", start_day)
@@ -150,8 +143,7 @@ class TwoStepsExtractor(Extractor):
published=False,
only_future=True,
ignore_404=True,
- first=True
-
+ first=True,
):
first = True
@@ -192,22 +184,34 @@ class TwoStepsExtractor(Extractor):
)
except Exception as e:
# some website (FB) sometime need a second step
- if first and self.has_2nd_method_in_list and self.downloader.support_2nd_extract:
- logger.info('Using cookie trick on a facebook event')
+ if (
+ first
+ and self.has_2nd_method_in_list
+ and self.downloader.support_2nd_extract
+ ):
+ logger.info("Using cookie trick on a facebook event")
first = False
# TMP: on trace ce qui se passe
- from agenda_culturel.import_tasks.generic_extractors import fbevents
- fbevents.CExtractor.dump_content_for_debug(content_event, event_url)
+ from agenda_culturel.import_tasks.generic_extractors import (
+ fbevents,
+ )
+
+ fbevents.CExtractor.dump_content_for_debug(
+ content_event, event_url
+ )
self.prepare_2nd_extract_in_list()
content_event = self.downloader.get_content(event_url)
- if not content_event is None:
+ if content_event is not None:
self.add_event_from_content(
- content_event, event_url, url_human, default_values, published
+ content_event,
+ event_url,
+ url_human,
+ default_values,
+ published,
)
else:
raise e
-
return self.get_structure()
@@ -221,15 +225,16 @@ class TwoStepsExtractorNoPause(TwoStepsExtractor):
default_values=None,
published=False,
only_future=True,
- ignore_404=True
+ ignore_404=True,
):
if hasattr(self.downloader, "pause"):
pause = self.downloader.pause
else:
pause = False
self.downloader.pause = False
- result = super().extract(content, url, url_human, default_values, published, only_future, ignore_404)
+ result = super().extract(
+ content, url, url_human, default_values, published, only_future, ignore_404
+ )
self.downloader.pause = pause
return result
-
diff --git a/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py b/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py
index d71f97e..ba20cbf 100644
--- a/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py
+++ b/src/agenda_culturel/migrations/0001_squashed_0150_alter_event_local_image.py
@@ -57,7 +57,7 @@ def set_fixed_masked_from_representative(apps, cats):
# for each event
to_update = []
for d in duplicated:
- d.fixed = not d.representative is None
+ d.fixed = d.representative is not None
to_update.append(d)
DuplicatedEvents.objects.bulk_update(to_update, fields=["fixed"])
@@ -68,7 +68,7 @@ def strip_place_aliases(apps, schema_editor):
places = Place.objects.all()
for p in places:
- if not p.aliases is None:
+ if p.aliases is not None:
p.aliases = [a.strip() for a in p.aliases]
Place.objects.bulk_update(places, fields=["aliases"])
diff --git a/src/agenda_culturel/models.py b/src/agenda_culturel/models.py
index c975ff3..554b523 100644
--- a/src/agenda_culturel/models.py
+++ b/src/agenda_culturel/models.py
@@ -1,8 +1,6 @@
from django.db import models, connection
-from django.core.exceptions import FieldDoesNotExist
from django_better_admin_arrayfield.models.fields import ArrayField
from django.utils.translation import gettext_lazy as _
-from django.utils.safestring import mark_safe
from django.template.defaultfilters import slugify
from django.utils.dateparse import parse_date
from django.urls import reverse
@@ -11,7 +9,7 @@ from django_ckeditor_5.fields import CKEditor5Field
from urllib.parse import urlparse
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key
-from django.contrib.auth.models import User, AnonymousUser
+from django.contrib.auth.models import User
import emoji
from django.core.files.storage import default_storage
from django.contrib.sites.models import Site
@@ -30,24 +28,24 @@ from django.utils import timezone
from django.contrib.postgres.search import TrigramSimilarity
from django.db.models import Q, Count, F, Subquery, OuterRef, Func
from django.db.models.functions import Lower
-from django.contrib.postgres.lookups import Unaccent
import recurrence.fields
import recurrence
import copy
import unicodedata
from collections import defaultdict
-from .import_tasks.generic_extractors.fbevent import CExtractor as FacebookEventExtractor
+from .import_tasks.generic_extractors.fbevent import (
+ CExtractor as FacebookEventExtractor,
+)
from .import_tasks.extractor import Extractor
from django.template.defaultfilters import date as _date
from datetime import time, timedelta, date
from django.utils.timezone import datetime
-from django.utils import timezone
from location_field.models.spatial import LocationField
from django.contrib.gis.geos import Point
-from .calendar import CalendarList, CalendarDay
+from .calendar import CalendarDay
from icalendar import Calendar as icalCal
from icalendar import Event as icalEvent
@@ -71,8 +69,9 @@ class StaticContent(models.Model):
unique=True,
)
text = CKEditor5Field(
- verbose_name=_("Content"), help_text=_("Text as shown to the visitors"),
- blank=True
+ verbose_name=_("Content"),
+ help_text=_("Text as shown to the visitors"),
+ blank=True,
)
url_path = models.CharField(
verbose_name=_("URL path"),
@@ -83,7 +82,7 @@ class StaticContent(models.Model):
verbose_name = _("Static content")
verbose_name_plural = _("Static contents")
indexes = [
- models.Index(fields=['name']),
+ models.Index(fields=["name"]),
]
def __str__(self):
@@ -114,7 +113,7 @@ class Category(models.Model):
verbose_name=_("Name"), help_text=_("Category name"), max_length=512
)
- slug = AutoSlugField(null=True, default=None, unique=True, populate_from='name')
+ slug = AutoSlugField(null=True, default=None, unique=True, populate_from="name")
color = ColorField(
verbose_name=_("Color"),
@@ -135,7 +134,6 @@ class Category(models.Model):
verbose_name=_("Position for ordering categories"), default=0
)
-
def save(self, *args, **kwargs):
if self.color is None:
existing_colors = [c.color for c in Category.objects.all()]
@@ -157,7 +155,7 @@ class Category(models.Model):
default = Category.objects.get(name=Category.default_name)
return default
- except:
+ except Exception:
# if it does not exist, return it
default, created = Category.objects.get_or_create(
name=Category.default_name,
@@ -176,7 +174,7 @@ class Category(models.Model):
return "cat-" + str(self.id)
def get_absolute_url(self):
- return reverse('home_category', kwargs={"cat": self.slug})
+ return reverse("home_category", kwargs={"cat": self.slug})
def __str__(self):
return self.name
@@ -185,15 +183,14 @@ class Category(models.Model):
verbose_name = _("Category")
verbose_name_plural = _("Categories")
indexes = [
- models.Index(fields=['name']),
+ models.Index(fields=["name"]),
]
class Tag(models.Model):
name = models.CharField(
- verbose_name=_("Name"), help_text=_("Tag name"), max_length=512,
- unique=True
+ verbose_name=_("Name"), help_text=_("Tag name"), max_length=512, unique=True
)
description = CKEditor5Field(
@@ -205,17 +202,19 @@ class Tag(models.Model):
principal = models.BooleanField(
verbose_name=_("Principal"),
- help_text=_("This tag is highlighted as a main tag for visitors, particularly in the filter."),
+ help_text=_(
+ "This tag is highlighted as a main tag for visitors, particularly in the filter."
+ ),
default=False,
)
- in_excluded_suggestions = models.BooleanField(
+ in_excluded_suggestions = models.BooleanField(
verbose_name=_("In excluded suggestions"),
help_text=_("This tag will be part of the excluded suggestions."),
default=False,
)
- in_included_suggestions = models.BooleanField(
+ in_included_suggestions = models.BooleanField(
verbose_name=_("In included suggestions"),
help_text=_("This tag will be part of the included suggestions."),
default=False,
@@ -225,24 +224,31 @@ class Tag(models.Model):
verbose_name = _("Tag")
verbose_name_plural = _("Tags")
indexes = [
- models.Index(fields=['name']),
+ models.Index(fields=["name"]),
]
-
def get_absolute_url(self):
return reverse("view_tag", kwargs={"t": self.name})
-
def clear_cache():
for exclude in [False, True]:
for include in [False, True]:
for nb_suggestions in [10]:
- id_cache = 'all_tags ' + str(exclude) + ' ' + str(include) + ' ' + str(nb_suggestions)
+ id_cache = (
+ "all_tags "
+ + str(exclude)
+ + " "
+ + str(include)
+ + " "
+ + str(nb_suggestions)
+ )
id_cache = hashlib.md5(id_cache.encode("utf8")).hexdigest()
cache.delete(id_cache)
def get_tag_groups(nb_suggestions=10, exclude=False, include=False, all=False):
- id_cache = 'all_tags ' + str(exclude) + ' ' + str(include) + ' ' + str(nb_suggestions)
+ id_cache = (
+ "all_tags " + str(exclude) + " " + str(include) + " " + str(nb_suggestions)
+ )
id_cache = hashlib.md5(id_cache.encode("utf8")).hexdigest()
result = cache.get(id_cache)
@@ -254,13 +260,19 @@ class Tag(models.Model):
obj_tags = Tag.objects
if all:
- obj_tags = obj_tags.filter(Q(in_excluded_suggestions=True)|Q(in_included_suggestions=True)|Q(principal=True))
+ obj_tags = obj_tags.filter(
+ Q(in_excluded_suggestions=True)
+ | Q(in_included_suggestions=True)
+ | Q(principal=True)
+ )
else:
if exclude:
obj_tags = obj_tags.filter(Q(in_excluded_suggestions=True))
if include:
- obj_tags = obj_tags.filter(Q(in_included_suggestions=True)|Q(principal=True))
-
+ obj_tags = obj_tags.filter(
+ Q(in_included_suggestions=True) | Q(principal=True)
+ )
+
if not exclude and not include:
obj_tags = obj_tags.filter(principal=True)
@@ -268,36 +280,56 @@ class Tag(models.Model):
if len(obj_tags) > nb_suggestions:
nb_suggestions = len(obj_tags)
-
- tags = [{"tag": t["tag"], "count": 1000000 if t["tag"] in obj_tags else t["count"]} for t in free_tags]
- tags += [{"tag": o, "count": 0} for o in Tag.objects.filter(~Q(name__in=f_tags)).values_list("name", flat=True)]
+ tags = [
+ {
+ "tag": t["tag"],
+ "count": 1000000 if t["tag"] in obj_tags else t["count"],
+ }
+ for t in free_tags
+ ]
+ tags += [
+ {"tag": o, "count": 0}
+ for o in Tag.objects.filter(~Q(name__in=f_tags)).values_list(
+ "name", flat=True
+ )
+ ]
tags.sort(key=lambda x: -x["count"])
tags1 = tags[0:nb_suggestions]
- tags1.sort(key=lambda x: emoji.demojize(remove_accents(x["tag"]).lower(), delimiters=('000', '')))
+ tags1.sort(
+ key=lambda x: emoji.demojize(
+ remove_accents(x["tag"]).lower(), delimiters=("000", "")
+ )
+ )
tags2 = tags[nb_suggestions:]
- tags2.sort(key=lambda x: emoji.demojize(remove_accents(x["tag"]).lower(), delimiters=('000', '')))
+ tags2.sort(
+ key=lambda x: emoji.demojize(
+ remove_accents(x["tag"]).lower(), delimiters=("000", "")
+ )
+ )
- result = ((_('Suggestions'), [(t["tag"], t["tag"]) for t in tags1]),
- (_('Others'), [(t["tag"], t["tag"]) for t in tags2]))
-
- cache.set(id_cache, result, 3000) # 50mn
+ result = (
+ (_("Suggestions"), [(t["tag"], t["tag"]) for t in tags1]),
+ (_("Others"), [(t["tag"], t["tag"]) for t in tags2]),
+ )
+
+ cache.set(id_cache, result, 3000) # 50mn
return result
def __str__(self):
return self.name
-
class DuplicatedEvents(models.Model):
-
representative = models.ForeignKey(
"Event",
verbose_name=_("Representative event"),
- help_text=_("This event is the representative event of the duplicated events group"),
+ help_text=_(
+ "This event is the representative event of the duplicated events group"
+ ),
null=True,
default=None,
on_delete=models.SET_DEFAULT,
@@ -307,10 +339,9 @@ class DuplicatedEvents(models.Model):
verbose_name = _("Duplicated events")
verbose_name_plural = _("Duplicated events")
indexes = [
- models.Index(fields=['representative']),
+ models.Index(fields=["representative"]),
]
-
def __init__(self, *args, **kwargs):
self.events = None
super().__init__(*args, **kwargs)
@@ -330,7 +361,7 @@ class DuplicatedEvents(models.Model):
return self.representative
def fixed(self):
- return not self.representative is None
+ return self.representative is not None
def is_published(self):
return len([e for e in self.get_duplicated() if e.is_published()]) > 0
@@ -345,12 +376,12 @@ class DuplicatedEvents(models.Model):
if self.representative and self.representative.local_version():
return self.representative
- l = [e for e in self.get_duplicated() if e.local_version()]
- if len(l) == 0:
+ lv = [e for e in self.get_duplicated() if e.local_version()]
+ if len(lv) == 0:
return None
else:
- l.sort(key=lambda x: x.modified_date, reverse=True)
- return l[0]
+ lv.sort(key=lambda x: x.modified_date, reverse=True)
+ return lv[0]
def merge_into(self, other):
# for all objects associated to this group
@@ -366,7 +397,7 @@ class DuplicatedEvents(models.Model):
self.delete()
# this method fixes the duplicated events by using the given event
- # as the representative one.
+ # as the representative one.
# if no event is given, the last one (by creation date) is selected.
def fix(self, event=None):
events = self.get_duplicated()
@@ -375,13 +406,12 @@ class DuplicatedEvents(models.Model):
for e in events:
if event is None:
event = e
- if not event is None:
+ if event is not None:
event.status = Event.STATUS.PUBLISHED
self.representative = event
Event.objects.bulk_update(events, fields=["status"])
self.save()
return len(events)
-
def merge_groups(groups):
if len(groups) == 0:
@@ -404,20 +434,21 @@ class DuplicatedEvents(models.Model):
nb, d = singletons.delete()
return nb
-
def not_fixed_qs(qs=None, fixed=False):
if not qs:
qs = DuplicatedEvents.objects
-
- qs = qs.annotate(nb_no_trash=Count("event", filter=~Q(event__status=Event.STATUS.TRASH)))
- q = ~Q(representative__isnull=True)|Q(nb_no_trash__lte=1)
+
+ qs = qs.annotate(
+ nb_no_trash=Count("event", filter=~Q(event__status=Event.STATUS.TRASH))
+ )
+ q = ~Q(representative__isnull=True) | Q(nb_no_trash__lte=1)
if fixed:
return qs.filter(q)
else:
return qs.exclude(q)
def save(self, *args, **kwargs):
- if self.representative and not self.representative in self.event_set.all():
+ if self.representative and self.representative not in self.event_set.all():
self.representative = None
super().save(*args, **kwargs)
@@ -425,7 +456,12 @@ class DuplicatedEvents(models.Model):
def get_import_messages(self):
msgs = []
for e in self.get_duplicated():
- for m in e.message_set.filter(message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS]).order_by("date"):
+ for m in e.message_set.filter(
+ message_type__in=[
+ Message.TYPE.IMPORT_PROCESS,
+ Message.TYPE.UPDATE_PROCESS,
+ ]
+ ).order_by("date"):
msgs.append(m)
return msgs
@@ -438,35 +474,44 @@ class DuplicatedEvents(models.Model):
class ReferenceLocation(models.Model):
- name = models.CharField(verbose_name=_("Name"), help_text=_("Name of the location"), unique=True, null=False)
- location = LocationField(based_fields=["name"], zoom=12, default=Point(3.08333, 45.783329), srid=4326)
+ name = models.CharField(
+ verbose_name=_("Name"),
+ help_text=_("Name of the location"),
+ unique=True,
+ null=False,
+ )
+ location = LocationField(
+ based_fields=["name"], zoom=12, default=Point(3.08333, 45.783329), srid=4326
+ )
main = models.IntegerField(
verbose_name=_("Main"),
- help_text=_("This location is one of the main locations (shown first higher values)."),
+ help_text=_(
+ "This location is one of the main locations (shown first higher values)."
+ ),
default=0,
)
suggested_distance = models.IntegerField(
verbose_name=_("Suggested distance (km)"),
- help_text=_("If this distance is given, this location is part of the suggested filters."),
+ help_text=_(
+ "If this distance is given, this location is part of the suggested filters."
+ ),
null=True,
- default=None
+ default=None,
)
- slug = AutoSlugField(null=True, default=None, unique=True, populate_from='name')
+ slug = AutoSlugField(null=True, default=None, unique=True, populate_from="name")
class Meta:
verbose_name = _("Reference location")
verbose_name_plural = _("Reference locations")
indexes = [
- models.Index(fields=['name']),
+ models.Index(fields=["name"]),
]
def __str__(self):
return self.name
-
-
class Place(models.Model):
name = models.CharField(verbose_name=_("Name"), help_text=_("Name of the place"))
address = models.CharField(
@@ -475,9 +520,20 @@ class Place(models.Model):
blank=True,
null=True,
)
- postcode = models.CharField(verbose_name=_("Postcode"), help_text=_("The post code is not displayed, but makes it easier to find an address when you enter it."), blank=True, null=True)
+ postcode = models.CharField(
+ verbose_name=_("Postcode"),
+ help_text=_(
+ "The post code is not displayed, but makes it easier to find an address when you enter it."
+ ),
+ blank=True,
+ null=True,
+ )
city = models.CharField(verbose_name=_("City"), help_text=_("City name"))
- location = LocationField(based_fields=["name", "address", "postcode", "city"], zoom=12, default=Point(3.08333, 45.783329))
+ location = LocationField(
+ based_fields=["name", "address", "postcode", "city"],
+ zoom=12,
+ default=Point(3.08333, 45.783329),
+ )
description = CKEditor5Field(
verbose_name=_("Description"),
@@ -501,9 +557,9 @@ class Place(models.Model):
verbose_name_plural = _("Places")
ordering = ["name"]
indexes = [
- models.Index(fields=['name']),
- models.Index(fields=['city']),
- models.Index(fields=['location']),
+ models.Index(fields=["name"]),
+ models.Index(fields=["city"]),
+ models.Index(fields=["location"]),
]
def __str__(self):
@@ -513,13 +569,19 @@ class Place(models.Model):
return self.name + ", " + self.city
def get_absolute_url(self):
- return reverse("view_place_fullname", kwargs={"pk": self.pk, "extra": slugify(self.name)})
+ return reverse(
+ "view_place_fullname", kwargs={"pk": self.pk, "extra": slugify(self.name)}
+ )
def nb_events(self):
return Event.objects.filter(exact_location=self).count()
def nb_events_future(self):
- return Event.objects.filter(start_day__gte=datetime.now()).filter(exact_location=self).count()
+ return (
+ Event.objects.filter(start_day__gte=datetime.now())
+ .filter(exact_location=self)
+ .count()
+ )
def match(self, event):
if self.aliases and event.location:
@@ -549,14 +611,19 @@ class Place(models.Model):
for p in Place.objects.values("city").distinct().order_by("city")
]
)
- except:
+ except Exception:
tags = []
return tags
+
class Organisation(models.Model):
name = models.CharField(
- verbose_name=_("Name"), help_text=_("Organisation name"), max_length=512, null=False, unique=True
- )
+ verbose_name=_("Name"),
+ help_text=_("Organisation name"),
+ max_length=512,
+ null=False,
+ unique=True,
+ )
website = models.URLField(
verbose_name=_("Website"),
@@ -576,7 +643,9 @@ class Organisation(models.Model):
principal_place = models.ForeignKey(
Place,
verbose_name=_("Principal place"),
- help_text=_("Place mainly associated with this organizer. Mainly used if there is a similarity in the name, to avoid redundant displays."),
+ help_text=_(
+ "Place mainly associated with this organizer. Mainly used if there is a similarity in the name, to avoid redundant displays."
+ ),
null=True,
on_delete=models.SET_NULL,
blank=True,
@@ -590,8 +659,7 @@ class Organisation(models.Model):
return self.name
def get_absolute_url(self):
- return reverse("view_organisation", kwargs={'pk': self.pk, "extra": self.name})
-
+ return reverse("view_organisation", kwargs={"pk": self.pk, "extra": self.name})
class Event(models.Model):
@@ -612,7 +680,7 @@ class Event(models.Model):
blank=True,
default=None,
on_delete=models.SET_DEFAULT,
- related_name="created_events"
+ related_name="created_events",
)
imported_by_user = models.ForeignKey(
User,
@@ -621,7 +689,7 @@ class Event(models.Model):
blank=True,
default=None,
on_delete=models.SET_DEFAULT,
- related_name="imported_events"
+ related_name="imported_events",
)
modified_by_user = models.ForeignKey(
User,
@@ -630,7 +698,7 @@ class Event(models.Model):
blank=True,
default=None,
on_delete=models.SET_DEFAULT,
- related_name="modified_events"
+ related_name="modified_events",
)
moderated_by_user = models.ForeignKey(
User,
@@ -639,15 +707,13 @@ class Event(models.Model):
blank=True,
default=None,
on_delete=models.SET_DEFAULT,
- related_name="moderated_events"
+ related_name="moderated_events",
)
recurrence_dtstart = models.DateTimeField(editable=False, blank=True, null=True)
recurrence_dtend = models.DateTimeField(editable=False, blank=True, null=True)
- title = models.CharField(
- verbose_name=_("Title"), max_length=512
- )
+ title = models.CharField(verbose_name=_("Title"), max_length=512)
status = models.CharField(
_("Status"), max_length=20, choices=STATUS.choices, default=STATUS.DRAFT
@@ -661,9 +727,7 @@ class Event(models.Model):
on_delete=models.SET_DEFAULT,
)
- start_day = models.DateField(
- verbose_name=_("Start day")
- )
+ start_day = models.DateField(verbose_name=_("Start day"))
start_time = models.TimeField(
verbose_name=_("Start time"),
blank=True,
@@ -675,9 +739,7 @@ class Event(models.Model):
blank=True,
null=True,
)
- end_time = models.TimeField(
- verbose_name=_("End time"), blank=True, null=True
- )
+ end_time = models.TimeField(verbose_name=_("End time"), blank=True, null=True)
recurrences = recurrence.fields.RecurrenceField(
verbose_name=_("Recurrence"), include_dtstart=False, blank=True, null=True
@@ -698,7 +760,7 @@ class Event(models.Model):
max_length=512,
default="",
null=True,
- blank=True
+ blank=True,
)
description = models.TextField(
@@ -707,14 +769,14 @@ class Event(models.Model):
null=True,
)
-
- organisers = models.ManyToManyField(Organisation,
- related_name='organised_events',
+ organisers = models.ManyToManyField(
+ Organisation,
+ related_name="organised_events",
verbose_name=_("Organisers"),
help_text=_(
"list of event organisers. Organizers will only be displayed if one of them does not normally use the venue."
),
- blank=True
+ blank=True,
)
local_image = ResizedImageField(
@@ -782,7 +844,9 @@ class Event(models.Model):
self._messages = []
def get_import_messages(self):
- return self.message_set.filter(message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS]).order_by("date")
+ return self.message_set.filter(
+ message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS]
+ ).order_by("date")
def get_consolidated_end_day(self, intuitive=True):
if intuitive:
@@ -869,7 +933,14 @@ class Event(models.Model):
models.Index(fields=["recurrences"]),
models.Index(fields=["recurrence_dtstart", "recurrence_dtend"]),
models.Index("start_time", Lower("title"), name="start_time title"),
- models.Index("start_time", "start_day", "end_day", "end_time", Lower("title"), name="datetimes title")
+ models.Index(
+ "start_time",
+ "start_day",
+ "end_day",
+ "end_time",
+ Lower("title"),
+ name="datetimes title",
+ ),
]
def chronology_dates(self):
@@ -877,31 +948,69 @@ class Event(models.Model):
def chronology(self, simple=False):
c = []
- if self.modified_date:
- c.append({ "timestamp": self.modified_date, "data": "modified_date", "user": self.modified_by_user, "is_date": True })
+ if self.modified_date:
+ c.append(
+ {
+ "timestamp": self.modified_date,
+ "data": "modified_date",
+ "user": self.modified_by_user,
+ "is_date": True,
+ }
+ )
if self.moderated_date:
- c.append({ "timestamp": self.moderated_date, "data": "moderated_date", "user" : self.moderated_by_user, "is_date": True})
+ c.append(
+ {
+ "timestamp": self.moderated_date,
+ "data": "moderated_date",
+ "user": self.moderated_by_user,
+ "is_date": True,
+ }
+ )
if self.imported_date:
- c.append({ "timestamp": self.imported_date, "data": "imported_date", "user": self.imported_by_user, "is_date": True })
+ c.append(
+ {
+ "timestamp": self.imported_date,
+ "data": "imported_date",
+ "user": self.imported_by_user,
+ "is_date": True,
+ }
+ )
if self.created_date:
if self.created_by_user:
user = self.created_by_user
else:
if self.in_recurrent_import():
- user = _('recurrent import')
+ user = _("recurrent import")
else:
- user = _('a non authenticated user')
+ user = _("a non authenticated user")
- c.append({ "timestamp": self.created_date + timedelta(milliseconds=-1), "data": "created_date", "user": user, "is_date": True})
+ c.append(
+ {
+ "timestamp": self.created_date + timedelta(milliseconds=-1),
+ "data": "created_date",
+ "user": user,
+ "is_date": True,
+ }
+ )
if not simple:
- c += [{ "timestamp": m.date, "data": m, "user": m.user, "is_date": False} for m in self.message_set.filter(spam=False)]
+ c += [
+ {"timestamp": m.date, "data": m, "user": m.user, "is_date": False}
+ for m in self.message_set.filter(spam=False)
+ ]
if self.other_versions:
for o in self.other_versions.get_duplicated():
if o != self:
- c += [{ "timestamp": m.date, "data": m, "user": m.user, "is_date": False} for m in o.message_set.filter(spam=False)]
-
+ c += [
+ {
+ "timestamp": m.date,
+ "data": m,
+ "user": m.user,
+ "is_date": False,
+ }
+ for m in o.message_set.filter(spam=False)
+ ]
c.sort(key=lambda x: x["timestamp"])
@@ -912,7 +1021,11 @@ class Event(models.Model):
return []
else:
result = self.tags
- result.sort(key=lambda x: emoji.demojize(remove_accents(x.lower()), delimiters=('000', '')))
+ result.sort(
+ key=lambda x: emoji.demojize(
+ remove_accents(x.lower()), delimiters=("000", "")
+ )
+ )
return result
def get_all_tags(sort=True):
@@ -924,7 +1037,11 @@ class Event(models.Model):
cursor.execute(raw_query)
result = [{"tag": row[0], "count": row[1]} for row in cursor]
if sort:
- result.sort(key=lambda x: emoji.demojize(remove_accents(x["tag"].lower()), delimiters=('000', '')))
+ result.sort(
+ key=lambda x: emoji.demojize(
+ remove_accents(x["tag"].lower()), delimiters=("000", "")
+ )
+ )
return result
def is_draft(self):
@@ -941,11 +1058,14 @@ class Event(models.Model):
not self.pure_import()
and (self.modified_date - self.created_date).total_seconds() > 1
)
-
+
def pure_import(self):
if self.imported_date is None:
return False
- return self.modified_date is None or (self.modified_date - self.imported_date).total_seconds() <= 0
+ return (
+ self.modified_date is None
+ or (self.modified_date - self.imported_date).total_seconds() <= 0
+ )
def local_version(self):
return self.imported_date is None or self.modified()
@@ -955,7 +1075,7 @@ class Event(models.Model):
if self.other_versions:
for o in self.other_versions.get_duplicated():
- if o.status == Event.STATUS.PUBLISHED and not o.reference_urls is None:
+ if o.status == Event.STATUS.PUBLISHED and o.reference_urls is not None:
res += o.reference_urls
res = list(set(res))
@@ -980,7 +1100,10 @@ class Event(models.Model):
if self.exact_location is None:
has_significant = True
else:
- has_significant = self.organisers.filter(~Q(principal_place=self.exact_location)).count() > 0
+ has_significant = (
+ self.organisers.filter(~Q(principal_place=self.exact_location)).count()
+ > 0
+ )
if has_significant:
return self.organisers.all()
@@ -989,37 +1112,70 @@ class Event(models.Model):
def get_nb_not_moderated(first_day, nb_mod_days=21, nb_classes=4):
window_end = first_day + timedelta(days=nb_mod_days)
- nb_not_moderated = Event.objects.filter(~Q(status=Event.STATUS.TRASH)). \
- filter(Q(start_day__gte=first_day)&Q(start_day__lte=window_end)). \
- filter(
- Q(other_versions__isnull=True) |
- Q(other_versions__representative=F('pk')) |
- Q(other_versions__representative__isnull=True)).values("start_day").\
- annotate(not_moderated=Count("start_day", filter=Q(moderated_date__isnull=True))). \
- annotate(nb_events=Count("start_day")). \
- order_by("start_day").values("not_moderated", "nb_events", "start_day")
-
+ nb_not_moderated = (
+ Event.objects.filter(~Q(status=Event.STATUS.TRASH))
+ .filter(Q(start_day__gte=first_day) & Q(start_day__lte=window_end))
+ .filter(
+ Q(other_versions__isnull=True)
+ | Q(other_versions__representative=F("pk"))
+ | Q(other_versions__representative__isnull=True)
+ )
+ .values("start_day")
+ .annotate(
+ not_moderated=Count("start_day", filter=Q(moderated_date__isnull=True))
+ )
+ .annotate(nb_events=Count("start_day"))
+ .order_by("start_day")
+ .values("not_moderated", "nb_events", "start_day")
+ )
+
max_not_moderated = max([x["not_moderated"] for x in nb_not_moderated])
if max_not_moderated == 0:
max_not_moderated = 1
- nb_not_moderated_dict = dict([(x["start_day"], (x["not_moderated"], x["nb_events"])) for x in nb_not_moderated])
+ nb_not_moderated_dict = dict(
+ [
+ (x["start_day"], (x["not_moderated"], x["nb_events"]))
+ for x in nb_not_moderated
+ ]
+ )
# add missing dates
date_list = [first_day + timedelta(days=x) for x in range(0, nb_mod_days)]
- nb_not_moderated = [{"start_day": d,
- "is_today": d == first_day,
- "nb_events": nb_not_moderated_dict[d][1] if d in nb_not_moderated_dict else 0,
- "not_moderated": nb_not_moderated_dict[d][0] if d in nb_not_moderated_dict else 0} for d in date_list]
- nb_not_moderated = [ x | { "note": 0 if x["not_moderated"] == 0 else int((nb_classes - 1) * x["not_moderated"] / max_not_moderated) + 1 } for x in nb_not_moderated]
- return [nb_not_moderated[x:x + 7] for x in range(0, len(nb_not_moderated), 7)]
-
+ nb_not_moderated = [
+ {
+ "start_day": d,
+ "is_today": d == first_day,
+ "nb_events": (
+ nb_not_moderated_dict[d][1] if d in nb_not_moderated_dict else 0
+ ),
+ "not_moderated": (
+ nb_not_moderated_dict[d][0] if d in nb_not_moderated_dict else 0
+ ),
+ }
+ for d in date_list
+ ]
+ nb_not_moderated = [
+ x
+ | {
+ "note": (
+ 0
+ if x["not_moderated"] == 0
+ else int((nb_classes - 1) * x["not_moderated"] / max_not_moderated)
+ + 1
+ )
+ }
+ for x in nb_not_moderated
+ ]
+ return [nb_not_moderated[x : x + 7] for x in range(0, len(nb_not_moderated), 7)]
def nb_draft_events():
return Event.objects.filter(status=Event.STATUS.DRAFT).count()
def get_qs_events_with_unkwnon_place():
- return Event.objects.filter(exact_location__isnull=True). \
- filter(~Q(status=Event.STATUS.TRASH)). \
- filter(Q(other_versions=None)|Q(other_versions__representative=F('pk')))
+ return (
+ Event.objects.filter(exact_location__isnull=True)
+ .filter(~Q(status=Event.STATUS.TRASH))
+ .filter(Q(other_versions=None) | Q(other_versions__representative=F("pk")))
+ )
def is_representative(self):
return self.other_versions is None or self.other_versions.representative == self
@@ -1032,21 +1188,21 @@ class Event(models.Model):
def download_image(self):
# first download file
- if str(self.image) != '':
+ if str(self.image) != "":
a = urlparse(self.image)
basename = os.path.basename(a.path)
try:
basename = basename.decode()
- except:
+ except Exception:
pass
- ext = basename.split('.')[-1]
+ ext = basename.split(".")[-1]
filename = "%s.%s" % (uuid.uuid4(), ext)
try:
tmpfile, _ = urllib.request.urlretrieve(self.image)
- except:
+ except Exception:
return None
# if the download is ok, then create the corresponding file object
@@ -1054,7 +1210,7 @@ class Event(models.Model):
def add_pending_organisers(self, organisers):
self.pending_organisers = organisers
-
+
def has_pending_organisers(self):
return hasattr(self, "pending_organisers")
@@ -1138,7 +1294,7 @@ class Event(models.Model):
return request.build_absolute_uri(self.local_image.url)
else:
return self.local_image.url
- except:
+ except Exception:
pass
if self.image:
return self.image
@@ -1148,7 +1304,6 @@ class Event(models.Model):
def has_image_url(self):
return self.get_image_url() is not None
-
# return a copy of the current object for each recurrence between first an last date (included)
def get_recurrences_between(self, firstdate, lastdate):
if not self.has_recurrences():
@@ -1190,16 +1345,12 @@ class Event(models.Model):
stime = (
time.fromisoformat(self.start_time)
if isinstance(self.start_time, str)
- else time()
- if self.start_time is None
- else self.start_time
+ else time() if self.start_time is None else self.start_time
)
etime = (
time.fromisoformat(self.end_time)
if isinstance(self.end_time, str)
- else time()
- if self.end_time is None
- else self.end_time
+ else time() if self.end_time is None else self.end_time
)
self.recurrence_dtstart = datetime.combine(sday, stime)
@@ -1226,7 +1377,7 @@ class Event(models.Model):
self.recurrence_dtend += (
self.recurrences.dtend - self.recurrences.dtstart
)
- except:
+ except Exception:
self.recurrence_dtend = self.recurrence_dtstart
def prepare_save(self):
@@ -1235,12 +1386,14 @@ class Event(models.Model):
self.update_recurrence_dtstartend()
# if the image is defined but not locally downloaded
- if self.image and (not self.local_image or not default_storage.exists(self.local_image.name)):
+ if self.image and (
+ not self.local_image or not default_storage.exists(self.local_image.name)
+ ):
self.download_image()
# remove "/" from tags
if self.tags:
- self.tags = [t.replace('/', '-') for t in self.tags]
+ self.tags = [t.replace("/", "-") for t in self.tags]
# in case of importation process
if self.is_in_importation_process():
@@ -1254,14 +1407,18 @@ class Event(models.Model):
if not self.category or self.category.name == Category.default_name:
CategorisationRule.apply_rules(self)
-
def get_contributor_message(self):
types = [Message.TYPE.FROM_CONTRIBUTOR, Message.TYPE.FROM_CONTRIBUTOR_NO_MSG]
if self.other_versions is None or self.other_versions.representative is None:
- return Message.objects.filter(related_event=self.pk, message_type__in=types, closed=False)
+ return Message.objects.filter(
+ related_event=self.pk, message_type__in=types, closed=False
+ )
else:
- return Message.objects.filter(related_event__in=self.other_versions.get_duplicated(), message_type__in=types, closed=False)
-
+ return Message.objects.filter(
+ related_event__in=self.other_versions.get_duplicated(),
+ message_type__in=types,
+ closed=False,
+ )
def notify_if_required(self, request):
notif = False
@@ -1269,16 +1426,30 @@ class Event(models.Model):
messages = self.get_contributor_message()
if messages:
for message in messages:
- if message and not message.closed and message.email and message.email != "":
+ if (
+ message
+ and not message.closed
+ and message.email
+ and message.email != ""
+ ):
# send email
- context = {"sitename": Site.objects.get_current(request).name, 'event_title': self.title }
+ context = {
+ "sitename": Site.objects.get_current(request).name,
+ "event_title": self.title,
+ }
if self.status == Event.STATUS.PUBLISHED:
- context["url"] = request.build_absolute_uri(self.get_absolute_url())
- subject = _('Your event has been published')
- body = render_to_string("agenda_culturel/emails/published.txt", context)
+ context["url"] = request.build_absolute_uri(
+ self.get_absolute_url()
+ )
+ subject = _("Your event has been published")
+ body = render_to_string(
+ "agenda_culturel/emails/published.txt", context
+ )
else:
- subject = _('Your message has not been retained')
- body = render_to_string("agenda_culturel/emails/retained.txt", context)
+ subject = _("Your message has not been retained")
+ body = render_to_string(
+ "agenda_culturel/emails/retained.txt", context
+ )
send_mail(subject, body, None, [message.email])
message.closed = True
@@ -1287,7 +1458,6 @@ class Event(models.Model):
return notif
-
def save(self, *args, **kwargs):
self.prepare_save()
@@ -1304,15 +1474,15 @@ class Event(models.Model):
# if it exists similar events, add this relation to the event
if len(similar_events) != 0:
self.set_other_versions(similar_events)
-
+
# check if it's a clone (that will become representative)
- clone = self.pk is None and not self.other_versions is None
+ clone = self.pk is None and self.other_versions is not None
# check if we need to clean the other_versions
if (
- not clone and
- self.pk and
- self.other_versions is not None
+ not clone
+ and self.pk
+ and self.other_versions is not None
and self.other_versions.nb_duplicated() == 1
):
self.other_versions.delete()
@@ -1340,7 +1510,7 @@ class Event(models.Model):
if clone:
self.other_versions.representative = self
self.other_versions.save()
- # if we just clone a single event, its status is
+ # if we just clone a single event, its status is
# the same as the status of the current object
if self.other_versions.get_duplicated().count() == 2:
for e in self.other_versions.get_duplicated():
@@ -1351,21 +1521,27 @@ class Event(models.Model):
def from_structure(event_structure, import_source=None):
# organisers is a manytomany relation thus cannot be initialised before creation of the event
- organisers = event_structure.pop('organisers', None)
+ organisers = event_structure.pop("organisers", None)
# supplementary information
- email = event_structure.pop('email', None)
- comments = event_structure.pop('comments', None)
- warnings = event_structure.pop('warnings', [])
+ email = event_structure.pop("email", None)
+ comments = event_structure.pop("comments", None)
+ warnings = event_structure.pop("warnings", [])
for w in warnings:
if w == Extractor.Warning.NO_START_DATE:
- event_structure["title"] += " - " + _('Warning') + ": " + _('the date has not been imported correctly.')
-
+ event_structure["title"] += (
+ " - "
+ + _("Warning")
+ + ": "
+ + _("the date has not been imported correctly.")
+ )
if "category" in event_structure and event_structure["category"] is not None:
try:
event_structure["category"] = Category.objects.get(
- name__unaccent__icontains=remove_accents(event_structure["category"].lower())
+ name__unaccent__icontains=remove_accents(
+ event_structure["category"].lower()
+ )
)
except Category.DoesNotExist:
event_structure["category"] = Category.get_default_category()
@@ -1438,36 +1614,57 @@ class Event(models.Model):
result = Event(**event_structure)
result.add_pending_organisers(organisers)
if email or comments:
- has_comments = not comments in ["", None]
- result.add_message(Message(subject=_('during import process'),
- email=email,
- message=comments,
- closed=False,
- message_type=Message.TYPE.FROM_CONTRIBUTOR if has_comments else Message.TYPE.FROM_CONTRIBUTOR_NO_MSG))
+ has_comments = comments not in ["", None]
+ result.add_message(
+ Message(
+ subject=_("during import process"),
+ email=email,
+ message=comments,
+ closed=False,
+ message_type=(
+ Message.TYPE.FROM_CONTRIBUTOR
+ if has_comments
+ else Message.TYPE.FROM_CONTRIBUTOR_NO_MSG
+ ),
+ )
+ )
for w in warnings:
if w == Extractor.Warning.NO_START_DATE:
result.set_invalid_start_date()
- result.add_message(Message(subject=_('warning'),
- closed=False,
- message=_('the date has not been imported correctly.'),
- message_type=Message.TYPE.WARNING))
+ result.add_message(
+ Message(
+ subject=_("warning"),
+ closed=False,
+ message=_("the date has not been imported correctly."),
+ message_type=Message.TYPE.WARNING,
+ )
+ )
if w == Extractor.Warning.NO_TITLE:
result.set_invalid_title()
- result.add_message(Message(subject=_('warning'),
- closed=False,
- message=_('the title has not been imported correctly.'),
- message_type=Message.TYPE.WARNING))
+ result.add_message(
+ Message(
+ subject=_("warning"),
+ closed=False,
+ message=_("the title has not been imported correctly."),
+ message_type=Message.TYPE.WARNING,
+ )
+ )
if w == Extractor.Warning.NOT_FOUND:
result.status = Event.STATUS.DRAFT
result.set_is_not_found_import()
- result.add_message(Message(subject=_('warning'),
- closed=False,
- message=_('The import was unable to find an event in the page.'),
- message_type=Message.TYPE.WARNING))
+ result.add_message(
+ Message(
+ subject=_("warning"),
+ closed=False,
+ message=_(
+ "The import was unable to find an event in the page."
+ ),
+ message_type=Message.TYPE.WARNING,
+ )
+ )
return result
-
def find_similar_events(self):
start_time_test = Q(start_time=self.start_time)
@@ -1476,12 +1673,16 @@ class Event(models.Model):
if isinstance(self.start_time, str):
self.start_time = time.fromisoformat(self.start_time)
interval = (
- time(self.start_time.hour - 1, self.start_time.minute)
- if self.start_time.hour >= 1
- else time(0, 0),
- time(self.start_time.hour + 1, self.start_time.minute)
- if self.start_time.hour < 23
- else time(23, 59),
+ (
+ time(self.start_time.hour - 1, self.start_time.minute)
+ if self.start_time.hour >= 1
+ else time(0, 0)
+ ),
+ (
+ time(self.start_time.hour + 1, self.start_time.minute)
+ if self.start_time.hour < 23
+ else time(23, 59)
+ ),
)
start_time_test = start_time_test | Q(start_time__range=interval)
@@ -1504,7 +1705,7 @@ class Event(models.Model):
if self.uuids is None or len(self.uuids) == 0
else Event.objects.filter(uuids__contains=self.uuids)
)
-
+
def get_updateable_uuid(self):
if self.uuids and len(self.uuids) > 0:
for s in self.uuids:
@@ -1513,7 +1714,7 @@ class Event(models.Model):
return None
def is_updateable(self):
- return not self.get_updateable_uuid() is None
+ return self.get_updateable_uuid() is not None
def split_uuid(uuid):
els = uuid.split(":")
@@ -1539,7 +1740,7 @@ class Event(models.Model):
if Event.is_ancestor_uuid(s_uuid, e_uuid):
return True
return False
-
+
def same_uuid(self, event):
if self.uuids is None or event.uuids is None:
return False
@@ -1553,7 +1754,11 @@ class Event(models.Model):
if self.other_versions is None:
return []
else:
- return [e for e in self.other_versions.get_duplicated() if e.pk != self.pk and e.status != Event.STATUS.TRASH]
+ return [
+ e
+ for e in self.other_versions.get_duplicated()
+ if e.pk != self.pk and e.status != Event.STATUS.TRASH
+ ]
def get_other_versions(self):
if self.other_versions is None:
@@ -1562,7 +1767,11 @@ class Event(models.Model):
return [e for e in self.other_versions.get_duplicated() if e.pk != self.pk]
def masked(self):
- return self.other_versions and self.other_versions.representative != None and self.other_versions.representative.pk != self.pk
+ return (
+ self.other_versions
+ and self.other_versions.representative is not None
+ and self.other_versions.representative.pk != self.pk
+ )
def get_organisers(self):
if self.pk:
@@ -1578,12 +1787,11 @@ class Event(models.Model):
return self.pending_organisers
else:
return []
-
def get_comparison(events, all=True):
result = []
for attr in Event.data_fields(all=all, local_img=False, exact_location=False):
- if attr == 'organisers':
+ if attr == "organisers":
values = [[str(o) for o in e.get_organisers()] for e in events]
else:
values = [getattr(e, attr) for e in events]
@@ -1605,18 +1813,20 @@ class Event(models.Model):
res = Event.get_comparison([self, event], all)
for r in res:
if not r["similar"]:
- if r["key"] == "title" and (self.has_invalid_title() or event.has_invalid_title()):
+ if r["key"] == "title" and (
+ self.has_invalid_title() or event.has_invalid_title()
+ ):
continue
- if r["key"] == "start_day" and (self.has_invalid_start_date() or event.has_invalid_start_date()):
+ if r["key"] == "start_day" and (
+ self.has_invalid_start_date() or event.has_invalid_start_date()
+ ):
continue
return False
return True
def set_other_versions(self, events, force_non_fixed=False):
# get existing groups
- groups = list(
- set([e.other_versions for e in events] + [self.other_versions])
- )
+ groups = list(set([e.other_versions for e in events] + [self.other_versions]))
groups = [g for g in groups if g is not None]
# do we have to create a new group?
@@ -1690,10 +1900,12 @@ class Event(models.Model):
def update(self, other, all):
# integrate pending organisers
- if other.has_pending_organisers() and not other.pending_organisers is None:
+ if other.has_pending_organisers() and other.pending_organisers is not None:
self.organisers.set(other.pending_organisers)
-
- logger.warning("process update " + other.title + ' ' + str(other.has_invalid_start_date()))
+
+ logger.info(
+ "process update " + other.title + " " + str(other.has_invalid_start_date())
+ )
# set attributes
for attr in Event.data_fields(all=all, no_m2m=True):
if attr == "title" and other.has_invalid_title():
@@ -1709,9 +1921,9 @@ class Event(models.Model):
# add a possible missing uuid
if self.uuids is None:
self.uuids = []
- for uuid in other.uuids:
- if uuid not in self.uuids:
- self.uuids.append(uuid)
+ for uuide in other.uuids:
+ if uuide not in self.uuids:
+ self.uuids.append(uuide)
# add possible missing sources
if other.import_sources:
@@ -1776,19 +1988,34 @@ class Event(models.Model):
if same_imported.other_versions:
if same_imported.status != Event.STATUS.TRASH:
if same_imported.other_versions.is_published():
- if same_imported.other_versions.representative != same_imported:
- same_imported.other_versions.representative = None
+ if (
+ same_imported.other_versions.representative
+ != same_imported
+ ):
+ same_imported.other_versions.representative = (
+ None
+ )
same_imported.other_versions.save()
# add a message to explain the update
if not event.is_not_found_import():
- res = [r for r in Event.get_comparison([event, same_imported], all) if not r["similar"]]
+ res = [
+ r
+ for r in Event.get_comparison(
+ [event, same_imported], all
+ )
+ if not r["similar"]
+ ]
if len(res) > 0:
- txt = _("Updated field(s): ") + ", ".join([r["key"] for r in res])
- msg = Message(subject=_('Update'),
- name=_('update process'),
- related_event=same_imported,
- message=txt,
- message_type=Message.TYPE.UPDATE_PROCESS)
+ txt = _("Updated field(s): ") + ", ".join(
+ [r["key"] for r in res]
+ )
+ msg = Message(
+ subject=_("Update"),
+ name=_("update process"),
+ related_event=same_imported,
+ message=txt,
+ message_type=Message.TYPE.UPDATE_PROCESS,
+ )
msg.save()
new_image = same_imported.image != event.image
@@ -1799,7 +2026,9 @@ class Event(models.Model):
same_imported.status = Event.STATUS.TRASH
else:
# we only update local information if it's a pure import and has no moderated_date
- same_imported.update(event, pure and same_imported.moderated_date is None)
+ same_imported.update(
+ event, pure and same_imported.moderated_date is None
+ )
# save messages
if event.has_message():
for msg in event.get_messages():
@@ -1808,15 +2037,22 @@ class Event(models.Model):
same_imported.set_in_importation_process()
same_imported.prepare_save()
# fix missing or updated files
- if same_imported.local_image and (not default_storage.exists(same_imported.local_image.name) or new_image):
+ if same_imported.local_image and (
+ not default_storage.exists(same_imported.local_image.name)
+ or new_image
+ ):
same_imported.download_image()
same_imported.save(update_fields=["local_image"])
+
to_update.append(same_imported)
else:
# otherwise, the new event possibly a duplication of the remaining others.
# check if it should be published
- trash = len([e for e in same_events if e.status != Event.STATUS.TRASH]) == 0
+ trash = (
+ len([e for e in same_events if e.status != Event.STATUS.TRASH])
+ == 0
+ )
if trash:
event.status = Event.STATUS.TRASH
event.set_other_versions(same_events, force_non_fixed=not trash)
@@ -1839,10 +2075,14 @@ class Event(models.Model):
if e.is_event_long_duration():
e.status = Event.STATUS.DRAFT
e.add_message(
- Message(subject=_('Import'),
- name=_('import process'),
- message=_("The duration of the event is a little too long for direct publication. Moderators can choose to publish it or not."),
- message_type=Message.TYPE.IMPORT_PROCESS)
+ Message(
+ subject=_("Import"),
+ name=_("import process"),
+ message=_(
+ "The duration of the event is a little too long for direct publication. Moderators can choose to publish it or not."
+ ),
+ message_type=Message.TYPE.IMPORT_PROCESS,
+ )
)
# then import all the new events
@@ -1901,13 +2141,12 @@ class Event(models.Model):
self.current_date = date
def get_start_end_datetimes(self, day):
- start_h = time().min
+ time().min
if self.start_day == day:
if self.start_time is None:
dtstart = datetime.combine(self.start_day, time().min)
else:
dtstart = datetime.combine(self.start_day, self.start_time)
- start_h = self.start_time
else:
dtstart = datetime.combine(day, time().min)
@@ -1928,7 +2167,20 @@ class Event(models.Model):
def get_concurrent_events(self, remove_same_dup=True):
day = self.current_date if hasattr(self, "current_date") else self.start_day
- day_events = CalendarDay(day, qs = Event.objects.filter(status=Event.STATUS.PUBLISHED).only("start_day", "start_time", "title", "category", "other_versions", "recurrences", "end_day", "end_time", "uuids")).get_events()
+ day_events = CalendarDay(
+ day,
+ qs=Event.objects.filter(status=Event.STATUS.PUBLISHED).only(
+ "start_day",
+ "start_time",
+ "title",
+ "category",
+ "other_versions",
+ "recurrences",
+ "end_day",
+ "end_time",
+ "uuids",
+ ),
+ ).get_events()
return [
e
for e in day_events
@@ -1950,7 +2202,7 @@ class Event(models.Model):
cal.add("version", "2.0")
for event in events:
- ed = event.get_consolidated_end_day()
+ event.get_consolidated_end_day()
eventIcal = icalEvent()
# mapping
if event.start_time is None:
@@ -1973,7 +2225,7 @@ class Event(models.Model):
event.start_time.minute,
),
)
- if not event.end_day is None:
+ if event.end_day is not None:
if event.end_time is None:
eventIcal.add(
"dtend",
@@ -1996,13 +2248,19 @@ class Event(models.Model):
)
eventIcal.add("summary", event.title)
eventIcal.add("name", event.title)
- url = ("\n" + event.reference_urls[0]) if event.reference_urls and len(event.reference_urls) > 0 else ""
- description = event.description if event.description else ""
- eventIcal.add(
- "description", description + url
+ url = (
+ ("\n" + event.reference_urls[0])
+ if event.reference_urls and len(event.reference_urls) > 0
+ else ""
)
- if not event.local_image is None and event.local_image != "":
- eventIcal.add('image', request.build_absolute_uri(event.local_image), parameters={'VALUE': 'URI'})
+ description = event.description if event.description else ""
+ eventIcal.add("description", description + url)
+ if event.local_image is not None and event.local_image != "":
+ eventIcal.add(
+ "image",
+ request.build_absolute_uri(event.local_image),
+ parameters={"VALUE": "URI"},
+ )
eventIcal.add("location", event.exact_location or event.location)
cal.add_component(eventIcal)
@@ -2011,27 +2269,54 @@ class Event(models.Model):
def get_count_modification(when):
start = datetime(when[0].year, when[0].month, when[0].day)
- end = start + timedelta(days=when[1])
+ end = start + timedelta(days=when[1])
- recurrentimport = RecurrentImport.objects.filter(source=OuterRef("import_sources__0")).order_by().annotate(count=Func(F('id'), function='Count')).values('count')
+ recurrentimport = (
+ RecurrentImport.objects.filter(source=OuterRef("import_sources__0"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ nb_manual_creation = (
+ Event.objects.filter(
+ created_date__gte=start,
+ created_date__lt=end,
+ imported_date__isnull=True,
+ )
+ .filter(Q(uuids__len=0) | Q(uuids=None))
+ .count()
+ )
+ nb_local_copies = Event.objects.filter(
+ created_date__gte=start,
+ created_date__lt=end,
+ imported_date__isnull=True,
+ uuids__len__gt=0,
+ ).count()
- nb_manual_creation = Event.objects.filter(created_date__gte=start, created_date__lt=end, imported_date__isnull=True).filter(Q(uuids__len=0)|Q(uuids=None)).count()
- nb_local_copies = Event.objects.filter(created_date__gte=start, created_date__lt=end, imported_date__isnull=True, uuids__len__gt=0).count()
+ imported = Event.objects.filter(
+ created_date__gte=start,
+ created_date__lt=end,
+ imported_date__isnull=False,
+ uuids__len__gt=0,
+ ).annotate(nb_rimport=Subquery(recurrentimport))
- imported = Event.objects.filter(created_date__gte=start, created_date__lt=end, imported_date__isnull=False, uuids__len__gt=0).annotate(nb_rimport=Subquery(recurrentimport))
-
- nb_manual_import = imported.filter(Q(import_sources__len=0)|Q(nb_rimport=0)).count()
+ nb_manual_import = imported.filter(
+ Q(import_sources__len=0) | Q(nb_rimport=0)
+ ).count()
nb_first_import = imported.filter(nb_rimport__gt=0).count()
- nb_moderated = Event.objects.filter(moderated_date__gte=start, moderated_date__lt=end).count()
+ nb_moderated = Event.objects.filter(
+ moderated_date__gte=start, moderated_date__lt=end
+ ).count()
- return {"when": (start, start + timedelta(days=when[1] - 1)),
- "nb_manual_creation": nb_manual_creation,
- "nb_manual_import": nb_manual_import,
- "nb_first_import": nb_first_import,
- "nb_local_copies": nb_local_copies,
- "nb_moderated": nb_moderated,
+ return {
+ "when": (start, start + timedelta(days=when[1] - 1)),
+ "nb_manual_creation": nb_manual_creation,
+ "nb_manual_import": nb_manual_import,
+ "nb_first_import": nb_first_import,
+ "nb_local_copies": nb_local_copies,
+ "nb_moderated": nb_moderated,
}
def get_count_modifications(when_list):
@@ -2046,21 +2331,21 @@ class Message(models.Model):
UPDATE_PROCESS = "update_process", _("Update process")
CONTACT_FORM = "contact_form", _("Contact form")
EVENT_REPORT = "event_report", _("Event report")
- FROM_CONTRIBUTOR_NO_MSG = "from_contrib_no_msg", _("From contributor (without message)")
+ FROM_CONTRIBUTOR_NO_MSG = "from_contrib_no_msg", _(
+ "From contributor (without message)"
+ )
WARNING = "warning", _("Warning")
class Meta:
verbose_name = _("Message")
verbose_name_plural = _("Messages")
indexes = [
- models.Index(fields=['related_event']),
- models.Index(fields=['user']),
- models.Index(fields=['date']),
- models.Index(fields=['spam', 'closed']),
+ models.Index(fields=["related_event"]),
+ models.Index(fields=["user"]),
+ models.Index(fields=["date"]),
+ models.Index(fields=["spam", "closed"]),
]
-
-
subject = models.CharField(
verbose_name=_("Subject"),
help_text=_("The subject of your message"),
@@ -2098,7 +2383,9 @@ class Message(models.Model):
blank=True,
null=True,
)
- message = CKEditor5Field(verbose_name=_("Message"), help_text=_("Your message"), blank=True)
+ message = CKEditor5Field(
+ verbose_name=_("Message"), help_text=_("Your message"), blank=True
+ )
date = models.DateTimeField(auto_now_add=True)
@@ -2127,11 +2414,22 @@ class Message(models.Model):
verbose_name=_("Type"),
max_length=20,
choices=TYPE.choices,
- default=None, null=True
+ default=None,
+ null=True,
)
def nb_open_messages():
- return Message.objects.filter(Q(closed=False)&Q(spam=False)&Q(message_type__in=[Message.TYPE.CONTACT_FORM, Message.TYPE.EVENT_REPORT, Message.TYPE.FROM_CONTRIBUTOR])).count()
+ return Message.objects.filter(
+ Q(closed=False)
+ & Q(spam=False)
+ & Q(
+ message_type__in=[
+ Message.TYPE.CONTACT_FORM,
+ Message.TYPE.EVENT_REPORT,
+ Message.TYPE.FROM_CONTRIBUTOR,
+ ]
+ )
+ ).count()
def get_absolute_url(self):
return reverse("message", kwargs={"pk": self.pk})
@@ -2156,13 +2454,13 @@ class RecurrentImport(models.Model):
FBEVENTS = "Facebook events", _("Événements d'une page FB")
BILLETTERIECF = "Billetterie CF", _("Billetterie Clermont-Ferrand")
ARACHNEE = "arachnee", _("Arachnée concert")
- LERIO = "rio", _('Le Rio')
- LARAYMONDE = "raymonde", _('La Raymonde')
- APIDAE = 'apidae', _('Agenda apidae tourisme')
- IGUANA = 'iguana', _('Agenda iguana (médiathèques)')
- MILLEFORMES = 'Mille formes', _('Mille formes')
- AMISCERISES = 'Amis cerises', _('Les Amis du Temps des Cerises')
- MOBILIZON = 'Mobilizon', _('Mobilizon')
+ LERIO = "rio", _("Le Rio")
+ LARAYMONDE = "raymonde", _("La Raymonde")
+ APIDAE = "apidae", _("Agenda apidae tourisme")
+ IGUANA = "iguana", _("Agenda iguana (médiathèques)")
+ MILLEFORMES = "Mille formes", _("Mille formes")
+ AMISCERISES = "Amis cerises", _("Les Amis du Temps des Cerises")
+ MOBILIZON = "Mobilizon", _("Mobilizon")
class DOWNLOADER(models.TextChoices):
SIMPLE = "simple", _("simple")
@@ -2234,7 +2532,7 @@ class RecurrentImport(models.Model):
forceLocation = models.BooleanField(
verbose_name=_("Force location"),
help_text=_("force location even if another is detected."),
- default=False
+ default=False,
)
defaultOrganiser = models.ForeignKey(
@@ -2298,9 +2596,9 @@ class BatchImportation(models.Model):
verbose_name_plural = _("Batch importations")
permissions = [("run_batchimportation", "Can run a batch importation")]
indexes = [
- models.Index(fields=['created_date']),
- models.Index(fields=['status']),
- models.Index(fields=['created_date', 'recurrentImport']),
+ models.Index(fields=["created_date"]),
+ models.Index(fields=["status"]),
+ models.Index(fields=["created_date", "recurrentImport"]),
]
created_date = models.DateTimeField(auto_now_add=True)
@@ -2317,9 +2615,7 @@ class BatchImportation(models.Model):
url_source = models.URLField(
verbose_name=_("URL (if not recurrent import)"),
- help_text=_(
- "Source URL if no RecurrentImport is associated."
- ),
+ help_text=_("Source URL if no RecurrentImport is associated."),
max_length=1024,
blank=True,
null=True,
@@ -2428,7 +2724,7 @@ class CategorisationRule(models.Model):
# all rules are applied, starting from the first to the last
def apply_rules(event):
c = CategorisationRule.get_category_from_rules(event)
-
+
if c is None:
return 0
else:
@@ -2438,7 +2734,11 @@ class CategorisationRule(models.Model):
def get_category_from_rules(event):
cats = defaultdict(lambda: 0)
if CategorisationRule.rules is None:
- CategorisationRule.rules = CategorisationRule.objects.all().select_related("category").select_related("place")
+ CategorisationRule.rules = (
+ CategorisationRule.objects.all()
+ .select_related("category")
+ .select_related("place")
+ )
for rule in CategorisationRule.rules:
if rule.match(event):
@@ -2487,7 +2787,4 @@ class CategorisationRule(models.Model):
if not event.exact_location == self.place:
return False
-
return True
-
-
diff --git a/src/agenda_culturel/settings/base.py b/src/agenda_culturel/settings/base.py
index d085539..5169baf 100644
--- a/src/agenda_culturel/settings/base.py
+++ b/src/agenda_culturel/settings/base.py
@@ -15,7 +15,7 @@ DEBUG = os_getenv("DEBUG", "true").lower() in ["True", "true", "1", "yes", "y"]
ALLOWED_HOSTS = os_getenv("ALLOWED_HOSTS", "localhost").split(",")
if DEBUG:
- ALLOWED_HOSTS = ALLOWED_HOSTS + ['testserver']
+ ALLOWED_HOSTS = ALLOWED_HOSTS + ["testserver"]
if DEBUG:
CSRF_TRUSTED_ORIGINS = os_getenv("CSRF_TRUSTED_ORIGINS", "http://localhost").split(
@@ -30,8 +30,8 @@ else:
","
)
-ADMINS = [tuple(a.split(',')) for a in os_getenv("ADMINS", "").split(";")]
-MANAGERS = [tuple(a.split(',')) for a in os_getenv("MANAGERS", "").split(";")]
+ADMINS = [tuple(a.split(",")) for a in os_getenv("ADMINS", "").split(";")]
+MANAGERS = [tuple(a.split(",")) for a in os_getenv("MANAGERS", "").split(";")]
SERVER_EMAIL = os_getenv("SERVER_EMAIL", "")
@@ -55,7 +55,7 @@ INSTALLED_APPS = [
"compressor",
"django_ckeditor_5",
"recurrence",
- 'django.contrib.gis',
+ "django.contrib.gis",
"location_field.apps.DefaultConfig",
"django.contrib.postgres",
"robots",
@@ -63,8 +63,8 @@ INSTALLED_APPS = [
"cache_cleaner",
"honeypot",
"template_profiler_panel",
- 'django_cleanup.apps.CleanupConfig',
- 'django_unused_media',
+ "django_cleanup.apps.CleanupConfig",
+ "django_unused_media",
]
HONEYPOT_FIELD_NAME = "alias_name"
@@ -81,19 +81,29 @@ MIDDLEWARE = [
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
- "debug_toolbar.middleware.DebugToolbarMiddleware",
- 'django.contrib.sites.middleware.CurrentSiteMiddleware',
-# "django.middleware.cache.UpdateCacheMiddleware",
-# "django.middleware.common.CommonMiddleware",
-# "django.middleware.cache.FetchFromCacheMiddleware",
+ "debug_toolbar.middleware.DebugToolbarMiddleware",
+ "django.contrib.sites.middleware.CurrentSiteMiddleware",
+ # "django.middleware.cache.UpdateCacheMiddleware",
+ # "django.middleware.common.CommonMiddleware",
+ # "django.middleware.cache.FetchFromCacheMiddleware",
]
CKEDITOR_5_CONFIGS = {
-'default': {
- 'toolbar': ['heading', '|', 'bold', 'italic', 'link', '|',
- 'bulletedList', 'numberedList', 'blockQuote',],
- 'language': 'fr',
- },}
+ "default": {
+ "toolbar": [
+ "heading",
+ "|",
+ "bold",
+ "italic",
+ "link",
+ "|",
+ "bulletedList",
+ "numberedList",
+ "blockQuote",
+ ],
+ "language": "fr",
+ },
+}
ROOT_URLCONF = "agenda_culturel.urls"
@@ -116,7 +126,7 @@ TEMPLATES = [
"django.template.loaders.app_directories.Loader",
],
),
- ]
+ ],
},
},
]
@@ -128,7 +138,7 @@ WSGI_APPLICATION = "agenda_culturel.wsgi.application"
DATABASES = {
"default": {
- 'ENGINE': 'django.contrib.gis.db.backends.postgis',
+ "ENGINE": "django.contrib.gis.db.backends.postgis",
"NAME": os_getenv("POSTGRES_DB", "postgres"),
"USER": os_getenv("POSTGRES_USER", "postgres"),
"PASSWORD": os_getenv("POSTGRES_PASSWORD", "postgres"),
@@ -166,9 +176,7 @@ USE_I18N = True
USE_TZ = False
-LANGUAGES = (
- ("fr", _("French")),
-)
+LANGUAGES = (("fr", _("French")),)
# Auth
@@ -260,13 +268,17 @@ LOCATION_FIELD = {
# stop robots
ROBOTS_USE_SITEMAP = False
-ROBOTS_SITE_BY_REQUEST = 'cached-sitemap'
+ROBOTS_SITE_BY_REQUEST = "cached-sitemap"
# debug
if DEBUG:
import socket
+
hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
- INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"]
+ INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + [
+ "127.0.0.1",
+ "10.0.2.2",
+ ]
# logging
@@ -297,20 +309,20 @@ LOGGING = {
# debug
DEBUG_TOOLBAR_PANELS = [
- 'debug_toolbar.panels.history.HistoryPanel',
- 'debug_toolbar.panels.versions.VersionsPanel',
- 'debug_toolbar.panels.timer.TimerPanel',
- 'debug_toolbar.panels.settings.SettingsPanel',
- 'debug_toolbar.panels.headers.HeadersPanel',
- 'debug_toolbar.panels.request.RequestPanel',
- 'debug_toolbar.panels.sql.SQLPanel',
- 'debug_toolbar.panels.staticfiles.StaticFilesPanel',
- 'debug_toolbar.panels.templates.TemplatesPanel',
- 'debug_toolbar.panels.alerts.AlertsPanel',
- 'debug_toolbar.panels.cache.CachePanel',
- 'debug_toolbar.panels.signals.SignalsPanel',
- 'debug_toolbar.panels.redirects.RedirectsPanel',
- 'debug_toolbar.panels.profiling.ProfilingPanel',
+ "debug_toolbar.panels.history.HistoryPanel",
+ "debug_toolbar.panels.versions.VersionsPanel",
+ "debug_toolbar.panels.timer.TimerPanel",
+ "debug_toolbar.panels.settings.SettingsPanel",
+ "debug_toolbar.panels.headers.HeadersPanel",
+ "debug_toolbar.panels.request.RequestPanel",
+ "debug_toolbar.panels.sql.SQLPanel",
+ "debug_toolbar.panels.staticfiles.StaticFilesPanel",
+ "debug_toolbar.panels.templates.TemplatesPanel",
+ "debug_toolbar.panels.alerts.AlertsPanel",
+ "debug_toolbar.panels.cache.CachePanel",
+ "debug_toolbar.panels.signals.SignalsPanel",
+ "debug_toolbar.panels.redirects.RedirectsPanel",
+ "debug_toolbar.panels.profiling.ProfilingPanel",
"template_profiler_panel.panels.template.TemplateProfilerPanel",
]
@@ -319,6 +331,6 @@ DJANGORESIZED_DEFAULT_SIZE = [1200, 1200]
DJANGORESIZED_DEFAULT_SCALE = 0.5
DJANGORESIZED_DEFAULT_QUALITY = 75
DJANGORESIZED_DEFAULT_KEEP_META = True
-DJANGORESIZED_DEFAULT_FORCE_FORMAT = 'JPEG'
-DJANGORESIZED_DEFAULT_FORMAT_EXTENSIONS = {'JPEG': ".jpg"}
-DJANGORESIZED_DEFAULT_NORMALIZE_ROTATION = True
\ No newline at end of file
+DJANGORESIZED_DEFAULT_FORCE_FORMAT = "JPEG"
+DJANGORESIZED_DEFAULT_FORMAT_EXTENSIONS = {"JPEG": ".jpg"}
+DJANGORESIZED_DEFAULT_NORMALIZE_ROTATION = True
diff --git a/src/agenda_culturel/settings/dev.py b/src/agenda_culturel/settings/dev.py
index 1517776..669689a 100644
--- a/src/agenda_culturel/settings/dev.py
+++ b/src/agenda_culturel/settings/dev.py
@@ -1,3 +1,3 @@
from .base import * # noqa
-SITE_ID=1
\ No newline at end of file
+SITE_ID = 1
diff --git a/src/agenda_culturel/sitemaps.py b/src/agenda_culturel/sitemaps.py
index d6b2e7d..a3ab15e 100644
--- a/src/agenda_culturel/sitemaps.py
+++ b/src/agenda_culturel/sitemaps.py
@@ -3,17 +3,27 @@ from django.urls import reverse
from .models import Category
+
class StaticViewSitemap(sitemaps.Sitemap):
priority = 0.5
protocol = "https"
changefreq = "daily"
def items(self):
- return ["home", "cette_semaine", "ce_mois_ci", "aujourdhui", "a_venir", "about", "contact"]
+ return [
+ "home",
+ "cette_semaine",
+ "ce_mois_ci",
+ "aujourdhui",
+ "a_venir",
+ "about",
+ "contact",
+ ]
def location(self, item):
return reverse(item)
+
class HomeCategorySitemap(sitemaps.Sitemap):
priority = 0.5
protocol = "https"
@@ -21,22 +31,24 @@ class HomeCategorySitemap(sitemaps.Sitemap):
path = "home_category"
def items(self):
- result = []
return Category.objects.values_list("slug", flat=True)
def location(self, item):
- return reverse(self.path, kwargs={'cat': item})
+ return reverse(self.path, kwargs={"cat": item})
+
class MonthCategorySitemap(HomeCategorySitemap):
priority = 0.3
protocol = "https"
path = "ce_mois_ci_category"
+
class WeekCategorySitemap(HomeCategorySitemap):
priority = 0.4
protocol = "https"
path = "cette_semaine_category"
+
class UpcomingCategorySitemap(HomeCategorySitemap):
priority = 0.4
protocol = "https"
diff --git a/src/agenda_culturel/templatetags/cat_extra.py b/src/agenda_culturel/templatetags/cat_extra.py
index 3098062..0fe4e86 100644
--- a/src/agenda_culturel/templatetags/cat_extra.py
+++ b/src/agenda_culturel/templatetags/cat_extra.py
@@ -4,7 +4,7 @@ from django.core.cache import cache
from agenda_culturel.models import Category
import colorsys
-from .utils_extra import *
+from .utils_extra import picto_from_name
import logging
@@ -48,12 +48,12 @@ def get_relative_luminance(hex_color):
def adjust_lightness_saturation(hex_color, shift_lightness=0.0, scale_saturation=1):
rgb = html_to_rgb(hex_color)
- h, l, s = colorsys.rgb_to_hls(*rgb)
+ h, lg, s = colorsys.rgb_to_hls(*rgb)
- l += shift_lightness
+ lg += shift_lightness
s *= scale_saturation
- r, g, b = colorsys.hls_to_rgb(h, l, s)
+ r, g, b = colorsys.hls_to_rgb(h, lg, s)
return rgb_to_html([r, g, b])
@@ -73,12 +73,13 @@ def background_color_adjust_color(color, alpha=1):
@register.simple_tag
def css_categories():
- result = cache.get('css_categories')
- if not result: #
+ result = cache.get("css_categories")
+ if not result: #
result = '"
- cache.set('css_categories', result, 86400) # 1 day
+ cache.set("css_categories", result, 86400) # 1 day
return mark_safe(result)
@@ -178,6 +179,7 @@ def small_cat(category, url=None, contrast=True, selected=True, recurrence=False
def small_cat_no_selected(category, url=None):
return small_cat(category, url=url, selected=False)
+
@register.filter
def small_cat_no_contrast(category, url=None):
if url is None:
@@ -185,7 +187,6 @@ def small_cat_no_contrast(category, url=None):
return small_cat(category, url=url, contrast=False)
-
@register.filter
def small_cat_recurrent(category, recurrence=False):
return small_cat(category, url=None, selected=True, recurrence=recurrence)
@@ -213,22 +214,52 @@ def circle_cat(category, recurrence=False):
'
'
)
+
def legend_cat(category, url, selected=True, first=False, with_title=False):
c = category.css_class()
n = category.name
- class_reduced = '' if selected else 'reduced'
+ class_reduced = "" if selected else "reduced"
if category.pictogram:
- result = '
' + '
'
+ result = (
+ '
'
+ + '
'
+ )
else:
- result = '
'
+ result = (
+ '
'
+ )
if with_title and selected:
- result = '
' + result + ' ' + n + ' ' + picto_from_name('x-circle') + '
'
+ result = (
+ '