Code cleanup with ruff

Jean-Marie Favreau 2025-03-02 19:12:50 +01:00
parent d8c4c55c44
commit b5d4c0f0b1
225 changed files with 3748 additions and 1790 deletions

View File

@ -6,3 +6,17 @@ repos:
rev: 25.1.0
hooks:
- id: black
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.9.9
hooks:
# Run the linter.
- id: ruff
types_or: [ python, pyi ]
args: [ --fix ]
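
The new hook runs ruff's linter with autofix on staged Python files, next to the existing black and pre-commit-hooks entries. A minimal sketch of running the same pass by hand, assuming ruff is installed in the local environment:

import subprocess

# Same check the "ruff" pre-commit hook performs: lint the tree and apply
# the safe automatic fixes.
result = subprocess.run(["ruff", "check", "--fix", "."], check=False)
print("ruff exit code:", result.returncode)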

View File

@ -13,4 +13,3 @@ parser.add_simple_formatter("vc_raw_html", "")
plain_txt = parser.format(text)
print(plain_txt)

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,25 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import (
amisdutempsdescerises,
)
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), amisdutempsdescerises.CExtractor())
url = "https://amisdutempsdescerises.org/page.php"
url_human = "https://amisdutempsdescerises.org/"
try:
events = u2e.process(url, url_human, cache = "cache-amiscerices.xml", default_values = {"category": "Rencontres & Débats"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-amiscerices.xml",
default_values={"category": "Rencontres & Débats"},
published=True,
)
exportfile = "events-amiscerices.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,20 +18,25 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import arachnee
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), arachnee.CExtractor())
url = "https://www.arachnee-concerts.com/wp-admin/admin-ajax.php?action=movies-filter&per_page=9999&date=NaN.NaN.NaN&theatres=Clermont-Fd&cat=&sorting=&list_all_events=&current_page="
url_human = "https://www.arachnee-concerts.com/agenda-des-concerts/Clermont-Fd/"
try:
events = u2e.process(url, url_human, cache = "cache-arachnee.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-arachnee.html",
default_values={},
published=True,
)
exportfile = "events-arachnee.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,25 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import billetterie_cf
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), billetterie_cf.CExtractor())
url = "https://billetterie-c3c.clermont-ferrand.fr/"
url_human = "https://billetterie-c3c.clermont-ferrand.fr/"
try:
events = u2e.process(url, url_human, cache = "cache-c3c.html", default_values = {"location": "La Cour des 3 Coquins"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-c3c.html",
default_values={"location": "La Cour des 3 Coquins"},
published=True,
)
exportfile = "events-c3c.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,24 +18,21 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors.fbevent import *
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.generic_extractors.fbevent import (
CExtractor,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), CExtractor())
url="https://www.facebook.com/events/3575802569389184/3575802576055850/?active_tab=about"
url = "https://www.facebook.com/events/3575802569389184/3575802576055850/?active_tab=about"
events = u2e.process(url, cache = "fb.html", published = True)
events = u2e.process(url, cache="fb.html", published=True)
exportfile = "event-facebook.json"
print("Saving events to file {}".format(exportfile))
with open(exportfile, "w") as f:
json.dump(events, f, indent=4, default=str)

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,25 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.generic_extractors import fbevents
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), fbevents.CExtractor())
url = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
url_human = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
try:
events = u2e.process(url, url_human, cache = "cache-lajetee-fb.html", default_values = {"location": "La Jetée"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lajetee-fb.html",
default_values={"location": "La Jetée"},
published=True,
)
exportfile = "events-lajetee-fb.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,25 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import billetterie_cf
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), billetterie_cf.CExtractor())
url = "https://billetterie-gds.clermont-ferrand.fr/"
url_human = "https://billetterie-gds.clermont-ferrand.fr/"
try:
events = u2e.process(url, url_human, cache = "cache-gds.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-gds.html",
default_values={},
published=True,
)
exportfile = "events-gds.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,22 +18,27 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors.ical import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors.ical import (
ICALExtractor,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), ICALExtractor())
url = "https://calendar.google.com/calendar/ical/programmation.lesaugustes%40gmail.com/public/basic.ics"
url_human = "https://www.cafelesaugustes.fr/la-programmation/"
events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Sans catégorie", "location": "Café lecture les Augustes"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-augustes.ical",
default_values={
"category": "Sans catégorie",
"location": "Café lecture les Augustes",
},
published=True,
)
exportfile = "events-augustes.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,26 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import laraymonde
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), laraymonde.CExtractor())
url = "https://www.raymondbar.net/"
url_human = "https://www.raymondbar.net/"
try:
events = u2e.process(url, url_human, cache = "cache-la-raymonde.html", default_values = {"location": "La Raymonde", "category": "Fêtes & Concerts"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-la-raymonde.html",
default_values={
"location": "La Raymonde",
"category": "Fêtes & Concerts",
},
published=True,
)
exportfile = "events-la-raymonde.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lacomedie
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lacomedie.CExtractor())
url = "https://lacomediedeclermont.com/saison24-25/wp-admin/admin-ajax.php?action=load_dates_existantes"
url_human = "https://lacomediedeclermont.com/saison24-25/"
try:
events = u2e.process(url, url_human, cache = "cache-lacomedie.html", default_values = {"location": "La Comédie de Clermont"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lacomedie.html",
default_values={"location": "La Comédie de Clermont"},
published=True,
)
exportfile = "events-lacomedie.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,26 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lacoope
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lacoope.CExtractor())
url = "https://www.lacoope.org/concerts-calendrier/"
url_human = "https://www.lacoope.org/concerts-calendrier/"
try:
events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Fêtes & Concerts", "location": "La Coopérative"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lacoope.html",
default_values={
"category": "Fêtes & Concerts",
"location": "La Coopérative",
},
published=True,
)
exportfile = "events-lacoope.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lapucealoreille
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lapucealoreille.CExtractor())
url = "https://www.lapucealoreille63.fr/agenda"
url_human = "https://www.lapucealoreille63.fr/agenda"
try:
events = u2e.process(url, url_human, cache = "cache-lapucealoreille.xml", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lapucealoreille.xml",
default_values={},
published=True,
)
exportfile = "events-lapucealoreille.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.generic_extractors import wordpress_mec
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
url = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
url_human = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
try:
events = u2e.process(url, url_human, cache = "cache-le-poulailler.html", default_values = {"location": "Le Poulailler"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-le-poulailler.html",
default_values={"location": "Le Poulailler"},
published=True,
)
exportfile = "events-le-poulailler.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lerio
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lerio.CExtractor())
url = "https://www.cinemalerio.com/evenements/"
url_human = "https://www.cinemalerio.com/evenements/"
try:
events = u2e.process(url, url_human, cache = "cache-le-rio.html", default_values = {"location": "Cinéma le Rio", "category": "Cinéma"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-le-rio.html",
default_values={"location": "Cinéma le Rio", "category": "Cinéma"},
published=True,
)
exportfile = "events-le-roi.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lefotomat
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lefotomat.CExtractor())
url = "https://www.lefotomat.com/feed"
url_human = "https://www.lefotomat.com/"
try:
events = u2e.process(url, url_human, cache = "cache-lefotomat.xml", default_values = {"location": "Le Fotomat'"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lefotomat.xml",
default_values={"location": "Le Fotomat'"},
published=True,
)
exportfile = "events-lefotomat.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors import wordpress_mec
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
url = "https://www.lesvinzelles.com/index.php/programme/"
url_human = "https://www.lesvinzelles.com/index.php/programme/"
try:
events = u2e.process(url, url_human, cache = "cache-les-vinzelles.html", default_values = {"location": "Les Vinzelles"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-les-vinzelles.html",
default_values={"location": "Les Vinzelles"},
published=True,
)
exportfile = "events-les-vinzelles.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.extractor import iguana_agenda
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), iguana_agenda.CExtractor())
url = "https://bibliotheques-clermontmetropole.eu/iguana/Service.PubContainer.cls?uuid=a4a1f992-06da-4ff4-9176-4af0a095c7d1"
url_human = "https://bibliotheques-clermontmetropole.eu/iguana/www.main.cls?surl=AGENDA_Tout%20lagenda"
try:
events = u2e.process(url, url_human, cache = "cache-mediatheques.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-mediatheques.html",
default_values={},
published=True,
)
exportfile = "events-mediatheques.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import mille_formes
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), mille_formes.CExtractor())
url = "https://www.milleformes.fr/programme"
url_human = "https://www.milleformes.fr/programme"
try:
events = u2e.process(url, url_human, cache = "cache-1000formes.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-1000formes.html",
default_values={},
published=True,
)
exportfile = "events-1000formes.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors import mobilizon
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), mobilizon.CExtractor())
url = "https://mobilizon.fr/@attac63/events?"
url_human = "https://mobilizon.fr/@attac63/events"
try:
events = u2e.process(url, url_human, cache = "cache-attac63.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-attac63.html",
default_values={},
published=True,
)
exportfile = "events-attac63.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors import apidae_tourisme
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), apidae_tourisme.CExtractor())
url = "https://widgets.apidae-tourisme.com/filter.js?widget[id]=48"
url_human = "https://ens.puy-de-dome.fr/agenda.html"
try:
events = u2e.process(url, url_human, cache = "cache-puydedome.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-puydedome.html",
default_values={},
published=True,
)
exportfile = "events-puydedome.json"
print("Saving events to file {}".format(exportfile))

View File

@ -26,4 +26,3 @@ Si on récupère l'identifiant de l'événement (type event_11377_0), on peut fo
## La petite gaillarde
The RSS feed https://lapetitegaillarde.fr/?feed=rss2 is fairly well structured.

View File

@ -62,7 +62,7 @@ extend-exclude = '''
[tool.ruff]
format = "grouped"
output-format = "grouped"
line-length = 88 # black default
extend-exclude = [
"src/migrations/*",
@ -73,6 +73,7 @@ extend-exclude = [
"*__init__.py",
]
[tool.ruff.lint]
select = ["E", "F"]
ignore = [
"E501", # line too long, handled by black
@ -80,19 +81,17 @@ ignore = [
"C901", # too complex
"F405", # name may be undefined, or defined from star imports
]
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
# Assume Python 3.11.
target-version = "py311"
[tool.ruff.mccabe]
[tool.ruff.lint.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
[tool.ruff.lint.per-file-ignores]
"experimentations/*.py" = ["E402"]
[tool.ruff.isort]
[tool.ruff.lint.isort]
force-to-top = ["src"]
known-first-party = ["src"]
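
The ruff configuration moves lint options under [tool.ruff.lint] (ruff's current layout), keeps the E and F rule sets while leaving line length to black, and ignores E402 for experimentations/*.py because those scripts extend sys.path before importing src. The [tool.ruff.lint.isort] block declares src as first-party, so when ruff's import-sorting rules are applied they split imports into standard-library, third-party and first-party groups, alphabetized within each group; that is the reshuffling visible throughout this commit. A small illustration of the resulting order, with module names taken from this repository:

# Standard library first...
import json
import os
import sys

# ...then third-party packages...
from bs4 import BeautifulSoup
from django.utils import timezone

# ...then first-party modules, per known-first-party = ["src"].
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events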

View File

@ -1,22 +1,22 @@
from django.contrib import admin
from django import forms
from .models import (
Event,
Category,
Tag,
StaticContent,
DuplicatedEvents,
BatchImportation,
RecurrentImport,
Place,
Message,
ReferenceLocation,
Organisation,
)
from django.contrib import admin
from django_better_admin_arrayfield.admin.mixins import DynamicArrayMixin
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
from django_better_admin_arrayfield.models.fields import DynamicArrayField
from .models import (
BatchImportation,
Category,
DuplicatedEvents,
Event,
Message,
Organisation,
Place,
RecurrentImport,
ReferenceLocation,
StaticContent,
Tag,
)
admin.site.register(Category)
admin.site.register(Tag)

View File

@ -1,14 +1,13 @@
from datetime import datetime, timedelta, date, time
import calendar
from django.db.models import Q, F
import logging
from datetime import date, datetime, time, timedelta
from django.db.models import CharField, F, Q
from django.db.models.functions import Lower
from django.http import Http404
from django.template.defaultfilters import date as _date
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django.template.defaultfilters import date as _date
from django.http import Http404
from django.db.models import CharField
from django.db.models.functions import Lower
import logging
CharField.register_lookup(Lower)
@ -88,9 +87,10 @@ class DayInCalendar:
self._add_event_internal(event)
def _add_event_internal(self, event):
from .models import Category
from copy import copy
from .models import Category
# copy event
local_event = copy(event)
@ -234,7 +234,6 @@ class DayInCalendar:
class IntervalInDay(DayInCalendar):
def __init__(self, d, id, name, short_name):
self.name = name
self.short_name = short_name
@ -244,7 +243,13 @@ class IntervalInDay(DayInCalendar):
class CalendarList:
def __init__(
self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None
self,
firstdate,
lastdate,
filter=None,
exact=False,
ignore_dup=None,
qs=None,
):
self.firstdate = firstdate
self.lastdate = lastdate
@ -308,7 +313,8 @@ class CalendarList:
timezone.get_default_timezone(),
)
lastdatetime = timezone.make_aware(
datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone()
datetime.combine(self.c_lastdate, time.max),
timezone.get_default_timezone(),
)
qs = (
qs.filter(
@ -369,14 +375,16 @@ class CalendarList:
self.calendar_days[e.start_day.__str__()].add_event(e)
else:
for d in daterange(
max(e.start_day, self.firstdate), min(e.end_day, self.lastdate)
max(e.start_day, self.firstdate),
min(e.end_day, self.lastdate),
):
self.calendar_days[d.__str__()].add_event(e)
else:
for e_rec in e.get_recurrences_between(firstdate, lastdate):
end = e_rec.start_day if e_rec.end_day is None else e_rec.end_day
for d in daterange(
max(e_rec.start_day, self.firstdate), min(end, self.lastdate)
max(e_rec.start_day, self.firstdate),
min(end, self.lastdate),
):
self.calendar_days[d.__str__()].add_event(e_rec)

View File

@ -1,23 +1,43 @@
import os
import json
from celery import Celery, Task, chain
from celery.schedules import crontab
from celery.utils.log import get_task_logger
from celery.exceptions import MaxRetriesExceededError
import os
import time as time_
from django.conf import settings
from celery.signals import worker_ready
from contextlib import contextmanager
from .import_tasks.extractor import Extractor
from .import_tasks.importer import URL2Events
from .import_tasks.downloader import SimpleDownloader, ChromiumHeadlessDownloader
from .import_tasks.custom_extractors import *
from .import_tasks.generic_extractors import *
from celery import Celery, Task, chain
from celery.exceptions import MaxRetriesExceededError
from celery.schedules import crontab
from celery.signals import worker_ready
from celery.utils.log import get_task_logger
from django.conf import settings
from django.core.cache import cache
from datetime import date
from .import_tasks.custom_extractors import (
lacomedie,
lacoope,
lefotomat,
lapucealoreille,
billetterie_cf,
arachnee,
lerio,
laraymonde,
mille_formes,
amisdutempsdescerises,
)
from .import_tasks.downloader import (
ChromiumHeadlessDownloader,
SimpleDownloader,
)
from .import_tasks.extractor import Extractor
from .import_tasks.generic_extractors import (
wordpress_mec,
apidae_tourisme,
iguana_agenda,
mobilizon,
ical,
fbevents,
)
from .import_tasks.importer import URL2Events
# Set the default Django settings module for the 'celery' program.
APP_ENV = os.getenv("APP_ENV", "dev")
@ -78,7 +98,13 @@ def close_import_task(taskid, success, error_message, importer):
task.nb_updated = 0 if importer is None else importer.get_nb_updated_events()
task.nb_removed = 0 if importer is None else importer.get_nb_removed_events()
fields = ["status", "nb_initial", "nb_updated", "nb_imported", "nb_removed"]
fields = [
"status",
"nb_initial",
"nb_updated",
"nb_imported",
"nb_removed",
]
if not success:
logger.error(error_message)
task.error_message = str(error_message)[:512]
@ -89,6 +115,7 @@ def close_import_task(taskid, success, error_message, importer):
@app.task(bind=True)
def import_events_from_json(self, json):
from agenda_culturel.models import BatchImportation
from .db_importer import DBImporterEvents
# create a batch importation
@ -124,7 +151,8 @@ class ChromiumTask(Task):
def run_recurrent_import_internal(rimport, downloader, req_id):
from agenda_culturel.models import RecurrentImport, BatchImportation
from agenda_culturel.models import BatchImportation, RecurrentImport
from .db_importer import DBImporterEvents
logger.info("Run recurrent import: {}".format(req_id))
@ -282,7 +310,6 @@ def run_recurrent_import(self, pklist):
def run_recurrent_imports_from_list(pklist):
tasks = chain(
run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s()
for i in range(len(pklist))
@ -350,7 +377,7 @@ def run_all_recurrent_imports(self, only_fb=False):
@app.task(bind=True)
def run_all_recurrent_imports_failed(self):
from agenda_culturel.models import RecurrentImport, BatchImportation
from agenda_culturel.models import BatchImportation, RecurrentImport
logger.info("Run only failed imports")
imports = RecurrentImport.objects.all().order_by("pk")
@ -367,7 +394,7 @@ def run_all_recurrent_imports_failed(self):
@app.task(bind=True)
def run_all_recurrent_imports_canceled(self):
from agenda_culturel.models import RecurrentImport, BatchImportation
from agenda_culturel.models import BatchImportation, RecurrentImport
logger.info("Run only canceled imports")
imports = RecurrentImport.objects.all().order_by("pk")
@ -406,9 +433,9 @@ def import_events_from_url(
email=None,
comments=None,
):
from agenda_culturel.models import BatchImportation, Event
from .db_importer import DBImporterEvents
from agenda_culturel.models import BatchImportation
from agenda_culturel.models import Event
if isinstance(urls, list):
url = urls[0]
@ -419,7 +446,6 @@ def import_events_from_url(
with memcache_chromium_lock(self.app.oid) as acquired:
if acquired:
logger.info(
"URL import: {}".format(self.request.id) + " force " + str(force)
)
@ -431,7 +457,6 @@ def import_events_from_url(
existing = None if force else Event.objects.filter(uuids__contains=[url])
# if it's unknown
if force or len(existing) == 0:
# create an importer
importer = DBImporterEvents(self.request.id)
@ -474,7 +499,10 @@ def import_events_from_url(
)
else:
close_import_task(
self.request.id, False, "Cannot find any event", importer
self.request.id,
False,
"Cannot find any event",
importer,
)
except Exception as e:
logger.error(e)
@ -503,9 +531,9 @@ def import_events_from_urls(
@app.task(base=ChromiumTask, bind=True)
def update_orphan_pure_import_events(self):
from agenda_culturel.models import RecurrentImport
from agenda_culturel.models import Event
from django.db.models import Q, F
from django.db.models import F, Q
from agenda_culturel.models import Event, RecurrentImport
# get all recurrent sources
srcs = RecurrentImport.objects.all().values_list("source")

View File

@ -1,8 +1,9 @@
from agenda_culturel.models import Event
import json
import logging
from django.utils import timezone
import logging
from agenda_culturel.models import Event
logger = logging.getLogger(__name__)
@ -55,7 +56,10 @@ class DBImporterEvents:
if "url" in structure["header"]:
self.url = structure["header"]["url"]
else:
return (False, "JSON is not correctly structured: missing url in header")
return (
False,
"JSON is not correctly structured: missing url in header",
)
if "date" in structure["header"]:
self.date = structure["header"]["date"]

View File

@ -1,23 +1,22 @@
import django_filters
from django.utils.translation import gettext_lazy as _
from django import forms
from django.contrib.postgres.search import SearchQuery, SearchHeadline
from django.db.models import Q, F
from datetime import date, timedelta
from urllib.parse import urlparse, parse_qs, urlencode
from urllib.parse import parse_qs, urlencode, urlparse
from django.http import QueryDict
import django_filters
from django import forms
from django.contrib.gis.measure import D
from django.contrib.postgres.search import SearchHeadline, SearchQuery
from django.db.models import F, Q
from django.http import QueryDict
from django.utils.translation import gettext_lazy as _
from .models import (
ReferenceLocation,
RecurrentImport,
Tag,
Event,
Category,
Message,
DuplicatedEvents,
Event,
Message,
RecurrentImport,
ReferenceLocation,
Tag,
)
@ -188,7 +187,6 @@ class EventFilter(django_filters.FilterSet):
return self.request.get_full_path().split("?")[0]
def get_cleaned_data(self, name):
try:
return self.form.cleaned_data[name]
except AttributeError:
@ -510,7 +508,12 @@ class SimpleSearchEventFilter(django_filters.FilterSet):
| Q(exact_location__name__icontains=value)
| Q(description__icontains=value)
)
for f in ["title", "category__name", "exact_location__name", "description"]:
for f in [
"title",
"category__name",
"exact_location__name",
"description",
]:
params = {
f
+ "_hl": SearchHeadline(
@ -579,7 +582,14 @@ class SearchEventFilter(django_filters.FilterSet):
class Meta:
model = Event
fields = ["title", "location", "description", "category", "tags", "start_day"]
fields = [
"title",
"location",
"description",
"category",
"tags",
"start_day",
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@ -621,7 +631,6 @@ class DuplicatedEventsFilter(django_filters.FilterSet):
class RecurrentImportFilter(django_filters.FilterSet):
name = django_filters.ModelMultipleChoiceFilter(
label="Filtrer par nom",
field_name="name",

View File

@ -1,57 +1,58 @@
import logging
import os
from string import ascii_uppercase as auc
from django.conf import settings
from django.core.files import File
from django.forms import (
ModelForm,
ValidationError,
TextInput,
Form,
URLField,
MultipleHiddenInput,
Textarea,
BooleanField,
CharField,
ChoiceField,
RadioSelect,
MultipleChoiceField,
BooleanField,
EmailField,
Form,
HiddenInput,
ModelChoiceField,
EmailField,
ModelForm,
MultipleChoiceField,
MultipleHiddenInput,
RadioSelect,
Textarea,
TextInput,
URLField,
ValidationError,
formset_factory,
)
from django.forms import formset_factory
from django.utils.formats import localize
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
from .models import (
Event,
RecurrentImport,
CategorisationRule,
Place,
Category,
Tag,
Event,
Message,
Place,
RecurrentImport,
Tag,
)
from django.conf import settings
from django.core.files import File
from django.utils.translation import gettext_lazy as _
from string import ascii_uppercase as auc
from .templatetags.utils_extra import int_to_abc
from django.utils.safestring import mark_safe
from django.utils.formats import localize
from .templatetags.event_extra import event_field_verbose_name, field_to_html
import os
import logging
from .templatetags.utils_extra import int_to_abc
logger = logging.getLogger(__name__)
class GroupFormMixin:
template_name = "agenda_culturel/forms/div_group.html"
class FieldGroup:
def __init__(
self, id, label, display_label=False, maskable=False, default_masked=True
self,
id,
label,
display_label=False,
maskable=False,
default_masked=True,
):
self.id = id
self.label = label
@ -93,7 +94,10 @@ class GroupFormMixin:
def fields_by_group(self):
return [(g, self.get_fields_in_group(g)) for g in self.groups] + [
(GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())
(
GroupFormMixin.FieldGroup("other", _("Other")),
self.get_no_group_fields(),
)
]
def clean(self):
@ -434,7 +438,9 @@ class EventModerateForm(ModelForm):
required_css_class = "required"
tags = MultipleChoiceField(
label=_("Tags"), help_text=_("Select tags from existing ones."), required=False
label=_("Tags"),
help_text=_("Select tags from existing ones."),
required=False,
)
new_tags = MultipleChoiceFieldAcceptAll(
@ -534,12 +540,18 @@ class FixDuplicates(Form):
if initial is None:
initial = "Merge"
choices += [
("Merge", _("Create a new version by merging (interactive mode).") + extra)
(
"Merge",
_("Create a new version by merging (interactive mode).") + extra,
)
]
for i, e in enumerate(events):
if e.status != Event.STATUS.TRASH:
choices += [
("Remove-" + str(e.pk), _("Make {} independent.").format(auc[i]))
(
"Remove-" + str(e.pk),
_("Make {} independent.").format(auc[i]),
)
]
choices += [("NotDuplicates", _("Make all versions independent."))]
@ -611,9 +623,15 @@ class MergeDuplicates(Form):
if self.event:
choices = [
(
("event_" + str(e.pk), _("Value of version {}").format(e.pk))
(
"event_" + str(e.pk),
_("Value of version {}").format(e.pk),
)
if e != self.event
else ("event_" + str(e.pk), _("Value of the selected version"))
else (
"event_" + str(e.pk),
_("Value of the selected version"),
)
)
for e in self.events
]
@ -895,7 +913,6 @@ class PlaceForm(GroupFormMixin, ModelForm):
class MessageForm(ModelForm):
class Meta:
model = Message
fields = ["subject", "name", "email", "message", "related_event"]
@ -912,7 +929,6 @@ class MessageForm(ModelForm):
class MessageEventForm(ModelForm):
class Meta:
model = Message
fields = ["message"]

View File

@ -1,5 +1,5 @@
from os.path import dirname, basename, isfile, join
import glob
from os.path import basename, dirname, isfile, join
modules = glob.glob(join(dirname(__file__), "*.py"))
__all__ = [
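
Both extractor packages build __all__ dynamically from the files on disk; the diff truncates the list right after the opening bracket. Purely as an illustration of the usual idiom, and not the actual body from this repository, a completion typically looks like:

import glob
from os.path import basename, dirname, isfile, join

modules = glob.glob(join(dirname(__file__), "*.py"))
# Hypothetical completion for illustration only: export every module of the
# package by bare name, skipping __init__.py itself.
__all__ = [
    basename(f)[:-3] for f in modules if isfile(f) and not f.endswith("__init__.py")
]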

View File

@ -1,23 +1,29 @@
from ..extractor import Extractor
import json
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import pytz
import html
import json
from datetime import datetime
from urllib.parse import urlparse
import pytz
from bs4 import BeautifulSoup
from ..extractor import Extractor
# A class dedicated to get events from les amis du temps des cerises
# Website https://amisdutempsdescerises.org/
class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.data = b'------toto\r\nContent-Disposition: form-data; name="p"\r\n\r\nfutur\r\n------toto--\r\n'
self.content_type = "multipart/form-data; boundary=----toto"
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
self.set_header(url)
self.clear_events()

View File

@ -1,14 +1,15 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
import re
from datetime import datetime, timedelta, date
from datetime import date, datetime, timedelta
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Arachnée Concert
# URL: https://www.arachnee-concerts.com/agenda-des-concerts/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
self.possible_dates = {}
@ -29,11 +30,16 @@ class CExtractor(TwoStepsExtractorNoPause):
self.theater = match[1]
return super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
containers = soup.select("ul.event_container>li")
@ -59,7 +65,6 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
title = ", ".join(
[

View File

@ -1,15 +1,16 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
import re
from datetime import datetime, timedelta
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La Cour des 3 Coquins and Graines de spectacle
# URL: https://billetterie-c3c.clermont-ferrand.fr//
class CExtractor(TwoStepsExtractor):
def extract(
self,
content,
@ -22,7 +23,13 @@ class CExtractor(TwoStepsExtractor):
):
self.root_address = "https://" + urlparse(url).netloc + "/"
return super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
def category_agenda(self, category):
@ -177,7 +184,6 @@ class CExtractor(TwoStepsExtractor):
category = categories[0]
for dt in datetimes:
self.add_event_with_props(
default_values,
event_url,

View File

@ -1,7 +1,9 @@
from ..twosteps_extractor import TwoStepsExtractor
from datetime import date
import json5
from bs4 import BeautifulSoup
from datetime import datetime, date
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La Coopérative de Mai:
@ -120,7 +122,12 @@ class CExtractor(TwoStepsExtractor):
# optionally add the complementary information
d_suite = ""
for d in ["#typespec", "#dureespec", "#lieuspec", ".lkuncontdroitespec"]:
for d in [
"#typespec",
"#dureespec",
"#lieuspec",
".lkuncontdroitespec",
]:
comp_desc = soup.select(d)
if comp_desc and len(comp_desc) > 0:
for desc in comp_desc:

View File

@ -1,9 +1,11 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..generic_extractors.ggcal_link import GGCalendar
import re
import json5
from bs4 import BeautifulSoup
from ..generic_extractors.ggcal_link import GGCalendar
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La Coopérative de Mai:
# URL: https://www.lacoope.org/concerts-calendrier/

View File

@ -1,7 +1,8 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La puce à l'oreille
# URL: https://www.lapucealoreille63.fr/

View File

@ -1,16 +1,15 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from bs4 import BeautifulSoup
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Raymond Bar
# URL: https://www.raymondbar.net/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
links = soup.select(".showsList .showMore")

View File

@ -1,7 +1,8 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from Le Fotomat'
# URL: https://www.lefotomat.com/

View File

@ -1,20 +1,20 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from datetime import datetime
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Cinéma Le Rio (Clermont-Ferrand)
# URL: https://www.cinemalerio.com/evenements/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
self.possible_dates = {}
self.theater = None
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
links = soup.select("td.seance_link a")
@ -37,7 +37,6 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
title = soup.select_one("h1").text

View File

@ -1,14 +1,15 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from datetime import date
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Mille formes
# URL: https://www.milleformes.fr/programme
class CExtractor(TwoStepsExtractorNoPause):
def extract(
self,
content,
@ -22,7 +23,13 @@ class CExtractor(TwoStepsExtractorNoPause):
self.root_address = "https://" + urlparse(url).netloc + "/"
self.today = date.today()
return super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
def parse_category(self, cat):
@ -74,7 +81,6 @@ class CExtractor(TwoStepsExtractorNoPause):
return result
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
links = soup.select(".cell a.evenement")
for lk in links:

View File

@ -1,19 +1,20 @@
from urllib.parse import urlencode
import urllib.request
from urllib.request import Request
import os
import time
import urllib.request
from abc import ABC, abstractmethod
from urllib.parse import urlencode
from urllib.request import Request
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import (
StaleElementReferenceException,
NoSuchElementException,
SessionNotCreatedException,
StaleElementReferenceException,
TimeoutException,
WebDriverException,
SessionNotCreatedException,
)
from abc import ABC, abstractmethod
import time
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
class Downloader(ABC):
@ -25,7 +26,13 @@ class Downloader(ABC):
pass
def get_content(
self, url, cache=None, referer=None, post=None, content_type=None, data=None
self,
url,
cache=None,
referer=None,
post=None,
content_type=None,
data=None,
):
if cache and os.path.exists(cache):
print("Loading cache ({})".format(cache))
@ -33,7 +40,11 @@ class Downloader(ABC):
content = "\n".join(f.readlines())
else:
content = self.download(
url, referer=referer, post=post, content_type=content_type, data=data
url,
referer=referer,
post=post,
content_type=content_type,
data=data,
)
if cache:

View File

@ -1,8 +1,9 @@
from abc import ABC, abstractmethod
from enum import IntEnum
from datetime import datetime, time, date, timedelta
import re
import unicodedata
from abc import ABC, abstractmethod
from datetime import date, datetime, time, timedelta
from enum import IntEnum
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
@ -71,7 +72,8 @@ class Extractor(ABC):
def parse_french_date(text, default_year=None, default_year_by_proximity=None):
# format WeekdayName DayNumber MonthName Year
m = re.search(
"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text
"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)",
text,
)
if m:
day = m.group(1)
@ -172,7 +174,12 @@ class Extractor(ABC):
@abstractmethod
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
pass
@ -290,8 +297,10 @@ class Extractor(ABC):
return {"header": self.header, "events": self.events}
def clean_url(url):
from .generic_extractors.fbevent import (
CExtractor as FacebookEventExtractor,
)
from .generic_extractors.ical import ICALExtractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
result = url
for e in [ICALExtractor, FacebookEventExtractor]:
@ -299,11 +308,13 @@ class Extractor(ABC):
return result
def get_default_extractors(single_event=False):
from .generic_extractors.ical import ICALExtractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
from .generic_extractors.fbevent import (
CExtractor as FacebookEventExtractor,
)
from .generic_extractors.ggcal_link import (
CExtractor as GoogleCalendarLinkEventExtractor,
)
from .generic_extractors.ical import ICALExtractor
if single_event:
return [
@ -322,9 +333,13 @@ class Extractor(ABC):
# A class that only produce a not found event
class EventNotFoundExtractor(Extractor):
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
self.set_header(url)
self.clear_events()
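
Earlier in this file, the parse_french_date hunk only rewraps its regex across two lines without changing it. As a quick check of what that pattern captures, a small worked example (the sample string is an assumption; only the regex comes from the code above):

import re

pattern = r"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)"

# On "mardi 1er avril 2025" the day number, month name and year are captured;
# the weekday name and the "er" suffix are matched but not kept.
m = re.search(pattern, "mardi 1er avril 2025")
if m:
    print(m.group(1), m.group(2), m.group(3))  # 1 avril 2025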

View File

@ -1,5 +1,5 @@
from os.path import dirname, basename, isfile, join
import glob
from os.path import basename, dirname, isfile, join
modules = glob.glob(join(dirname(__file__), "*.py"))
__all__ = [

View File

@ -1,13 +1,12 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from apidae-tourisme widgets
class CExtractor(TwoStepsExtractorNoPause):
def build_event_url_list(self, content, infuture_days=180):
# Get line starting with wrapper.querySelector(".results_agenda").innerHTML = "
# split using "=" and keep the end
# strip it, and remove the first character (") and the two last ones (";)

View File

@ -1,14 +1,14 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import time as t
from django.utils.translation import gettext_lazy as _
import json
import logging
import re
import time as t
from datetime import datetime
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from django.utils.translation import gettext_lazy as _
from ..extractor import Extractor
import json
import logging
logger = logging.getLogger(__name__)
@ -210,7 +210,6 @@ class FacebookEvent:
return self.get_element("parent_if_exists_or_self")["id"]
def build_events(self, url):
if self.neighbor_events is None or len(self.neighbor_events) == 0:
return [self.build_event(url)]
else:
@ -228,7 +227,6 @@ class FacebookEvent:
class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.has_2nd_method = True
@ -236,8 +234,8 @@ class CExtractor(Extractor):
def prepare_2nd_extract_dler(downloader):
if downloader.support_2nd_extract:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
path = './/div[not(@aria-hidden)]/div[@aria-label="Allow all cookies"]'
try:
@ -307,7 +305,12 @@ class CExtractor(Extractor):
return u.netloc in url_list
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
# NOTE: this method does not use url_human = None and default_values = None

View File

@ -1,13 +1,14 @@
from ..twosteps_extractor import TwoStepsExtractor
from .fbevent import FacebookEvent, CExtractor as FacebookEventExtractor
from bs4 import BeautifulSoup
import json
import logging
import os
from datetime import datetime
from bs4 import BeautifulSoup
from django.utils.translation import gettext_lazy as _
import logging
from ..twosteps_extractor import TwoStepsExtractor
from .fbevent import CExtractor as FacebookEventExtractor
from .fbevent import FacebookEvent
logger = logging.getLogger(__name__)
@ -15,7 +16,6 @@ logger = logging.getLogger(__name__)
# A class dedicated to get events from a facebook events page
# such as https://www.facebook.com/laJeteeClermont/events
class CExtractor(TwoStepsExtractor):
def __init__(self):
super().__init__()
self.has_2nd_method_in_list = True
@ -41,7 +41,6 @@ class CExtractor(TwoStepsExtractor):
self.find_event_id_fragment_in_array(e)
def find_in_js(self, soup):
for json_script in soup.find_all("script", type="application/json"):
json_txt = json_script.get_text()
json_struct = json.loads(json_txt)

View File

@ -1,11 +1,11 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse, parse_qs
import dateutil.parser
from ..extractor import Extractor
import bbcode
import logging
from datetime import datetime
from urllib.parse import parse_qs, urlparse
import dateutil.parser
from bs4 import BeautifulSoup
from ..extractor import Extractor
logger = logging.getLogger(__name__)
@ -115,15 +115,18 @@ class CExtractor(Extractor):
return image
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
soup = BeautifulSoup(content, "html.parser")
for ggu in self.possible_urls:
link_calendar = soup.select('a[href^="' + ggu + '"]')
if len(link_calendar) != 0:
gg_cal = GGCalendar(link_calendar[0]["href"])
if gg_cal.is_valid_event():

View File

@ -1,17 +1,14 @@
import icalendar
import warnings
from datetime import date, datetime, timedelta
import bbcode
from datetime import datetime, date, timedelta
from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning
import icalendar
import pytz
from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning
from celery.utils.log import get_task_logger
from ..extractor import Extractor
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
@ -68,7 +65,12 @@ class ICALExtractor(Extractor):
return url
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning)

View File

@ -1,13 +1,14 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Raymond Bar
# URL: https://www.raymondbar.net/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
@ -35,7 +36,6 @@ class CExtractor(TwoStepsExtractorNoPause):
return tags
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
root_address_human = self.url_human.split("?")[0]
@ -72,7 +72,6 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
):
root_address_human = "https://" + urlparse(self.url_human).netloc + "/"
soup = BeautifulSoup(event_content, "xml")

View File

@ -1,11 +1,12 @@
from ..extractor import Extractor
import dateutil.parser
import logging
from datetime import datetime, timezone
import requests
from urllib.parse import urlparse
import dateutil.parser
import requests
from bs4 import BeautifulSoup
import logging
from ..extractor import Extractor
logger = logging.getLogger(__name__)
@ -13,14 +14,12 @@ logger = logging.getLogger(__name__)
# A class dedicated to get events from les amis du temps des cerises
# Website https://amisdutempsdescerises.org/
class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.no_downloader = True
# Source code adapted from https://framagit.org/Marc-AntoineA/mobilizon-client-python
def _request(self, body, data):
headers = {}
response = requests.post(
@ -33,17 +32,16 @@ class CExtractor(Extractor):
response_json = response.json()
if "errors" in response_json:
raise Exception(
f'Errors while requesting { body }. { str(response_json["errors"]) }'
f"Errors while requesting {body}. {str(response_json['errors'])}"
)
return response_json["data"]
else:
raise Exception(
f"Error while requesting. Status code: { response.status_code }"
f"Error while requesting. Status code: {response.status_code}"
)
def _oncoming_events_number(self):
query = """
query($preferredUsername: String!, $afterDatetime: DateTime) {
group(preferredUsername: $preferredUsername) {
@ -123,13 +121,17 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
return events
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
self.set_header(url)
self.clear_events()
if "@" in url:
# TODO: quand on a
# https://mobilizon.fr/@xr_clermont_ferrand@mobilizon.extinctionrebellion.fr/events
# on doit retourner :
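For context, the _request helper shown above follows the standard GraphQL-over-HTTP pattern: POST the query and its variables as JSON, check the HTTP status, then check the "errors" field that GraphQL uses to report application-level failures. A hedged, standalone sketch of that pattern (the function name and endpoint are illustrative, not the project's actual code):

import requests


def graphql_request(endpoint, query, variables=None):
    # POST the query as a JSON body; most GraphQL endpoints accept this form.
    response = requests.post(endpoint, json={"query": query, "variables": variables or {}})
    if response.status_code != 200:
        raise Exception(f"Error while requesting. Status code: {response.status_code}")
    payload = response.json()
    # A 200 response can still carry GraphQL errors.
    if "errors" in payload:
        raise Exception(f"Errors while requesting {query}. {payload['errors']}")
    return payload["data"]
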

View File

@ -1,12 +1,12 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from MEC Wordpress plugin
# URL: https://webnus.net/modern-events-calendar/
class CExtractor(TwoStepsExtractor):
def local2agendaCategory(self, category):
mapping = {
"Musique": "Fêtes & Concerts",

View File

@ -1,9 +1,9 @@
import logging
from .downloader import SimpleDownloader
from .extractor import Extractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
import logging
logger = logging.getLogger(__name__)
@ -37,7 +37,11 @@ class URL2Events:
if content is None:
content = self.downloader.get_content(
url, cache, referer=referer, content_type=content_type, data=data
url,
cache,
referer=referer,
content_type=content_type,
data=data,
)
if content is None:
@ -70,6 +74,11 @@ class URL2Events:
logger.info("Using cookie trick on a facebook event")
e.prepare_2nd_extract()
return self.process(
url, url_human, cache, default_values, published, False
url,
url_human,
cache,
default_values,
published,
False,
)
return None

View File

@ -1,8 +1,8 @@
import datetime
import logging
from abc import abstractmethod
import logging
from .extractor import Extractor
import datetime
logger = logging.getLogger(__name__)
@ -12,7 +12,6 @@ logger = logging.getLogger(__name__)
# - then for each document downloaded from these urls, build the events
# This class is an abstract class
class TwoStepsExtractor(Extractor):
def __init__(self):
super().__init__()
self.has_2nd_method_in_list = False
@ -145,7 +144,6 @@ class TwoStepsExtractor(Extractor):
ignore_404=True,
first=True,
):
first = True
self.only_future = only_future
self.now = datetime.datetime.now().date()
@ -180,7 +178,11 @@ class TwoStepsExtractor(Extractor):
# then extract event information from this html document
try:
self.add_event_from_content(
content_event, event_url, url_human, default_values, published
content_event,
event_url,
url_human,
default_values,
published,
)
except Exception as e:
# some website (FB) sometime need a second step
@ -216,7 +218,6 @@ class TwoStepsExtractor(Extractor):
class TwoStepsExtractorNoPause(TwoStepsExtractor):
def extract(
self,
content,
@ -233,7 +234,13 @@ class TwoStepsExtractorNoPause(TwoStepsExtractor):
pause = False
self.downloader.pause = False
result = super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
self.downloader.pause = pause
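As the comments above say, TwoStepsExtractor is an abstract base: a concrete extractor first lists the per-event URLs from an index page, then builds one event per downloaded page. A rough skeleton of such a subclass, inferred only from the method names visible in this diff; the signatures and bodies are assumptions, not the project's actual code:

from bs4 import BeautifulSoup

from ..twosteps_extractor import TwoStepsExtractor  # assumes the package layout used above


class CExtractor(TwoStepsExtractor):
    # Step 1: parse the index page and collect the URL of each upcoming event.
    def build_event_url_list(self, content, infuture_days=180):
        soup = BeautifulSoup(content, "html.parser")
        ...  # find the event links in soup and register each URL

    # Step 2: called once per downloaded event page to build the event itself.
    def add_event_from_content(
        self, event_content, event_url, url_human=None, default_values=None, published=False
    ):
        soup = BeautifulSoup(event_content, "html.parser")
        ...  # extract title, dates and place from soup
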

View File

@ -3,27 +3,35 @@
import autoslug.fields
from django.db import migrations
def migrate_data_slug_forward(apps, schema_editor):
ReferenceLocation = apps.get_model("agenda_culturel", "ReferenceLocation")
for instance in ReferenceLocation.objects.all():
print("Generating slug for %s"%instance)
instance.save() # Will trigger slug update
print("Generating slug for %s" % instance)
instance.save() # Will trigger slug update
def migrate_data_slug_backward(apps, schema_editor):
pass
class Migration(migrations.Migration):
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0001_squashed_0150_alter_event_local_image'),
("agenda_culturel", "0001_squashed_0150_alter_event_local_image"),
]
operations = [
migrations.AddField(
model_name='referencelocation',
name='slug',
field=autoslug.fields.AutoSlugField(default=None, editable=False, null=True, populate_from='name', unique=True),
model_name="referencelocation",
name="slug",
field=autoslug.fields.AutoSlugField(
default=None,
editable=False,
null=True,
populate_from="name",
unique=True,
),
),
migrations.RunPython(
code=migrate_data_slug_forward,

View File

@ -1,55 +1,51 @@
from django.db import models, connection
from django_better_admin_arrayfield.models.fields import ArrayField
from django.utils.translation import gettext_lazy as _
from django.template.defaultfilters import slugify
from django.utils.dateparse import parse_date
from django.urls import reverse
from colorfield.fields import ColorField
from django_ckeditor_5.fields import CKEditor5Field
import copy
import hashlib
import logging
import os
import unicodedata
import urllib.request
import uuid
from collections import defaultdict
from datetime import date, time, timedelta
from urllib.parse import urlparse
import emoji
import recurrence
import recurrence.fields
from autoslug import AutoSlugField
from colorfield.fields import ColorField
from django.contrib.auth.models import User
from django.contrib.gis.geos import Point
from django.contrib.postgres.search import TrigramSimilarity
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key
from django.contrib.auth.models import User
import emoji
from django.core.files.storage import default_storage
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from django.template.loader import render_to_string
from autoslug import AutoSlugField
from django_resized import ResizedImageField
import uuid
import hashlib
import urllib.request
import os
from django.core.files import File
from django.utils import timezone
from django.contrib.postgres.search import TrigramSimilarity
from django.db.models import Q, Count, F, Subquery, OuterRef, Func
from django.core.files.storage import default_storage
from django.core.mail import send_mail
from django.db import connection, models
from django.db.models import Count, F, Func, OuterRef, Q, Subquery
from django.db.models.functions import Lower
import recurrence.fields
import recurrence
import copy
import unicodedata
from collections import defaultdict
from django.template.defaultfilters import date as _date
from django.template.defaultfilters import slugify
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.dateparse import parse_date
from django.utils.timezone import datetime
from django.utils.translation import gettext_lazy as _
from django_better_admin_arrayfield.models.fields import ArrayField
from django_ckeditor_5.fields import CKEditor5Field
from django_resized import ResizedImageField
from icalendar import Calendar as icalCal
from icalendar import Event as icalEvent
from location_field.models.spatial import LocationField
from .calendar import CalendarDay
from .import_tasks.extractor import Extractor
from .import_tasks.generic_extractors.fbevent import (
CExtractor as FacebookEventExtractor,
)
from .import_tasks.extractor import Extractor
from django.template.defaultfilters import date as _date
from datetime import time, timedelta, date
from django.utils.timezone import datetime
from location_field.models.spatial import LocationField
from django.contrib.gis.geos import Point
from .calendar import CalendarDay
from icalendar import Calendar as icalCal
from icalendar import Event as icalEvent
import logging
logger = logging.getLogger(__name__)
@ -188,9 +184,11 @@ class Category(models.Model):
class Tag(models.Model):
name = models.CharField(
verbose_name=_("Name"), help_text=_("Tag name"), max_length=512, unique=True
verbose_name=_("Name"),
help_text=_("Tag name"),
max_length=512,
unique=True,
)
description = CKEditor5Field(
@ -253,7 +251,6 @@ class Tag(models.Model):
result = cache.get(id_cache)
if not result:
free_tags = Event.get_all_tags(False)
f_tags = [t["tag"] for t in free_tags]
@ -323,7 +320,6 @@ class Tag(models.Model):
class DuplicatedEvents(models.Model):
representative = models.ForeignKey(
"Event",
verbose_name=_("Representative event"),
@ -473,7 +469,6 @@ class DuplicatedEvents(models.Model):
class ReferenceLocation(models.Model):
name = models.CharField(
verbose_name=_("Name"),
help_text=_("Name of the location"),
@ -481,7 +476,10 @@ class ReferenceLocation(models.Model):
null=False,
)
location = LocationField(
based_fields=["name"], zoom=12, default=Point(3.08333, 45.783329), srid=4326
based_fields=["name"],
zoom=12,
default=Point(3.08333, 45.783329),
srid=4326,
)
main = models.IntegerField(
verbose_name=_("Main"),
@ -570,7 +568,8 @@ class Place(models.Model):
def get_absolute_url(self):
return reverse(
"view_place_fullname", kwargs={"pk": self.pk, "extra": slugify(self.name)}
"view_place_fullname",
kwargs={"pk": self.pk, "extra": slugify(self.name)},
)
def nb_events(self):
@ -716,7 +715,10 @@ class Event(models.Model):
title = models.CharField(verbose_name=_("Title"), max_length=512)
status = models.CharField(
_("Status"), max_length=20, choices=STATUS.choices, default=STATUS.DRAFT
_("Status"),
max_length=20,
choices=STATUS.choices,
default=STATUS.DRAFT,
)
category = models.ForeignKey(
@ -742,7 +744,10 @@ class Event(models.Model):
end_time = models.TimeField(verbose_name=_("End time"), blank=True, null=True)
recurrences = recurrence.fields.RecurrenceField(
verbose_name=_("Recurrence"), include_dtstart=False, blank=True, null=True
verbose_name=_("Recurrence"),
include_dtstart=False,
blank=True,
null=True,
)
exact_location = models.ForeignKey(
@ -845,7 +850,10 @@ class Event(models.Model):
def get_import_messages(self):
return self.message_set.filter(
message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS]
message_type__in=[
Message.TYPE.IMPORT_PROCESS,
Message.TYPE.UPDATE_PROCESS,
]
).order_by("date")
def get_consolidated_end_day(self, intuitive=True):
@ -995,7 +1003,12 @@ class Event(models.Model):
if not simple:
c += [
{"timestamp": m.date, "data": m, "user": m.user, "is_date": False}
{
"timestamp": m.date,
"data": m,
"user": m.user,
"is_date": False,
}
for m in self.message_set.filter(spam=False)
]
@ -1408,7 +1421,10 @@ class Event(models.Model):
CategorisationRule.apply_rules(self)
def get_contributor_message(self):
types = [Message.TYPE.FROM_CONTRIBUTOR, Message.TYPE.FROM_CONTRIBUTOR_NO_MSG]
types = [
Message.TYPE.FROM_CONTRIBUTOR,
Message.TYPE.FROM_CONTRIBUTOR_NO_MSG,
]
if self.other_versions is None or self.other_versions.representative is None:
return Message.objects.filter(
related_event=self.pk, message_type__in=types, closed=False
@ -1481,7 +1497,7 @@ class Event(models.Model):
# check if we need to clean the other_versions
if (
not clone
and not "noclean_other_versions" in kwargs
and "noclean_other_versions" not in kwargs
and self.pk
and self.other_versions is not None
and self.other_versions.nb_duplicated() == 1
@ -1901,7 +1917,6 @@ class Event(models.Model):
return events[0]
def update(self, other, all):
# integrate pending organisers
if other.has_pending_organisers() and other.pending_organisers is not None:
self.organisers.set(other.pending_organisers)
@ -2030,7 +2045,8 @@ class Event(models.Model):
else:
# we only update local information if it's a pure import and has no moderated_date
same_imported.update(
event, pure and same_imported.moderated_date is None
event,
pure and same_imported.moderated_date is None,
)
# save messages
if event.has_message():
@ -2046,7 +2062,8 @@ class Event(models.Model):
):
same_imported.download_image()
same_imported.save(
update_fields=["local_image"], noclean_other_versions=True
update_fields=["local_image"],
noclean_other_versions=True,
)
to_update.append(same_imported)
@ -2104,7 +2121,13 @@ class Event(models.Model):
nb_updated = Event.objects.bulk_update(
to_update,
fields=Event.data_fields(no_m2m=True)
+ ["imported_date", "modified_date", "uuids", "status", "import_sources"],
+ [
"imported_date",
"modified_date",
"uuids",
"status",
"import_sources",
],
)
nb_draft = 0
@ -2329,15 +2352,15 @@ class Event(models.Model):
class Message(models.Model):
class TYPE(models.TextChoices):
FROM_CONTRIBUTOR = "from_contributor", _("From contributor")
IMPORT_PROCESS = "import_process", _("Import process")
UPDATE_PROCESS = "update_process", _("Update process")
CONTACT_FORM = "contact_form", _("Contact form")
EVENT_REPORT = "event_report", _("Event report")
FROM_CONTRIBUTOR_NO_MSG = "from_contrib_no_msg", _(
"From contributor (without message)"
FROM_CONTRIBUTOR_NO_MSG = (
"from_contrib_no_msg",
_("From contributor (without message)"),
)
WARNING = "warning", _("Warning")
@ -2470,7 +2493,10 @@ class RecurrentImport(models.Model):
class DOWNLOADER(models.TextChoices):
SIMPLE = "simple", _("simple")
CHROMIUMHEADLESS = "chromium headless", _("Headless Chromium")
CHROMIUMHEADLESSPAUSE = "chromium (pause)", _("Headless Chromium (pause)")
CHROMIUMHEADLESSPAUSE = (
"chromium (pause)",
_("Headless Chromium (pause)"),
)
class RECURRENCE(models.TextChoices):
DAILY = (
@ -2489,7 +2515,10 @@ class RecurrentImport(models.Model):
default="",
)
processor = models.CharField(
_("Processor"), max_length=20, choices=PROCESSOR.choices, default=PROCESSOR.ICAL
_("Processor"),
max_length=20,
choices=PROCESSOR.choices,
default=PROCESSOR.ICAL,
)
downloader = models.CharField(
_("Downloader"),
@ -2628,7 +2657,10 @@ class BatchImportation(models.Model):
)
status = models.CharField(
_("Status"), max_length=20, choices=STATUS.choices, default=STATUS.RUNNING
_("Status"),
max_length=20,
choices=STATUS.choices,
default=STATUS.RUNNING,
)
error_message = models.CharField(

View File

@ -1,8 +1,9 @@
from os import getenv as os_getenv, path as os_path # noqa
from os import getenv as os_getenv # noqa
from os import path as os_path
from pathlib import Path
from django.utils.translation import gettext_lazy as _
from django.core.management.utils import get_random_secret_key
from django.utils.translation import gettext_lazy as _
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent.parent

View File

@ -1,6 +1,5 @@
from .base import * # noqa
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",

View File

@ -102,4 +102,3 @@
</div>
{% endblock %}

View File

@ -38,4 +38,3 @@
{% include "agenda_culturel/duplicate-info-inc.html" with object=object %}
{% endblock %}

View File

@ -5,4 +5,3 @@ Nous avons la dure tâche de t'informer que l'événement « {{ event_title }}
Nous te remercions pour cette proposition, et espérons qu'une prochaine fois, ta proposition correspondra à la ligne portée par {{ sitename }}.
L'équipe de modération.

View File

@ -42,4 +42,3 @@
{% include "agenda_culturel/duplicate-info-inc.html" with object=object %}
{% endblock %}

View File

@ -28,4 +28,3 @@
</form>
</article>
{% endblock %}

View File

@ -17,4 +17,3 @@
{% else %}
Page 1 sur 1
{% endif %}

View File

@ -27,4 +27,3 @@
{% include "agenda_culturel/single-event/event-single-inc.html" with event=event %}
</article>
{% endblock %}

View File

@ -115,4 +115,3 @@
{% endif %}
</article>

View File

@ -172,4 +172,3 @@
{% endif %}
</footer>
</article>

View File

@ -125,4 +125,3 @@
</script>
{% endblock %}

View File

@ -29,4 +29,3 @@
</form>
</article>
{% endblock %}

View File

@ -1,12 +1,13 @@
import colorsys
import logging
from django import template
from django.utils.safestring import mark_safe
from django.core.cache import cache
from django.utils.safestring import mark_safe
from agenda_culturel.models import Category
import colorsys
from .utils_extra import picto_from_name
import logging
from .utils_extra import picto_from_name
logger = logging.getLogger(__name__)
@ -82,7 +83,10 @@ def css_categories():
for c in Category.objects.all()
]
cats.append(
{"color": Category.default_color, "css_class": Category.default_css_class}
{
"color": Category.default_color,
"css_class": Category.default_css_class,
}
)
for c in cats:

View File

@ -1,8 +1,7 @@
from django import template
from django.utils.safestring import mark_safe
from django.urls import reverse_lazy
from django.template.defaultfilters import pluralize
from django.urls import reverse_lazy
from django.utils.safestring import mark_safe
from agenda_culturel.models import DuplicatedEvents
@ -13,7 +12,6 @@ register = template.Library()
@register.simple_tag
def show_badge_duplicated(placement="top"):
nb_duplicated = DuplicatedEvents.not_fixed_qs().count()
if nb_duplicated != 0:

View File

@ -1,10 +1,11 @@
from django import template
from django.utils.safestring import mark_safe
from django.urls import reverse_lazy
from django.template.defaultfilters import pluralize, linebreaks, urlize
from django.db.models import Q, F
import re
from datetime import timedelta, datetime
from datetime import datetime, timedelta
from django import template
from django.db.models import F, Q
from django.template.defaultfilters import linebreaks, pluralize, urlize
from django.urls import reverse_lazy
from django.utils.safestring import mark_safe
from agenda_culturel.models import Event

View File

@ -1,9 +1,10 @@
from django import template
from django.utils.safestring import mark_safe
from .utils_extra import picto_from_name
from agenda_culturel.models import ReferenceLocation
from .utils_extra import picto_from_name
register = template.Library()

View File

@ -1,8 +1,7 @@
from django import template
from django.utils.safestring import mark_safe
from django.urls import reverse_lazy
from django.template.defaultfilters import pluralize
from django.urls import reverse_lazy
from django.utils.safestring import mark_safe
from agenda_culturel.models import Message

View File

@ -1,14 +1,14 @@
import logging
from django import template
from django.utils.safestring import mark_safe
from django.urls import reverse_lazy
from django.db.models import Count, OuterRef, Subquery
from django.template.defaultfilters import pluralize
from django.db.models import OuterRef, Subquery, Count
from django.urls import reverse_lazy
from django.utils.safestring import mark_safe
from agenda_culturel.models import RecurrentImport, BatchImportation
from agenda_culturel.models import BatchImportation, RecurrentImport
from .utils_extra import picto_from_name
import logging
register = template.Library()

View File

@ -1,7 +1,9 @@
from django import template
from django.utils.safestring import mark_safe
from django.urls import reverse_lazy
from django.utils.safestring import mark_safe
from agenda_culturel.models import Tag
from .cat_extra import circle_cat
register = template.Library()

View File

@ -1,15 +1,15 @@
from django import template
from django.utils.safestring import mark_safe
from django.template.defaultfilters import stringfilter
from urllib.parse import urlparse
from datetime import timedelta, date, datetime
from dateutil.relativedelta import relativedelta
from django.urls import reverse_lazy
from django.templatetags.static import static
from datetime import date, datetime, timedelta
from string import ascii_uppercase as auc
from django.utils.html import strip_tags
from urllib.parse import urlparse
import emoji
from dateutil.relativedelta import relativedelta
from django import template
from django.template.defaultfilters import stringfilter
from django.templatetags.static import static
from django.urls import reverse_lazy
from django.utils.html import strip_tags
from django.utils.safestring import mark_safe
register = template.Library()

View File

@ -1,21 +1,109 @@
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.urls import path, include, re_path
from django.views.i18n import JavaScriptCatalog
from django.contrib.sitemaps.views import sitemap
from django.contrib.sitemaps import GenericSitemap
from django.contrib.sitemaps.views import sitemap
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.urls import include, path, re_path
from django.views.decorators.cache import cache_page
from django.views.i18n import JavaScriptCatalog
from .sitemaps import (
StaticViewSitemap,
HomeCategorySitemap,
MonthCategorySitemap,
WeekCategorySitemap,
StaticViewSitemap,
UpcomingCategorySitemap,
WeekCategorySitemap,
)
from .models import Event, Place, Organisation, Category
from .views import (
home,
week_view,
month_view,
day_view,
upcoming_events,
export_ical,
view_tag,
view_tag_past,
TagUpdateView,
recent,
administration,
activite,
OrganisationDeleteView,
OrganisationCreateView,
OrganisationDetailView,
OrganisationDetailViewPast,
OrganisationListView,
OrganisationUpdateView,
PlaceDeleteView,
PlaceDetailView,
PlaceDetailViewPast,
PlaceUpdateView,
PlaceListView,
PlaceListAdminView,
UnknownPlaceAddView,
UnknownPlacesListView,
fix_duplicate,
fix_unknown_places,
clear_cache,
export_event_ical,
MessageDeleteView,
imports,
add_import,
update_orphan_events,
cancel_import,
run_all_fb_rimports,
run_all_rimports,
tag_list,
TagDeleteView,
rename_tag,
delete_tag,
TagCreateView,
EventDetailView,
EventUpdateView,
EventModerateView,
moderate_event_next,
RecurrentImportCreateView,
RecurrentImportDeleteView,
RecurrentImportUpdateView,
run_rimport,
categorisation_rules,
duplicates,
DuplicatedEventsDetailView,
StaticContentCreateView,
StaticContentUpdateView,
about,
thank_you,
MessageCreateView,
merge_duplicate,
EventCreateView,
event_search,
event_search_full,
recurrent_imports,
delete_cm_spam,
page_not_found,
internal_server_error,
PlaceCreateView,
PlaceFromEventCreateView,
moderate_from_date,
update_from_source,
change_status_event,
EventDeleteView,
set_duplicate,
import_event_proxy,
import_from_url,
import_from_urls,
mentions_legales,
view_messages,
MessageUpdateView,
statistics,
view_rimport,
update_duplicate_event,
CategorisationRuleCreateView,
CategorisationRuleDeleteView,
CategorisationRuleUpdateView,
apply_categorisation_rules,
)
from django.views.decorators.cache import cache_page
from .views import *
event_dict = {
"queryset": Event.objects.all(),
@ -48,11 +136,15 @@ urlpatterns = [
path("", home, name="home"),
path("cat:<cat>/", home, name="home_category"),
path(
"cat:<cat>/semaine/<int:year>/<int:week>/", week_view, name="week_view_category"
"cat:<cat>/semaine/<int:year>/<int:week>/",
week_view,
name="week_view_category",
),
path("cat:<cat>/cette-semaine/", week_view, name="cette_semaine_category"),
path(
"cat:<cat>/mois/<int:year>/<int:month>/", month_view, name="month_view_category"
"cat:<cat>/mois/<int:year>/<int:month>/",
month_view,
name="month_view_category",
),
path(
"cat:<cat>/jour/<int:year>/<int:month>/<int:day>/",
@ -87,7 +179,11 @@ urlpatterns = [
path("tag/<t>/past", view_tag_past, name="view_tag_past"),
path("tags/", tag_list, name="view_all_tags"),
path("tag/<int:pk>/edit", TagUpdateView.as_view(), name="edit_tag"),
path("tag/<int:pk>/delete", TagDeleteView.as_view(), name="delete_object_tag"),
path(
"tag/<int:pk>/delete",
TagDeleteView.as_view(),
name="delete_object_tag",
),
path("tag/<t>/rename", rename_tag, name="rename_tag"),
path("tag/<t>/delete", delete_tag, name="delete_tag"),
path("tags/add", TagCreateView.as_view(), name="add_tag"),
@ -101,14 +197,20 @@ urlpatterns = [
),
path("event/<int:pk>/", EventDetailView.as_view(), name="edit_event_pk"),
path("event/<int:pk>/edit", EventUpdateView.as_view(), name="edit_event"),
path("event/<int:pk>/moderate", EventModerateView.as_view(), name="moderate_event"),
path(
"event/<int:pk>/moderate",
EventModerateView.as_view(),
name="moderate_event",
),
path(
"event/<int:pk>/moderate/after/<int:pred>",
EventModerateView.as_view(),
name="moderate_event_step",
),
path(
"event/<int:pk>/moderate-next", moderate_event_next, name="moderate_event_next"
"event/<int:pk>/moderate-next",
moderate_event_next,
name="moderate_event_next",
),
path("moderate", EventModerateView.as_view(), name="moderate"),
path(
@ -121,9 +223,15 @@ urlpatterns = [
EventUpdateView.as_view(),
name="simple_clone_edit",
),
path("event/<int:pk>/clone/edit", EventUpdateView.as_view(), name="clone_edit"),
path(
"event/<int:pk>/message", MessageCreateView.as_view(), name="message_for_event"
"event/<int:pk>/clone/edit",
EventUpdateView.as_view(),
name="clone_edit",
),
path(
"event/<int:pk>/message",
MessageCreateView.as_view(),
name="message_for_event",
),
path(
"event/<int:pk>/update-from-source",
@ -179,16 +287,24 @@ urlpatterns = [
),
path("imports/", imports, name="imports"),
path("imports/add", add_import, name="add_import"),
path("imports/orphans/run", update_orphan_events, name="update_orphan_events"),
path(
"imports/orphans/run",
update_orphan_events,
name="update_orphan_events",
),
path("imports/<int:pk>/cancel", cancel_import, name="cancel_import"),
path("rimports/", recurrent_imports, name="recurrent_imports"),
path("rimports/run", run_all_rimports, name="run_all_rimports"),
path("rimports/fb/run", run_all_fb_rimports, name="run_all_fb_rimports"),
path(
"rimports/status/<status>", recurrent_imports, name="recurrent_imports_status"
"rimports/status/<status>",
recurrent_imports,
name="recurrent_imports_status",
),
path(
"rimports/status/<status>/run", run_all_rimports, name="run_all_rimports_status"
"rimports/status/<status>/run",
run_all_rimports,
name="run_all_rimports_status",
),
path("rimports/add", RecurrentImportCreateView.as_view(), name="add_rimport"),
path("rimports/<int:pk>/view", view_rimport, name="view_rimport"),
@ -204,7 +320,11 @@ urlpatterns = [
),
path("rimports/<int:pk>/run", run_rimport, name="run_rimport"),
path("catrules/", categorisation_rules, name="categorisation_rules"),
path("catrules/add", CategorisationRuleCreateView.as_view(), name="add_catrule"),
path(
"catrules/add",
CategorisationRuleCreateView.as_view(),
name="add_catrule",
),
path(
"catrules/<int:pk>/edit",
CategorisationRuleUpdateView.as_view(),
@ -266,9 +386,21 @@ urlpatterns = [
OrganisationDeleteView.as_view(),
name="delete_organisation",
),
path("organismes/", OrganisationListView.as_view(), name="view_organisations"),
path("organisme/add", OrganisationCreateView.as_view(), name="add_organisation"),
path("place/<int:pk>/past", PlaceDetailViewPast.as_view(), name="view_place_past"),
path(
"organismes/",
OrganisationListView.as_view(),
name="view_organisations",
),
path(
"organisme/add",
OrganisationCreateView.as_view(),
name="add_organisation",
),
path(
"place/<int:pk>/past",
PlaceDetailViewPast.as_view(),
name="view_place_past",
),
path("place/<int:pk>", PlaceDetailView.as_view(), name="view_place"),
path(
"place/<int:pk>-<extra>/past",
@ -276,7 +408,9 @@ urlpatterns = [
name="view_place_past_fullname",
),
path(
"place/<int:pk>-<extra>", PlaceDetailView.as_view(), name="view_place_fullname"
"place/<int:pk>-<extra>",
PlaceDetailView.as_view(),
name="view_place_fullname",
),
path("place/<int:pk>/edit", PlaceUpdateView.as_view(), name="edit_place"),
path("place/<int:pk>/delete", PlaceDeleteView.as_view(), name="delete_place"),
@ -293,7 +427,11 @@ urlpatterns = [
UnknownPlacesListView.as_view(),
name="view_unknown_places",
),
path("events/unknown-places/fix", fix_unknown_places, name="fix_unknown_places"),
path(
"events/unknown-places/fix",
fix_unknown_places,
name="fix_unknown_places",
),
path(
"event/<int:pk>/addplace",
UnknownPlaceAddView.as_view(),
@ -330,7 +468,9 @@ js_info_dict = {
# jsi18n can be anything you like here
urlpatterns += [
path(
"jsi18n.js", JavaScriptCatalog.as_view(packages=["recurrence"]), name="jsi18n"
"jsi18n.js",
JavaScriptCatalog.as_view(packages=["recurrence"]),
name="jsi18n",
),
]
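Besides the line-length wrapping, the substantive change in this file is replacing "from .views import *" with the explicit import list above. Ruff's pyflakes-derived rules motivate this: F403 flags the star import itself and F405 flags every name that might silently come from it, so explicit imports let the linter prove that each view referenced in urlpatterns really exists. A toy illustration, not the project's code:

# With a wildcard import, ruff can only warn:
#   from views import *            -> F403: unable to detect undefined names
#   path("", hom, name="home")     -> F405: hom may be undefined, or defined from star imports
# With an explicit import, the same typo becomes a hard error (F821, undefined name):
from django.urls import path

from views import home  # "views" is an illustrative module name

urlpatterns = [
    path("", home, name="home"),
]
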

View File

@ -1,10 +1,10 @@
from agenda_culturel.models import ReferenceLocation
import re
import unicodedata
from agenda_culturel.models import ReferenceLocation
class PlaceGuesser:
def __init__(self):
self.__citynames = list(
ReferenceLocation.objects.values_list("name__lower__unaccent", "name")

View File

@ -1,108 +1,106 @@
from django.shortcuts import render, get_object_or_404
from django.views.generic import ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView, ModelFormMixin
import calendar as _calendar
import hashlib
import logging
from datetime import date, timedelta
import emoji
from django.contrib import messages
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.mixins import (
LoginRequiredMixin,
UserPassesTestMixin,
PermissionRequiredMixin,
UserPassesTestMixin,
)
from django.http import Http404
from django.utils.safestring import mark_safe
from django.utils.decorators import method_decorator
from honeypot.decorators import check_honeypot
from .utils import PlaceGuesser
import hashlib
from django.contrib.messages.views import SuccessMessageMixin
from django.core.cache import cache
from django.core.mail import mail_admins
import calendar as _calendar
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseForbidden
from django.urls import reverse
import emoji
from .forms import (
EventForm,
EventFormWithContact,
BatchImportationForm,
FixDuplicates,
SelectEventInList,
MergeDuplicates,
RecurrentImportForm,
CategorisationRuleImportForm,
CategorisationForm,
EventAddPlaceForm,
PlaceForm,
EventModerateForm,
TagForm,
TagRenameForm,
MessageForm,
MessageEventForm,
URLSubmissionFormWithContact,
URLSubmissionFormSet,
SimpleContactForm,
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db.models import Count, F, Func, OuterRef, Q, Subquery
from django.http import (
Http404,
HttpResponse,
HttpResponseForbidden,
HttpResponseRedirect,
)
from django.shortcuts import get_object_or_404, render
from django.urls import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.timezone import datetime
from django.utils.translation import gettext_lazy as _
from django.views.generic import DetailView, ListView
from django.views.generic.edit import (
CreateView,
DeleteView,
ModelFormMixin,
UpdateView,
)
from honeypot.decorators import check_honeypot
from .calendar import CalendarDay, CalendarList, CalendarMonth, CalendarWeek
from .celery import app as celery_app
from .celery import (
import_events_from_json,
import_events_from_url,
import_events_from_urls,
run_all_recurrent_imports,
run_all_recurrent_imports_canceled,
run_all_recurrent_imports_failed,
run_recurrent_import,
update_orphan_pure_import_events,
)
from .filters import (
DuplicatedEventsFilter,
EventFilter,
EventFilterAdmin,
MessagesFilterAdmin,
SimpleSearchEventFilter,
SearchEventFilter,
DuplicatedEventsFilter,
RecurrentImportFilter,
SearchEventFilter,
SimpleSearchEventFilter,
)
from .models import (
Event,
Category,
Tag,
StaticContent,
Message,
BatchImportation,
DuplicatedEvents,
RecurrentImport,
CategorisationRule,
remove_accents,
Place,
Organisation,
from .forms import (
BatchImportationForm,
CategorisationForm,
CategorisationRuleImportForm,
EventAddPlaceForm,
EventForm,
EventFormWithContact,
EventModerateForm,
FixDuplicates,
MergeDuplicates,
MessageEventForm,
MessageForm,
PlaceForm,
RecurrentImportForm,
SelectEventInList,
SimpleContactForm,
TagForm,
TagRenameForm,
URLSubmissionFormSet,
URLSubmissionFormWithContact,
)
from django.utils.html import escape
from datetime import date, timedelta
from django.utils.timezone import datetime
from django.db.models import Q, Subquery, OuterRef, Count, F, Func
from django.urls import reverse_lazy
from django.utils.translation import gettext_lazy as _
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from .calendar import CalendarMonth, CalendarWeek, CalendarDay, CalendarList
from .import_tasks.extractor import Extractor
from .celery import (
app as celery_app,
import_events_from_json,
run_recurrent_import,
run_all_recurrent_imports,
run_all_recurrent_imports_failed,
run_all_recurrent_imports_canceled,
import_events_from_url,
import_events_from_urls,
update_orphan_pure_import_events,
from .models import (
BatchImportation,
CategorisationRule,
Category,
DuplicatedEvents,
Event,
Message,
Organisation,
Place,
RecurrentImport,
StaticContent,
Tag,
remove_accents,
)
import logging
from .utils import PlaceGuesser
logger = logging.getLogger(__name__)
class PaginatorFilter(Paginator):
def __init__(self, filter, nb, request):
self.request = request
self.filter = filter
@ -133,7 +131,9 @@ class PaginatorFilter(Paginator):
try:
page.url_previous_page = PaginatorFilter.update_param(
self.request.get_full_path(), "page", page.previous_page_number()
self.request.get_full_path(),
"page",
page.previous_page_number(),
)
except EmptyPage:
page.url_previous_page = self.request.get_full_path()
@ -360,7 +360,10 @@ def upcoming_events(request, year=None, month=None, day=None, neighsize=1, cat=N
return HttpResponseRedirect(filter.get_new_url())
cal = CalendarList(
day + timedelta(days=-neighsize), day + timedelta(days=neighsize), filter, True
day + timedelta(days=-neighsize),
day + timedelta(days=neighsize),
filter,
True,
)
context = {
@ -389,7 +392,10 @@ class StaticContentCreateView(LoginRequiredMixin, CreateView):
class StaticContentUpdateView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
UpdateView,
):
model = StaticContent
permission_required = "agenda_culturel.change_staticcontent"
@ -410,7 +416,11 @@ def update_from_source(request, pk):
)
else:
import_events_from_url.delay(
url, None, None, True, user_id=request.user.pk if request.user else None
url,
None,
None,
True,
user_id=request.user.pk if request.user else None,
)
messages.success(
request,
@ -421,7 +431,10 @@ def update_from_source(request, pk):
class EventUpdateView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
UpdateView,
):
model = Event
permission_required = "agenda_culturel.change_event"
@ -490,7 +503,10 @@ class EventUpdateView(
class EventModerateView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
UpdateView,
):
model = Event
permission_required = "agenda_culturel.change_event"
@ -593,7 +609,6 @@ class EventModerateView(
@login_required(login_url="/accounts/login/")
@permission_required("agenda_culturel.change_event")
def error_next_event(request, pk):
obj = Event.objects.filter(pk=pk).first()
return render(
@ -633,7 +648,10 @@ def moderate_from_date(request, y, m, d):
class EventDeleteView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, DeleteView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
DeleteView,
):
model = Event
permission_required = "agenda_culturel.delete_event"
@ -765,7 +783,6 @@ class EventCreateView(SuccessMessageMixin, CreateView):
)
def form_valid(self, form):
if form.cleaned_data["simple_cloning"]:
form.instance.set_skip_duplicate_check()
@ -809,7 +826,6 @@ class EventCreateView(SuccessMessageMixin, CreateView):
# A class to evaluate the URL according to the existing events and the authentification
# level of the user
class URLEventEvaluation:
def __init__(self, form, is_authenticated):
self.form = form
self.is_authenticated = is_authenticated
@ -871,7 +887,6 @@ class URLEventEvaluation:
def import_from_urls(request):
if request.method == "POST":
formset = URLSubmissionFormSet(request.POST, request.FILES)
@ -946,7 +961,6 @@ def import_from_urls(request):
def import_from_url(request):
form = URLSubmissionFormWithContact(is_authenticated=request.user.is_authenticated)
initial = {
@ -973,8 +987,7 @@ def import_from_url(request):
request,
mark_safe(
_(
"{} has not been submitted since it"
"s already known: {}."
"{} has not been submitted since its already known: {}."
).format(uc.url, uc.get_link())
),
)
@ -991,7 +1004,8 @@ def import_from_url(request):
else:
messages.info(
request, _("Integrating {} into our import process.").format(uc.url)
request,
_("Integrating {} into our import process.").format(uc.url),
)
import_events_from_url.delay(
uc.url,
@ -1125,7 +1139,10 @@ class MessageDeleteView(SuccessMessageMixin, DeleteView):
class MessageUpdateView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
UpdateView,
):
model = Message
permission_required = "agenda_culturel.change_message"
@ -1308,7 +1325,6 @@ def view_messages(request):
@login_required(login_url="/accounts/login/")
@permission_required("agenda_culturel.view_message")
def delete_cm_spam(request):
if request.method == "POST":
Message.objects.filter(spam=True).delete()
@ -1322,7 +1338,11 @@ def delete_cm_spam(request):
return render(
request,
"agenda_culturel/delete_spams_confirm.html",
{"nb_total": nb_total, "nb_spams": nb_spams, "cancel_url": cancel_url},
{
"nb_total": nb_total,
"nb_spams": nb_spams,
"cancel_url": cancel_url,
},
)
@ -1366,7 +1386,10 @@ def event_search(request, full=False):
.distinct()
)
tags = [
(t, emoji.demojize(remove_accents(t).lower(), delimiters=("000", "")))
(
t,
emoji.demojize(remove_accents(t).lower(), delimiters=("000", "")),
)
for t in tags
]
tags = [t for t in tags if s_q == t[1]]
@ -1459,13 +1482,19 @@ def imports(request):
return render(
request,
"agenda_culturel/imports.html",
{"paginator_filter": response, "nb_in_orphan_import": nb_in_orphan_import},
{
"paginator_filter": response,
"nb_in_orphan_import": nb_in_orphan_import,
},
)
@login_required(login_url="/accounts/login/")
@permission_required(
["agenda_culturel.add_batchimportation", "agenda_culturel.run_batchimportation"]
[
"agenda_culturel.add_batchimportation",
"agenda_culturel.run_batchimportation",
]
)
def add_import(request):
form = BatchImportationForm()
@ -1484,7 +1513,10 @@ def add_import(request):
@login_required(login_url="/accounts/login/")
@permission_required(
["agenda_culturel.view_batchimportation", "agenda_culturel.run_batchimportation"]
[
"agenda_culturel.view_batchimportation",
"agenda_culturel.run_batchimportation",
]
)
def cancel_import(request, pk):
import_process = get_object_or_404(BatchImportation, pk=pk)
@ -1508,7 +1540,10 @@ def cancel_import(request, pk):
@login_required(login_url="/accounts/login/")
@permission_required(
["agenda_culturel.view_batchimportation", "agenda_culturel.run_batchimportation"]
[
"agenda_culturel.view_batchimportation",
"agenda_culturel.run_batchimportation",
]
)
def update_orphan_events(request):
if request.method == "POST":
@ -1518,7 +1553,6 @@ def update_orphan_events(request):
messages.success(request, _("The orphan event update has been launched."))
return HttpResponseRedirect(reverse_lazy("imports"))
else:
today = date.today()
srcs = RecurrentImport.objects.all().values_list("source")
@ -1548,7 +1582,6 @@ def update_orphan_events(request):
@login_required(login_url="/accounts/login/")
@permission_required("agenda_culturel.view_recurrentimport")
def recurrent_imports(request, status=None):
newest = BatchImportation.objects.filter(recurrentImport=OuterRef("pk")).order_by(
"-created_date"
)
@ -1605,7 +1638,10 @@ class RecurrentImportCreateView(
class RecurrentImportUpdateView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
UpdateView,
):
model = RecurrentImport
permission_required = "agenda_culturel.change_recurrentimport"
@ -1614,7 +1650,10 @@ class RecurrentImportUpdateView(
class RecurrentImportDeleteView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, DeleteView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
DeleteView,
):
model = RecurrentImport
permission_required = "agenda_culturel.delete_recurrentimport"
@ -1624,7 +1663,10 @@ class RecurrentImportDeleteView(
@login_required(login_url="/accounts/login/")
@permission_required(
["agenda_culturel.view_recurrentimport", "agenda_culturel.view_batchimportation"]
[
"agenda_culturel.view_recurrentimport",
"agenda_culturel.view_batchimportation",
]
)
def view_rimport(request, pk):
obj = get_object_or_404(RecurrentImport, pk=pk)
@ -1650,7 +1692,10 @@ def view_rimport(request, pk):
@login_required(login_url="/accounts/login/")
@permission_required(
["agenda_culturel.view_recurrentimport", "agenda_culturel.run_recurrentimport"]
[
"agenda_culturel.view_recurrentimport",
"agenda_culturel.run_recurrentimport",
]
)
def run_rimport(request, pk):
rimport = get_object_or_404(RecurrentImport, pk=pk)
@ -1663,13 +1708,18 @@ def run_rimport(request, pk):
return HttpResponseRedirect(reverse_lazy("view_rimport", args=[pk]))
else:
return render(
request, "agenda_culturel/run_rimport_confirm.html", {"object": rimport}
request,
"agenda_culturel/run_rimport_confirm.html",
{"object": rimport},
)
@login_required(login_url="/accounts/login/")
@permission_required(
["agenda_culturel.view_recurrentimport", "agenda_culturel.run_recurrentimport"]
[
"agenda_culturel.view_recurrentimport",
"agenda_culturel.run_recurrentimport",
]
)
def run_all_rimports(request, status=None):
if request.method == "POST":
@ -1694,7 +1744,10 @@ def run_all_rimports(request, status=None):
@login_required(login_url="/accounts/login/")
@permission_required(
["agenda_culturel.view_recurrentimport", "agenda_culturel.run_recurrentimport"]
[
"agenda_culturel.view_recurrentimport",
"agenda_culturel.run_recurrentimport",
]
)
def run_all_fb_rimports(request, status=None):
if request.method == "POST":
@ -1729,7 +1782,6 @@ def update_duplicate_event(request, pk, epk):
if request.method == "POST":
form = MergeDuplicates(request.POST, duplicates=edup)
if form.is_valid():
for f in edup.get_items_comparison():
if not f["similar"]:
selected = form.get_selected_events(f["key"])
@ -1749,7 +1801,11 @@ def update_duplicate_event(request, pk, epk):
if f["key"] == "organisers":
event.organisers.set(selected.organisers.all())
else:
setattr(event, f["key"], getattr(selected, f["key"]))
setattr(
event,
f["key"],
getattr(selected, f["key"]),
)
if f["key"] == "image":
setattr(
event,
@ -1766,7 +1822,11 @@ def update_duplicate_event(request, pk, epk):
return render(
request,
"agenda_culturel/update_duplicate.html",
context={"form": form, "object": edup, "event_id": edup.get_event_index(event)},
context={
"form": form,
"object": edup,
"event_id": edup.get_event_index(event),
},
)
@ -1884,7 +1944,8 @@ def fix_duplicate(request, pk):
else:
edup.fix(selected)
messages.success(
request, _("The selected event has been set as representative")
request,
_("The selected event has been set as representative"),
)
return HttpResponseRedirect(edup.get_absolute_url())
elif form.is_action_remove():
@ -1977,7 +2038,11 @@ def duplicates(request):
return render(
request,
"agenda_culturel/duplicates.html",
{"filter": filter, "paginator_filter": response, "paginator": paginator},
{
"filter": filter,
"paginator_filter": response,
"paginator": paginator,
},
)
@ -2064,7 +2129,10 @@ class CategorisationRuleCreateView(
class CategorisationRuleUpdateView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, UpdateView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
UpdateView,
):
model = CategorisationRule
permission_required = "agenda_culturel.change_categorisationrule"
@ -2074,7 +2142,10 @@ class CategorisationRuleUpdateView(
class CategorisationRuleDeleteView(
SuccessMessageMixin, PermissionRequiredMixin, LoginRequiredMixin, DeleteView
SuccessMessageMixin,
PermissionRequiredMixin,
LoginRequiredMixin,
DeleteView,
):
model = CategorisationRule
permission_required = "agenda_culturel.delete_categorisationrule"
@ -2242,7 +2313,6 @@ class PlaceDetailView(ListView):
class PlaceDetailViewPast(PlaceDetailView):
def get_queryset(self):
self.place = get_object_or_404(Place, pk=self.kwargs["pk"])
self.past = True
@ -2278,7 +2348,8 @@ class UpdatePlaces:
if self.nb_applied > 1:
messages.success(
self.request, _("{} events have been updated.").format(self.nb_applied)
self.request,
_("{} events have been updated.").format(self.nb_applied),
)
elif self.nb_applied == 1:
messages.success(self.request, _("1 event has been updated."))
@ -2366,11 +2437,13 @@ class UnknownPlaceAddView(PermissionRequiredMixin, SuccessMessageMixin, UpdateVi
if form.cleaned_data.get("place"):
messages.success(
self.request, _("The selected place has been assigned to the event.")
self.request,
_("The selected place has been assigned to the event."),
)
if form.cleaned_data.get("add_alias"):
messages.success(
self.request, _("A new alias has been added to the selected place.")
self.request,
_("A new alias has been added to the selected place."),
)
nb_applied = form.cleaned_data.get("place").associate_matching_events()
@ -2396,7 +2469,6 @@ class UnknownPlaceAddView(PermissionRequiredMixin, SuccessMessageMixin, UpdateVi
class PlaceFromEventCreateView(PlaceCreateView):
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context["event"] = self.event
@ -2470,7 +2542,6 @@ class OrganisationDetailView(ListView):
class OrganisationDetailViewPast(OrganisationDetailView):
def get_queryset(self):
self.organisation = (
Organisation.objects.filter(pk=self.kwargs["pk"])
@ -2595,7 +2666,6 @@ def view_tag(request, t, past=False):
def statistics(request):
stats = {}
first = {}
last = {}
@ -2776,7 +2846,6 @@ def delete_tag(request, t):
respage = reverse_lazy("view_all_tags")
if request.method == "POST":
# remove tag from events
events = Event.objects.filter(tags__contains=[t])
for e in events:
@ -2809,7 +2878,13 @@ def delete_tag(request, t):
return render(
request,
"agenda_culturel/tag_confirm_delete_by_name.html",
{"tag": t, "nb": nb, "nbi": nbi, "cancel_url": cancel_url, "obj": obj},
{
"tag": t,
"nb": nb,
"nbi": nbi,
"cancel_url": cancel_url,
"obj": obj,
},
)

View File

@ -3,6 +3,6 @@ pytest==7.3.1
pytest-django==4.5.2
pytest-sugar==0.9.7
coverage[toml]==7.2.5
black==23.3.0
ruff==0.0.265
black==25.1.0
ruff==0.9.9
pre-commit==4.1.0

View File

@ -1,8 +1,10 @@
from djipsum.faker import FakerModel
from agenda_culturel.models import Event
import random
from datetime import datetime, timedelta
from djipsum.faker import FakerModel
from agenda_culturel.models import Event
def run():
tags = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."

View File

@ -1,7 +1,8 @@
from django.test import RequestFactory
import cProfile
import django.urls
from django.contrib.auth.models import AnonymousUser
import cProfile
from django.test import RequestFactory
# inspiré de https://enix.io/fr/blog/django-performance-profiler/