Code cleanup with ruff

Jean-Marie Favreau 2025-03-02 19:12:50 +01:00
parent d8c4c55c44
commit b5d4c0f0b1
225 changed files with 3748 additions and 1790 deletions

View File

@ -6,3 +6,17 @@ repos:
rev: 25.1.0
hooks:
- id: black
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.9.9
hooks:
# Run the linter.
- id: ruff
types_or: [ python, pyi ]
args: [ --fix ]
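
For reference, the ruff hook above only runs on staged Python files at commit time. A minimal sketch of the equivalent manual pass, assuming `ruff` is installed locally and that `src` is the directory to lint (both assumptions, not part of this commit):

```python
# Hypothetical helper: run the same lint + autofix pass as the ruff pre-commit hook.
import subprocess
import sys


def run_ruff_fix(path: str = "src") -> int:
    """Lint `path` with ruff and apply safe autofixes, mirroring the hook."""
    return subprocess.run(["ruff", "check", "--fix", path]).returncode


if __name__ == "__main__":
    sys.exit(run_ruff_fix())
```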

View File

@ -71,4 +71,4 @@ On peut ensuite modifier le mot de passe de l'utilisateur root qui a tous les dr
### Completely reset an instance
* ```docker compose down --rmi all --volumes```
* ```make build-dev```

View File

@ -5,4 +5,4 @@ wait_for_it=$(dirname "$0")/wait-for-it.sh
chmod +x $wait_for_it
chmod +x $1
$wait_for_it -h $POSTGRES_HOST -p $POSTGRES_PORT -- $1

View File

@ -13,4 +13,3 @@ parser.add_simple_formatter("vc_raw_html", "")
plain_txt = parser.format(text)
print(plain_txt)
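
The snippet above relies on a bbcode parser; a self-contained sketch of the same pattern, assuming the PyPI `bbcode` package and a made-up input string:

```python
# Hypothetical, self-contained version of the snippet above (assumes the
# "bbcode" package): the custom vc_raw_html tag is rendered as nothing,
# so its embedded raw HTML is dropped from the output.
import bbcode

text = "[vc_raw_html]JTNDc2NyaXB0JTNF[/vc_raw_html]Concert le [b]12 avril[/b]"

parser = bbcode.Parser()
parser.add_simple_formatter("vc_raw_html", "")  # drop the tag and its content

plain_txt = parser.format(text)
print(plain_txt)
```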

View File

@ -1,40 +1,42 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import (
amisdutempsdescerises,
)
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), amisdutempsdescerises.CExtractor())
url = "https://amisdutempsdescerises.org/page.php"
url_human = "https://amisdutempsdescerises.org/"
try:
events = u2e.process(url, url_human, cache = "cache-amiscerices.xml", default_values = {"category": "Rencontres & Débats"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-amiscerices.xml",
default_values={"category": "Rencontres & Débats"},
published=True,
)
exportfile = "events-amiscerices.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,37 +1,42 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import arachnee
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), arachnee.CExtractor())
url = "https://www.arachnee-concerts.com/wp-admin/admin-ajax.php?action=movies-filter&per_page=9999&date=NaN.NaN.NaN&theatres=Clermont-Fd&cat=&sorting=&list_all_events=&current_page="
url_human = "https://www.arachnee-concerts.com/agenda-des-concerts/Clermont-Fd/"
try:
events = u2e.process(url, url_human, cache = "cache-arachnee.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-arachnee.html",
default_values={},
published=True,
)
exportfile = "events-arachnee.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,42 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import billetterie_cf
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), billetterie_cf.CExtractor())
url = "https://billetterie-c3c.clermont-ferrand.fr/"
url_human = "https://billetterie-c3c.clermont-ferrand.fr/"
try:
events = u2e.process(url, url_human, cache = "cache-c3c.html", default_values = {"location": "La Cour des 3 Coquins"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-c3c.html",
default_values={"location": "La Cour des 3 Coquins"},
published=True,
)
exportfile = "events-c3c.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,41 +1,38 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors.fbevent import *
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.generic_extractors.fbevent import (
CExtractor,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), CExtractor())
url="https://www.facebook.com/events/3575802569389184/3575802576055850/?active_tab=about"
url = "https://www.facebook.com/events/3575802569389184/3575802576055850/?active_tab=about"
events = u2e.process(url, cache = "fb.html", published = True)
events = u2e.process(url, cache="fb.html", published=True)
exportfile = "event-facebook.json"
print("Saving events to file {}".format(exportfile))
with open(exportfile, "w") as f:
json.dump(events, f, indent=4, default=str)

View File

@ -1,40 +1,42 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.generic_extractors import fbevents
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), fbevents.CExtractor())
url = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
url_human = "https://www.facebook.com/laJeteeClermont/upcoming_hosted_events"
try:
events = u2e.process(url, url_human, cache = "cache-lajetee-fb.html", default_values = {"location": "La Jetée"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lajetee-fb.html",
default_values={"location": "La Jetée"},
published=True,
)
exportfile = "events-lajetee-fb.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,25 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import billetterie_cf
from src.agenda_culturel.import_tasks.downloader import (
ChromiumHeadlessDownloader,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(ChromiumHeadlessDownloader(), billetterie_cf.CExtractor())
url = "https://billetterie-gds.clermont-ferrand.fr/"
url_human = "https://billetterie-gds.clermont-ferrand.fr/"
try:
events = u2e.process(url, url_human, cache = "cache-gds.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-gds.html",
default_values={},
published=True,
)
exportfile = "events-gds.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,39 +1,44 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors.ical import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors.ical import (
ICALExtractor,
)
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), ICALExtractor())
url = "https://calendar.google.com/calendar/ical/programmation.lesaugustes%40gmail.com/public/basic.ics"
url_human = "https://www.cafelesaugustes.fr/la-programmation/"
events = u2e.process(url, url_human, cache = "cache-augustes.ical", default_values = {"category": "Sans catégorie", "location": "Café lecture les Augustes"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-augustes.ical",
default_values={
"category": "Sans catégorie",
"location": "Café lecture les Augustes",
},
published=True,
)
exportfile = "events-augustes.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,43 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import laraymonde
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), laraymonde.CExtractor())
url = "https://www.raymondbar.net/"
url_human = "https://www.raymondbar.net/"
try:
events = u2e.process(url, url_human, cache = "cache-la-raymonde.html", default_values = {"location": "La Raymonde", "category": "Fêtes & Concerts"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-la-raymonde.html",
default_values={
"location": "La Raymonde",
"category": "Fêtes & Concerts",
},
published=True,
)
exportfile = "events-la-raymonde.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lacomedie
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lacomedie.CExtractor())
url = "https://lacomediedeclermont.com/saison24-25/wp-admin/admin-ajax.php?action=load_dates_existantes"
url_human = "https://lacomediedeclermont.com/saison24-25/"
try:
events = u2e.process(url, url_human, cache = "cache-lacomedie.html", default_values = {"location": "La Comédie de Clermont"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lacomedie.html",
default_values={"location": "La Comédie de Clermont"},
published=True,
)
exportfile = "events-lacomedie.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,43 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lacoope
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lacoope.CExtractor())
url = "https://www.lacoope.org/concerts-calendrier/"
url_human = "https://www.lacoope.org/concerts-calendrier/"
try:
events = u2e.process(url, url_human, cache = "cache-lacoope.html", default_values = {"category": "Fêtes & Concerts", "location": "La Coopérative"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lacoope.html",
default_values={
"category": "Fêtes & Concerts",
"location": "La Coopérative",
},
published=True,
)
exportfile = "events-lacoope.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lapucealoreille
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lapucealoreille.CExtractor())
url = "https://www.lapucealoreille63.fr/agenda"
url_human = "https://www.lapucealoreille63.fr/agenda"
try:
events = u2e.process(url, url_human, cache = "cache-lapucealoreille.xml", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lapucealoreille.xml",
default_values={},
published=True,
)
exportfile = "events-lapucealoreille.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.generic_extractors import wordpress_mec
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
url = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
url_human = "https://www.cabaretlepoulailler.fr/agenda/tout-lagenda/"
try:
events = u2e.process(url, url_human, cache = "cache-le-poulailler.html", default_values = {"location": "Le Poulailler"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-le-poulailler.html",
default_values={"location": "Le Poulailler"},
published=True,
)
exportfile = "events-le-poulailler.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lerio
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lerio.CExtractor())
url = "https://www.cinemalerio.com/evenements/"
url_human = "https://www.cinemalerio.com/evenements/"
try:
events = u2e.process(url, url_human, cache = "cache-le-rio.html", default_values = {"location": "Cinéma le Rio", "category": "Cinéma"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-le-rio.html",
default_values={"location": "Cinéma le Rio", "category": "Cinéma"},
published=True,
)
exportfile = "events-le-roi.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import lefotomat
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), lefotomat.CExtractor())
url = "https://www.lefotomat.com/feed"
url_human = "https://www.lefotomat.com/"
try:
events = u2e.process(url, url_human, cache = "cache-lefotomat.xml", default_values = {"location": "Le Fotomat'"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-lefotomat.xml",
default_values={"location": "Le Fotomat'"},
published=True,
)
exportfile = "events-lefotomat.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors import wordpress_mec
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), wordpress_mec.CExtractor())
url = "https://www.lesvinzelles.com/index.php/programme/"
url_human = "https://www.lesvinzelles.com/index.php/programme/"
try:
events = u2e.process(url, url_human, cache = "cache-les-vinzelles.html", default_values = {"location": "Les Vinzelles"}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-les-vinzelles.html",
default_values={"location": "Les Vinzelles"},
published=True,
)
exportfile = "events-les-vinzelles.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.extractor import iguana_agenda
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), iguana_agenda.CExtractor())
url = "https://bibliotheques-clermontmetropole.eu/iguana/Service.PubContainer.cls?uuid=a4a1f992-06da-4ff4-9176-4af0a095c7d1"
url_human = "https://bibliotheques-clermontmetropole.eu/iguana/www.main.cls?surl=AGENDA_Tout%20lagenda"
try:
events = u2e.process(url, url_human, cache = "cache-mediatheques.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-mediatheques.html",
default_values={},
published=True,
)
exportfile = "events-mediatheques.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,8 +1,8 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
@ -18,23 +18,23 @@ parent = os.path.dirname(current)
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.custom_extractors import *
from src.agenda_culturel.import_tasks.custom_extractors import mille_formes
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), mille_formes.CExtractor())
url = "https://www.milleformes.fr/programme"
url_human = "https://www.milleformes.fr/programme"
try:
events = u2e.process(url, url_human, cache = "cache-1000formes.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-1000formes.html",
default_values={},
published=True,
)
exportfile = "events-1000formes.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors import mobilizon
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), mobilizon.CExtractor())
url = "https://mobilizon.fr/@attac63/events?"
url_human = "https://mobilizon.fr/@attac63/events"
try:
events = u2e.process(url, url_human, cache = "cache-attac63.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-attac63.html",
default_values={},
published=True,
)
exportfile = "events-attac63.json"
print("Saving events to file {}".format(exportfile))

View File

@ -1,40 +1,40 @@
#!/usr/bin/python3
# coding: utf-8
import os
import json
import os
import sys
# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))
# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)
# adding the parent directory to
# the sys.path.
sys.path.append(parent)
sys.path.append(parent + "/src")
from src.agenda_culturel.import_tasks.downloader import *
from src.agenda_culturel.import_tasks.extractor import *
from src.agenda_culturel.import_tasks.importer import *
from src.agenda_culturel.import_tasks.generic_extractors import *
from src.agenda_culturel.import_tasks.downloader import SimpleDownloader
from src.agenda_culturel.import_tasks.generic_extractors import apidae_tourisme
from src.agenda_culturel.import_tasks.importer import URL2Events
if __name__ == "__main__":
u2e = URL2Events(SimpleDownloader(), apidae_tourisme.CExtractor())
url = "https://widgets.apidae-tourisme.com/filter.js?widget[id]=48"
url_human = "https://ens.puy-de-dome.fr/agenda.html"
try:
events = u2e.process(url, url_human, cache = "cache-puydedome.html", default_values = {}, published = True)
events = u2e.process(
url,
url_human,
cache="cache-puydedome.html",
default_values={},
published=True,
)
exportfile = "events-puydedome.json"
print("Saving events to file {}".format(exportfile))

View File

@ -3,7 +3,7 @@
## La Comédie de Clermont
URL listing the dates that have events: https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php?action=load_dates_existantes
URL giving the details of one date, with POST parameters:
```curl --data "action=load_evenements_jour" --data "jour=2024-04-19" "https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php"```
The returned data is fairly terse HTML containing the essentials.
@ -12,7 +12,7 @@ The returned data is fairly terse HTML containing the essentials.
In the source of https://www.lacoope.org/concerts-calendrier/ there is a JavaScript array containing the URLs of the events. This array may carry a "Gratuit" (free) tag. The event time is not included.
Each event page provides:
- a meta name="description" tag
- a https://calendar.google.com/calendar/ URL with most of the data
## Le caveau de la michodière
@ -20,10 +20,9 @@ Each event page provides:
The address https://www.lecaveaudelamichodiere.com/concerts/ lists the concerts of the current month.
The page is populated by a JavaScript request that is hard to replay on its own, since it returns a 403 error (proprietary eventon plugin).
If we grab the event identifier (of the form event_11377_0), we can forge a URL such as
```https://www.lecaveaudelamichodiere.com/wp-admin/admin-ajax.php?action=eventon_ics_download&event_id=11377&ri=0``` to retrieve an iCal file of the event.
## La petite gaillarde
The RSS feed https://lapetitegaillarde.fr/?feed=rss2 is reasonably well structured.
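
A minimal sketch of the two request patterns described above, using the `requests` package (an assumption; the project's own downloaders are not used here):

```python
# Hedged sketch of the access patterns documented above (assumes "requests").
import requests

# La Comédie de Clermont: POST the day to the admin-ajax endpoint; the
# response is a short HTML fragment with that day's events.
resp = requests.post(
    "https://lacomediedeclermont.com/saison23-24/wp-admin/admin-ajax.php",
    data={"action": "load_evenements_jour", "jour": "2024-04-19"},
)
print(resp.text[:200])

# Le caveau de la michodière: forge the eventon ICS URL from an identifier
# such as event_11377_0 to retrieve an iCal version of the event.
ics = requests.get(
    "https://www.lecaveaudelamichodiere.com/wp-admin/admin-ajax.php",
    params={"action": "eventon_ics_download", "event_id": "11377", "ri": "0"},
)
print(ics.text[:200])
```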

View File

@ -62,7 +62,7 @@ extend-exclude = '''
[tool.ruff]
format = "grouped"
output-format = "grouped"
line-length = 88 # black default
extend-exclude = [
"src/migrations/*",
@ -73,6 +73,7 @@ extend-exclude = [
"*__init__.py",
]
[tool.ruff.lint]
select = ["E", "F"]
ignore = [
"E501", # line too long, handled by black
@ -80,19 +81,17 @@ ignore = [
"C901", # too complex
"F405", # name may be undefined, or defined from star imports
]
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
# Assume Python 3.11.
target-version = "py311"
[tool.ruff.mccabe]
[tool.ruff.lint.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
[tool.ruff.lint.per-file-ignores]
"experimentations/*.py" = ["E402"]
[tool.ruff.isort]
[tool.ruff.lint.isort]
force-to-top = ["src"]
known-first-party = ["src"]
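
For context, the per-file ignore of E402 above matches the pattern used by the experimentation scripts, which extend sys.path before importing project modules; a minimal illustration (hypothetical file under experimentations/):

```python
# Hypothetical experimentations/example.py: sys.path has to be adjusted before
# the project imports, so module-level imports cannot all sit at the top of the
# file -- exactly what E402 flags, hence the per-file ignore.
import os
import sys

parent = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(parent)
sys.path.append(parent + "/src")

import json  # ruff would report E402 here without the per-file ignore

print(json.dumps({"sys_path_entries": len(sys.path)}))
```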

View File

@ -3,4 +3,4 @@
APP_PATH="src"
ruff check $APP_PATH --fix
black $APP_PATH

View File

@ -1,22 +1,22 @@
from django.contrib import admin
from django import forms
from .models import (
Event,
Category,
Tag,
StaticContent,
DuplicatedEvents,
BatchImportation,
RecurrentImport,
Place,
Message,
ReferenceLocation,
Organisation,
)
from django.contrib import admin
from django_better_admin_arrayfield.admin.mixins import DynamicArrayMixin
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
from django_better_admin_arrayfield.models.fields import DynamicArrayField
from .models import (
BatchImportation,
Category,
DuplicatedEvents,
Event,
Message,
Organisation,
Place,
RecurrentImport,
ReferenceLocation,
StaticContent,
Tag,
)
admin.site.register(Category)
admin.site.register(Tag)

View File

@ -1,14 +1,13 @@
from datetime import datetime, timedelta, date, time
import calendar
from django.db.models import Q, F
import logging
from datetime import date, datetime, time, timedelta
from django.db.models import CharField, F, Q
from django.db.models.functions import Lower
from django.http import Http404
from django.template.defaultfilters import date as _date
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django.template.defaultfilters import date as _date
from django.http import Http404
from django.db.models import CharField
from django.db.models.functions import Lower
import logging
CharField.register_lookup(Lower)
@ -88,9 +87,10 @@ class DayInCalendar:
self._add_event_internal(event)
def _add_event_internal(self, event):
from .models import Category
from copy import copy
from .models import Category
# copy event
local_event = copy(event)
@ -234,7 +234,6 @@ class DayInCalendar:
class IntervalInDay(DayInCalendar):
def __init__(self, d, id, name, short_name):
self.name = name
self.short_name = short_name
@ -244,7 +243,13 @@ class IntervalInDay(DayInCalendar):
class CalendarList:
def __init__(
self, firstdate, lastdate, filter=None, exact=False, ignore_dup=None, qs=None
self,
firstdate,
lastdate,
filter=None,
exact=False,
ignore_dup=None,
qs=None,
):
self.firstdate = firstdate
self.lastdate = lastdate
@ -308,7 +313,8 @@ class CalendarList:
timezone.get_default_timezone(),
)
lastdatetime = timezone.make_aware(
datetime.combine(self.c_lastdate, time.max), timezone.get_default_timezone()
datetime.combine(self.c_lastdate, time.max),
timezone.get_default_timezone(),
)
qs = (
qs.filter(
@ -369,14 +375,16 @@ class CalendarList:
self.calendar_days[e.start_day.__str__()].add_event(e)
else:
for d in daterange(
max(e.start_day, self.firstdate), min(e.end_day, self.lastdate)
max(e.start_day, self.firstdate),
min(e.end_day, self.lastdate),
):
self.calendar_days[d.__str__()].add_event(e)
else:
for e_rec in e.get_recurrences_between(firstdate, lastdate):
end = e_rec.start_day if e_rec.end_day is None else e_rec.end_day
for d in daterange(
max(e_rec.start_day, self.firstdate), min(end, self.lastdate)
max(e_rec.start_day, self.firstdate),
min(end, self.lastdate),
):
self.calendar_days[d.__str__()].add_event(e_rec)

View File

@ -1,23 +1,43 @@
import os
import json
from celery import Celery, Task, chain
from celery.schedules import crontab
from celery.utils.log import get_task_logger
from celery.exceptions import MaxRetriesExceededError
import os
import time as time_
from django.conf import settings
from celery.signals import worker_ready
from contextlib import contextmanager
from .import_tasks.extractor import Extractor
from .import_tasks.importer import URL2Events
from .import_tasks.downloader import SimpleDownloader, ChromiumHeadlessDownloader
from .import_tasks.custom_extractors import *
from .import_tasks.generic_extractors import *
from celery import Celery, Task, chain
from celery.exceptions import MaxRetriesExceededError
from celery.schedules import crontab
from celery.signals import worker_ready
from celery.utils.log import get_task_logger
from django.conf import settings
from django.core.cache import cache
from datetime import date
from .import_tasks.custom_extractors import (
lacomedie,
lacoope,
lefotomat,
lapucealoreille,
billetterie_cf,
arachnee,
lerio,
laraymonde,
mille_formes,
amisdutempsdescerises,
)
from .import_tasks.downloader import (
ChromiumHeadlessDownloader,
SimpleDownloader,
)
from .import_tasks.extractor import Extractor
from .import_tasks.generic_extractors import (
wordpress_mec,
apidae_tourisme,
iguana_agenda,
mobilizon,
ical,
fbevents,
)
from .import_tasks.importer import URL2Events
# Set the default Django settings module for the 'celery' program.
APP_ENV = os.getenv("APP_ENV", "dev")
@ -78,7 +98,13 @@ def close_import_task(taskid, success, error_message, importer):
task.nb_updated = 0 if importer is None else importer.get_nb_updated_events()
task.nb_removed = 0 if importer is None else importer.get_nb_removed_events()
fields = ["status", "nb_initial", "nb_updated", "nb_imported", "nb_removed"]
fields = [
"status",
"nb_initial",
"nb_updated",
"nb_imported",
"nb_removed",
]
if not success:
logger.error(error_message)
task.error_message = str(error_message)[:512]
@ -89,6 +115,7 @@ def close_import_task(taskid, success, error_message, importer):
@app.task(bind=True)
def import_events_from_json(self, json):
from agenda_culturel.models import BatchImportation
from .db_importer import DBImporterEvents
# create a batch importation
@ -124,7 +151,8 @@ class ChromiumTask(Task):
def run_recurrent_import_internal(rimport, downloader, req_id):
from agenda_culturel.models import RecurrentImport, BatchImportation
from agenda_culturel.models import BatchImportation, RecurrentImport
from .db_importer import DBImporterEvents
logger.info("Run recurrent import: {}".format(req_id))
@ -282,7 +310,6 @@ def run_recurrent_import(self, pklist):
def run_recurrent_imports_from_list(pklist):
tasks = chain(
run_recurrent_import.s(pklist) if i == 0 else run_recurrent_import.s()
for i in range(len(pklist))
@ -350,7 +377,7 @@ def run_all_recurrent_imports(self, only_fb=False):
@app.task(bind=True)
def run_all_recurrent_imports_failed(self):
from agenda_culturel.models import RecurrentImport, BatchImportation
from agenda_culturel.models import BatchImportation, RecurrentImport
logger.info("Run only failed imports")
imports = RecurrentImport.objects.all().order_by("pk")
@ -367,7 +394,7 @@ def run_all_recurrent_imports_failed(self):
@app.task(bind=True)
def run_all_recurrent_imports_canceled(self):
from agenda_culturel.models import RecurrentImport, BatchImportation
from agenda_culturel.models import BatchImportation, RecurrentImport
logger.info("Run only canceled imports")
imports = RecurrentImport.objects.all().order_by("pk")
@ -406,9 +433,9 @@ def import_events_from_url(
email=None,
comments=None,
):
from agenda_culturel.models import BatchImportation, Event
from .db_importer import DBImporterEvents
from agenda_culturel.models import BatchImportation
from agenda_culturel.models import Event
if isinstance(urls, list):
url = urls[0]
@ -419,7 +446,6 @@ def import_events_from_url(
with memcache_chromium_lock(self.app.oid) as acquired:
if acquired:
logger.info(
"URL import: {}".format(self.request.id) + " force " + str(force)
)
@ -431,7 +457,6 @@ def import_events_from_url(
existing = None if force else Event.objects.filter(uuids__contains=[url])
# if it's unknown
if force or len(existing) == 0:
# create an importer
importer = DBImporterEvents(self.request.id)
@ -474,7 +499,10 @@ def import_events_from_url(
)
else:
close_import_task(
self.request.id, False, "Cannot find any event", importer
self.request.id,
False,
"Cannot find any event",
importer,
)
except Exception as e:
logger.error(e)
@ -503,9 +531,9 @@ def import_events_from_urls(
@app.task(base=ChromiumTask, bind=True)
def update_orphan_pure_import_events(self):
from agenda_culturel.models import RecurrentImport
from agenda_culturel.models import Event
from django.db.models import Q, F
from django.db.models import F, Q
from agenda_culturel.models import Event, RecurrentImport
# get all recurrent sources
srcs = RecurrentImport.objects.all().values_list("source")

View File

@ -1,8 +1,9 @@
from agenda_culturel.models import Event
import json
import logging
from django.utils import timezone
import logging
from agenda_culturel.models import Event
logger = logging.getLogger(__name__)
@ -55,7 +56,10 @@ class DBImporterEvents:
if "url" in structure["header"]:
self.url = structure["header"]["url"]
else:
return (False, "JSON is not correctly structured: missing url in header")
return (
False,
"JSON is not correctly structured: missing url in header",
)
if "date" in structure["header"]:
self.date = structure["header"]["date"]

View File

@ -1,23 +1,22 @@
import django_filters
from django.utils.translation import gettext_lazy as _
from django import forms
from django.contrib.postgres.search import SearchQuery, SearchHeadline
from django.db.models import Q, F
from datetime import date, timedelta
from urllib.parse import urlparse, parse_qs, urlencode
from urllib.parse import parse_qs, urlencode, urlparse
from django.http import QueryDict
import django_filters
from django import forms
from django.contrib.gis.measure import D
from django.contrib.postgres.search import SearchHeadline, SearchQuery
from django.db.models import F, Q
from django.http import QueryDict
from django.utils.translation import gettext_lazy as _
from .models import (
ReferenceLocation,
RecurrentImport,
Tag,
Event,
Category,
Message,
DuplicatedEvents,
Event,
Message,
RecurrentImport,
ReferenceLocation,
Tag,
)
@ -188,7 +187,6 @@ class EventFilter(django_filters.FilterSet):
return self.request.get_full_path().split("?")[0]
def get_cleaned_data(self, name):
try:
return self.form.cleaned_data[name]
except AttributeError:
@ -510,7 +508,12 @@ class SimpleSearchEventFilter(django_filters.FilterSet):
| Q(exact_location__name__icontains=value)
| Q(description__icontains=value)
)
for f in ["title", "category__name", "exact_location__name", "description"]:
for f in [
"title",
"category__name",
"exact_location__name",
"description",
]:
params = {
f
+ "_hl": SearchHeadline(
@ -579,7 +582,14 @@ class SearchEventFilter(django_filters.FilterSet):
class Meta:
model = Event
fields = ["title", "location", "description", "category", "tags", "start_day"]
fields = [
"title",
"location",
"description",
"category",
"tags",
"start_day",
]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@ -621,7 +631,6 @@ class DuplicatedEventsFilter(django_filters.FilterSet):
class RecurrentImportFilter(django_filters.FilterSet):
name = django_filters.ModelMultipleChoiceFilter(
label="Filtrer par nom",
field_name="name",

View File

@ -1,57 +1,58 @@
import logging
import os
from string import ascii_uppercase as auc
from django.conf import settings
from django.core.files import File
from django.forms import (
ModelForm,
ValidationError,
TextInput,
Form,
URLField,
MultipleHiddenInput,
Textarea,
BooleanField,
CharField,
ChoiceField,
RadioSelect,
MultipleChoiceField,
BooleanField,
EmailField,
Form,
HiddenInput,
ModelChoiceField,
EmailField,
ModelForm,
MultipleChoiceField,
MultipleHiddenInput,
RadioSelect,
Textarea,
TextInput,
URLField,
ValidationError,
formset_factory,
)
from django.forms import formset_factory
from django.utils.formats import localize
from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from django_better_admin_arrayfield.forms.widgets import DynamicArrayWidget
from .models import (
Event,
RecurrentImport,
CategorisationRule,
Place,
Category,
Tag,
Event,
Message,
Place,
RecurrentImport,
Tag,
)
from django.conf import settings
from django.core.files import File
from django.utils.translation import gettext_lazy as _
from string import ascii_uppercase as auc
from .templatetags.utils_extra import int_to_abc
from django.utils.safestring import mark_safe
from django.utils.formats import localize
from .templatetags.event_extra import event_field_verbose_name, field_to_html
import os
import logging
from .templatetags.utils_extra import int_to_abc
logger = logging.getLogger(__name__)
class GroupFormMixin:
template_name = "agenda_culturel/forms/div_group.html"
class FieldGroup:
def __init__(
self, id, label, display_label=False, maskable=False, default_masked=True
self,
id,
label,
display_label=False,
maskable=False,
default_masked=True,
):
self.id = id
self.label = label
@ -93,7 +94,10 @@ class GroupFormMixin:
def fields_by_group(self):
return [(g, self.get_fields_in_group(g)) for g in self.groups] + [
(GroupFormMixin.FieldGroup("other", _("Other")), self.get_no_group_fields())
(
GroupFormMixin.FieldGroup("other", _("Other")),
self.get_no_group_fields(),
)
]
def clean(self):
@ -434,7 +438,9 @@ class EventModerateForm(ModelForm):
required_css_class = "required"
tags = MultipleChoiceField(
label=_("Tags"), help_text=_("Select tags from existing ones."), required=False
label=_("Tags"),
help_text=_("Select tags from existing ones."),
required=False,
)
new_tags = MultipleChoiceFieldAcceptAll(
@ -534,12 +540,18 @@ class FixDuplicates(Form):
if initial is None:
initial = "Merge"
choices += [
("Merge", _("Create a new version by merging (interactive mode).") + extra)
(
"Merge",
_("Create a new version by merging (interactive mode).") + extra,
)
]
for i, e in enumerate(events):
if e.status != Event.STATUS.TRASH:
choices += [
("Remove-" + str(e.pk), _("Make {} independent.").format(auc[i]))
(
"Remove-" + str(e.pk),
_("Make {} independent.").format(auc[i]),
)
]
choices += [("NotDuplicates", _("Make all versions independent."))]
@ -611,9 +623,15 @@ class MergeDuplicates(Form):
if self.event:
choices = [
(
("event_" + str(e.pk), _("Value of version {}").format(e.pk))
(
"event_" + str(e.pk),
_("Value of version {}").format(e.pk),
)
if e != self.event
else ("event_" + str(e.pk), _("Value of the selected version"))
else (
"event_" + str(e.pk),
_("Value of the selected version"),
)
)
for e in self.events
]
@ -895,7 +913,6 @@ class PlaceForm(GroupFormMixin, ModelForm):
class MessageForm(ModelForm):
class Meta:
model = Message
fields = ["subject", "name", "email", "message", "related_event"]
@ -912,7 +929,6 @@ class MessageForm(ModelForm):
class MessageEventForm(ModelForm):
class Meta:
model = Message
fields = ["message"]

View File

@ -1,5 +1,5 @@
from os.path import dirname, basename, isfile, join
import glob
from os.path import basename, dirname, isfile, join
modules = glob.glob(join(dirname(__file__), "*.py"))
__all__ = [

View File

@ -1,23 +1,29 @@
from ..extractor import Extractor
import json
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import pytz
import html
import json
from datetime import datetime
from urllib.parse import urlparse
import pytz
from bs4 import BeautifulSoup
from ..extractor import Extractor
# A class dedicated to get events from les amis du temps des cerises
# Website https://amisdutempsdescerises.org/
class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.data = b'------toto\r\nContent-Disposition: form-data; name="p"\r\n\r\nfutur\r\n------toto--\r\n'
self.content_type = "multipart/form-data; boundary=----toto"
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
self.set_header(url)
self.clear_events()

View File

@ -1,14 +1,15 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
import re
from datetime import datetime, timedelta, date
from datetime import date, datetime, timedelta
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Arachnée Concert
# URL: https://www.arachnee-concerts.com/agenda-des-concerts/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
self.possible_dates = {}
@ -29,11 +30,16 @@ class CExtractor(TwoStepsExtractorNoPause):
self.theater = match[1]
return super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
containers = soup.select("ul.event_container>li")
@ -59,7 +65,6 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
title = ", ".join(
[

View File

@ -1,15 +1,16 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
import re
from datetime import datetime, timedelta
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La Cour des 3 Coquins and Graines de spectacle
# URL: https://billetterie-c3c.clermont-ferrand.fr//
class CExtractor(TwoStepsExtractor):
def extract(
self,
content,
@ -22,7 +23,13 @@ class CExtractor(TwoStepsExtractor):
):
self.root_address = "https://" + urlparse(url).netloc + "/"
return super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
def category_agenda(self, category):
@ -177,7 +184,6 @@ class CExtractor(TwoStepsExtractor):
category = categories[0]
for dt in datetimes:
self.add_event_with_props(
default_values,
event_url,

View File

@ -1,7 +1,9 @@
from ..twosteps_extractor import TwoStepsExtractor
from datetime import date
import json5
from bs4 import BeautifulSoup
from datetime import datetime, date
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La Coopérative de Mai:
@ -120,7 +122,12 @@ class CExtractor(TwoStepsExtractor):
# on ajoute éventuellement les informations complémentaires
d_suite = ""
for d in ["#typespec", "#dureespec", "#lieuspec", ".lkuncontdroitespec"]:
for d in [
"#typespec",
"#dureespec",
"#lieuspec",
".lkuncontdroitespec",
]:
comp_desc = soup.select(d)
if comp_desc and len(comp_desc) > 0:
for desc in comp_desc:

View File

@ -1,9 +1,11 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..generic_extractors.ggcal_link import GGCalendar
import re
import json5
from bs4 import BeautifulSoup
from ..generic_extractors.ggcal_link import GGCalendar
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La Coopérative de Mai:
# URL: https://www.lacoope.org/concerts-calendrier/

View File

@ -1,7 +1,8 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from La puce à l'oreille
# URL: https://www.lapucealoreille63.fr/

View File

@ -1,16 +1,15 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from bs4 import BeautifulSoup
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Raymond Bar
# URL: https://www.raymondbar.net/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
links = soup.select(".showsList .showMore")

View File

@ -1,7 +1,8 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from Le Fotomat'
# URL: https://www.lefotomat.com/

View File

@ -1,20 +1,20 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from datetime import datetime
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Cinéma Le Rio (Clermont-Ferrand)
# URL: https://www.cinemalerio.com/evenements/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
self.possible_dates = {}
self.theater = None
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
links = soup.select("td.seance_link a")
@ -37,7 +37,6 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
):
soup = BeautifulSoup(event_content, "html.parser")
title = soup.select_one("h1").text

View File

@ -1,14 +1,15 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from datetime import date
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Mille formes
# URL: https://www.milleformes.fr/programme
class CExtractor(TwoStepsExtractorNoPause):
def extract(
self,
content,
@ -22,7 +23,13 @@ class CExtractor(TwoStepsExtractorNoPause):
self.root_address = "https://" + urlparse(url).netloc + "/"
self.today = date.today()
return super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
def parse_category(self, cat):
@ -74,7 +81,6 @@ class CExtractor(TwoStepsExtractorNoPause):
return result
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
links = soup.select(".cell a.evenement")
for lk in links:

View File

@ -1,19 +1,20 @@
from urllib.parse import urlencode
import urllib.request
from urllib.request import Request
import os
import time
import urllib.request
from abc import ABC, abstractmethod
from urllib.parse import urlencode
from urllib.request import Request
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import (
StaleElementReferenceException,
NoSuchElementException,
SessionNotCreatedException,
StaleElementReferenceException,
TimeoutException,
WebDriverException,
SessionNotCreatedException,
)
from abc import ABC, abstractmethod
import time
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
class Downloader(ABC):
@ -25,7 +26,13 @@ class Downloader(ABC):
pass
def get_content(
self, url, cache=None, referer=None, post=None, content_type=None, data=None
self,
url,
cache=None,
referer=None,
post=None,
content_type=None,
data=None,
):
if cache and os.path.exists(cache):
print("Loading cache ({})".format(cache))
@ -33,7 +40,11 @@ class Downloader(ABC):
content = "\n".join(f.readlines())
else:
content = self.download(
url, referer=referer, post=post, content_type=content_type, data=data
url,
referer=referer,
post=post,
content_type=content_type,
data=data,
)
if cache:

View File

@ -1,8 +1,9 @@
from abc import ABC, abstractmethod
from enum import IntEnum
from datetime import datetime, time, date, timedelta
import re
import unicodedata
from abc import ABC, abstractmethod
from datetime import date, datetime, time, timedelta
from enum import IntEnum
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
@ -71,7 +72,8 @@ class Extractor(ABC):
def parse_french_date(text, default_year=None, default_year_by_proximity=None):
# format NomJour Numero Mois Année
m = re.search(
"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)", text
"[a-zA-ZéÉûÛ:.]+[ ]*([0-9]+)[er]*[ ]*([a-zA-ZéÉûÛ:.]+)[ ]*([0-9]+)",
text,
)
if m:
day = m.group(1)
@ -172,7 +174,12 @@ class Extractor(ABC):
@abstractmethod
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
pass
@ -290,8 +297,10 @@ class Extractor(ABC):
return {"header": self.header, "events": self.events}
def clean_url(url):
from .generic_extractors.fbevent import (
CExtractor as FacebookEventExtractor,
)
from .generic_extractors.ical import ICALExtractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
result = url
for e in [ICALExtractor, FacebookEventExtractor]:
@ -299,11 +308,13 @@ class Extractor(ABC):
return result
def get_default_extractors(single_event=False):
from .generic_extractors.ical import ICALExtractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
from .generic_extractors.fbevent import (
CExtractor as FacebookEventExtractor,
)
from .generic_extractors.ggcal_link import (
CExtractor as GoogleCalendarLinkEventExtractor,
)
from .generic_extractors.ical import ICALExtractor
if single_event:
return [
@ -322,9 +333,13 @@ class Extractor(ABC):
# A class that only produce a not found event
class EventNotFoundExtractor(Extractor):
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
self.set_header(url)
self.clear_events()


@ -1,5 +1,5 @@
from os.path import dirname, basename, isfile, join
import glob
from os.path import basename, dirname, isfile, join
modules = glob.glob(join(dirname(__file__), "*.py"))
__all__ = [


@ -1,13 +1,12 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from apidae-tourisme widgets
class CExtractor(TwoStepsExtractorNoPause):
def build_event_url_list(self, content, infuture_days=180):
# Get line starting with wrapper.querySelector(".results_agenda").innerHTML = "
# split using "=" and keep the end
# strip it, and remove the first character (") and the two last ones (";)
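
The three comments above outline a small scraping how-to: find the JavaScript line that injects the agenda results, keep what follows the "=", then trim the surrounding quote characters. Below is a minimal sketch of that string handling, assuming `content` holds the downloaded widget script; the function name and the exact marker string are illustrative, not taken from the project.

```python
# Hedged sketch of the parsing steps described in the comments above.
# `content` is assumed to be the JavaScript source of the apidae widget.
def extract_results_html(content: str):
    marker = 'wrapper.querySelector(".results_agenda").innerHTML ='
    for line in content.splitlines():
        line = line.strip()
        if line.startswith(marker):
            value = line.split("=", 1)[1].strip()  # keep the end after "="
            return value[1:-2]  # drop the leading quote and the trailing '";'
    return None
```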


@ -1,14 +1,14 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import time as t
from django.utils.translation import gettext_lazy as _
import json
import logging
import re
import time as t
from datetime import datetime
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from django.utils.translation import gettext_lazy as _
from ..extractor import Extractor
import json
import logging
logger = logging.getLogger(__name__)
@ -210,7 +210,6 @@ class FacebookEvent:
return self.get_element("parent_if_exists_or_self")["id"]
def build_events(self, url):
if self.neighbor_events is None or len(self.neighbor_events) == 0:
return [self.build_event(url)]
else:
@ -228,7 +227,6 @@ class FacebookEvent:
class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.has_2nd_method = True
@ -236,8 +234,8 @@ class CExtractor(Extractor):
def prepare_2nd_extract_dler(downloader):
if downloader.support_2nd_extract:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
path = './/div[not(@aria-hidden)]/div[@aria-label="Allow all cookies"]'
try:
@ -307,7 +305,12 @@ class CExtractor(Extractor):
return u.netloc in url_list
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
# NOTE: this method does not use url_human = None and default_values = None
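
For context on the `prepare_2nd_extract_dler` hunk above, which only reorders the Selenium imports used for Facebook's cookie banner: a consent click built from those imports and the XPath shown there typically looks like the sketch below. The driver handle, the 10-second timeout and the blanket exception handling are assumptions for illustration, not the project's code.

```python
# Hedged sketch of dismissing a consent dialog with the Selenium imports
# reordered above. `driver`, the timeout and the fallback are assumptions.
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait


def click_consent(
    driver,
    xpath='.//div[not(@aria-hidden)]/div[@aria-label="Allow all cookies"]',
    timeout=10,
):
    try:
        button = WebDriverWait(driver, timeout).until(
            EC.element_to_be_clickable((By.XPATH, xpath))
        )
        button.click()
        return True
    except Exception:
        return False
```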


@ -1,13 +1,14 @@
from ..twosteps_extractor import TwoStepsExtractor
from .fbevent import FacebookEvent, CExtractor as FacebookEventExtractor
from bs4 import BeautifulSoup
import json
import logging
import os
from datetime import datetime
from bs4 import BeautifulSoup
from django.utils.translation import gettext_lazy as _
import logging
from ..twosteps_extractor import TwoStepsExtractor
from .fbevent import CExtractor as FacebookEventExtractor
from .fbevent import FacebookEvent
logger = logging.getLogger(__name__)
@ -15,7 +16,6 @@ logger = logging.getLogger(__name__)
# A class dedicated to get events from a facebook events page
# such as https://www.facebook.com/laJeteeClermont/events
class CExtractor(TwoStepsExtractor):
def __init__(self):
super().__init__()
self.has_2nd_method_in_list = True
@ -41,7 +41,6 @@ class CExtractor(TwoStepsExtractor):
self.find_event_id_fragment_in_array(e)
def find_in_js(self, soup):
for json_script in soup.find_all("script", type="application/json"):
json_txt = json_script.get_text()
json_struct = json.loads(json_txt)


@ -1,11 +1,11 @@
from datetime import datetime
from bs4 import BeautifulSoup
from urllib.parse import urlparse, parse_qs
import dateutil.parser
from ..extractor import Extractor
import bbcode
import logging
from datetime import datetime
from urllib.parse import parse_qs, urlparse
import dateutil.parser
from bs4 import BeautifulSoup
from ..extractor import Extractor
logger = logging.getLogger(__name__)
@ -115,15 +115,18 @@ class CExtractor(Extractor):
return image
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
soup = BeautifulSoup(content, "html.parser")
for ggu in self.possible_urls:
link_calendar = soup.select('a[href^="' + ggu + '"]')
if len(link_calendar) != 0:
gg_cal = GGCalendar(link_calendar[0]["href"])
if gg_cal.is_valid_event():


@ -1,17 +1,14 @@
import icalendar
import warnings
from datetime import date, datetime, timedelta
import bbcode
from datetime import datetime, date, timedelta
from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning
import icalendar
import pytz
from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning
from celery.utils.log import get_task_logger
from ..extractor import Extractor
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
@ -68,7 +65,12 @@ class ICALExtractor(Extractor):
return url
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning)


@ -1,13 +1,14 @@
from ..twosteps_extractor import TwoStepsExtractorNoPause
from ..extractor import Extractor
from bs4 import BeautifulSoup
from urllib.parse import urlparse
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractorNoPause
# A class dedicated to get events from Raymond Bar
# URL: https://www.raymondbar.net/
class CExtractor(TwoStepsExtractorNoPause):
def __init__(self):
super().__init__()
@ -35,7 +36,6 @@ class CExtractor(TwoStepsExtractorNoPause):
return tags
def build_event_url_list(self, content, infuture_days=180):
soup = BeautifulSoup(content, "html.parser")
root_address_human = self.url_human.split("?")[0]
@ -72,7 +72,6 @@ class CExtractor(TwoStepsExtractorNoPause):
default_values=None,
published=False,
):
root_address_human = "https://" + urlparse(self.url_human).netloc + "/"
soup = BeautifulSoup(event_content, "xml")


@ -1,11 +1,12 @@
from ..extractor import Extractor
import dateutil.parser
import logging
from datetime import datetime, timezone
import requests
from urllib.parse import urlparse
import dateutil.parser
import requests
from bs4 import BeautifulSoup
import logging
from ..extractor import Extractor
logger = logging.getLogger(__name__)
@ -13,14 +14,12 @@ logger = logging.getLogger(__name__)
# A class dedicated to get events from les amis du temps des cerises
# Website https://amisdutempsdescerises.org/
class CExtractor(Extractor):
def __init__(self):
super().__init__()
self.no_downloader = True
# Source code adapted from https://framagit.org/Marc-AntoineA/mobilizon-client-python
def _request(self, body, data):
headers = {}
response = requests.post(
@ -33,17 +32,16 @@ class CExtractor(Extractor):
response_json = response.json()
if "errors" in response_json:
raise Exception(
f'Errors while requesting { body }. { str(response_json["errors"]) }'
f"Errors while requesting {body}. {str(response_json['errors'])}"
)
return response_json["data"]
else:
raise Exception(
f"Error while requesting. Status code: { response.status_code }"
f"Error while requesting. Status code: {response.status_code}"
)
def _oncoming_events_number(self):
query = """
query($preferredUsername: String!, $afterDatetime: DateTime) {
group(preferredUsername: $preferredUsername) {
@ -123,13 +121,17 @@ query($preferredUsername: String!, $afterDatetime: DateTime, $page: Int) {
return events
def extract(
self, content, url, url_human=None, default_values=None, published=False
self,
content,
url,
url_human=None,
default_values=None,
published=False,
):
self.set_header(url)
self.clear_events()
if "@" in url:
# TODO: quand on a
# https://mobilizon.fr/@xr_clermont_ferrand@mobilizon.extinctionrebellion.fr/events
# on doit retourner :


@ -1,12 +1,12 @@
from ..twosteps_extractor import TwoStepsExtractor
from ..extractor import Extractor
from bs4 import BeautifulSoup
from ..extractor import Extractor
from ..twosteps_extractor import TwoStepsExtractor
# A class dedicated to get events from MEC Wordpress plugin
# URL: https://webnus.net/modern-events-calendar/
class CExtractor(TwoStepsExtractor):
def local2agendaCategory(self, category):
mapping = {
"Musique": "Fêtes & Concerts",


@ -1,9 +1,9 @@
import logging
from .downloader import SimpleDownloader
from .extractor import Extractor
from .generic_extractors.fbevent import CExtractor as FacebookEventExtractor
import logging
logger = logging.getLogger(__name__)
@ -37,7 +37,11 @@ class URL2Events:
if content is None:
content = self.downloader.get_content(
url, cache, referer=referer, content_type=content_type, data=data
url,
cache,
referer=referer,
content_type=content_type,
data=data,
)
if content is None:
@ -70,6 +74,11 @@ class URL2Events:
logger.info("Using cookie trick on a facebook event")
e.prepare_2nd_extract()
return self.process(
url, url_human, cache, default_values, published, False
url,
url_human,
cache,
default_values,
published,
False,
)
return None


@ -1,8 +1,8 @@
import datetime
import logging
from abc import abstractmethod
import logging
from .extractor import Extractor
import datetime
logger = logging.getLogger(__name__)
@ -12,7 +12,6 @@ logger = logging.getLogger(__name__)
# - then for each document downloaded from these urls, build the events
# This class is an abstract class
class TwoStepsExtractor(Extractor):
def __init__(self):
super().__init__()
self.has_2nd_method_in_list = False
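
The comments in the hunk above describe the two-step design: first collect the event URLs from an index page, then download each URL and build the events from the resulting documents. A minimal sketch of that pattern follows; the class and method names are illustrative assumptions rather than the project's real API.

```python
# Minimal sketch of the two-step extraction pattern described above:
# step 1 lists event URLs from an index page, step 2 downloads each page
# and builds one event from it. Names here are illustrative assumptions.
from abc import ABC, abstractmethod


class TwoStepsSketch(ABC):
    def __init__(self, downloader):
        self.downloader = downloader

    @abstractmethod
    def build_event_url_list(self, index_content):
        """Return the event page URLs found in the index document."""

    @abstractmethod
    def add_event_from_content(self, event_content, event_url):
        """Parse one event page and return the extracted event."""

    def extract(self, index_url):
        index_content = self.downloader.get_content(index_url)
        events = []
        for event_url in self.build_event_url_list(index_content):
            event_content = self.downloader.get_content(event_url)
            events.append(self.add_event_from_content(event_content, event_url))
        return events
```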
@ -145,7 +144,6 @@ class TwoStepsExtractor(Extractor):
ignore_404=True,
first=True,
):
first = True
self.only_future = only_future
self.now = datetime.datetime.now().date()
@ -180,7 +178,11 @@ class TwoStepsExtractor(Extractor):
# then extract event information from this html document
try:
self.add_event_from_content(
content_event, event_url, url_human, default_values, published
content_event,
event_url,
url_human,
default_values,
published,
)
except Exception as e:
# some website (FB) sometime need a second step
@ -216,7 +218,6 @@ class TwoStepsExtractor(Extractor):
class TwoStepsExtractorNoPause(TwoStepsExtractor):
def extract(
self,
content,
@ -233,7 +234,13 @@ class TwoStepsExtractorNoPause(TwoStepsExtractor):
pause = False
self.downloader.pause = False
result = super().extract(
content, url, url_human, default_values, published, only_future, ignore_404
content,
url,
url_human,
default_values,
published,
only_future,
ignore_404,
)
self.downloader.pause = pause


@ -3,27 +3,35 @@
import autoslug.fields
from django.db import migrations
def migrate_data_slug_forward(apps, schema_editor):
ReferenceLocation = apps.get_model("agenda_culturel", "ReferenceLocation")
for instance in ReferenceLocation.objects.all():
print("Generating slug for %s"%instance)
instance.save() # Will trigger slug update
print("Generating slug for %s" % instance)
instance.save() # Will trigger slug update
def migrate_data_slug_backward(apps, schema_editor):
pass
class Migration(migrations.Migration):
class Migration(migrations.Migration):
dependencies = [
('agenda_culturel', '0001_squashed_0150_alter_event_local_image'),
("agenda_culturel", "0001_squashed_0150_alter_event_local_image"),
]
operations = [
migrations.AddField(
model_name='referencelocation',
name='slug',
field=autoslug.fields.AutoSlugField(default=None, editable=False, null=True, populate_from='name', unique=True),
model_name="referencelocation",
name="slug",
field=autoslug.fields.AutoSlugField(
default=None,
editable=False,
null=True,
populate_from="name",
unique=True,
),
),
migrations.RunPython(
code=migrate_data_slug_forward,


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m11 4a4 4 0 0 1 4 4 4 4 0 0 1 -4 4 4 4 0 0 1 -4-4 4 4 0 0 1 4-4m0 2a2 2 0 0 0 -2 2 2 2 0 0 0 2 2 2 2 0 0 0 2-2 2 2 0 0 0 -2-2m0 7c1.1 0 2.66.23 4.11.69-.61.38-1.11.91-1.5 1.54-.82-.2-1.72-.33-2.61-.33-2.97 0-6.1 1.46-6.1 2.1v1.1h8.14c.09.7.34 1.34.72 1.9h-10.76v-3c0-2.66 5.33-4 8-4m7.5-3h1.5 2v2h-2v5.5a2.5 2.5 0 0 1 -2.5 2.5 2.5 2.5 0 0 1 -2.5-2.5 2.5 2.5 0 0 1 2.5-2.5c.36 0 .69.07 1 .21z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m11 4a4 4 0 0 1 4 4 4 4 0 0 1 -4 4 4 4 0 0 1 -4-4 4 4 0 0 1 4-4m0 2a2 2 0 0 0 -2 2 2 2 0 0 0 2 2 2 2 0 0 0 2-2 2 2 0 0 0 -2-2m0 7c1.1 0 2.66.23 4.11.69-.61.38-1.11.91-1.5 1.54-.82-.2-1.72-.33-2.61-.33-2.97 0-6.1 1.46-6.1 2.1v1.1h8.14c.09.7.34 1.34.72 1.9h-10.76v-3c0-2.66 5.33-4 8-4m7.5-3h1.5 2v2h-2v5.5a2.5 2.5 0 0 1 -2.5 2.5 2.5 2.5 0 0 1 -2.5-2.5 2.5 2.5 0 0 1 2.5-2.5c.36 0 .69.07 1 .21z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m19 19h-14v-11h14m-3-7v2h-8v-2h-2v2h-1c-1.11 0-2 .89-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2v-14c0-1.11-.9-2-2-2h-1v-2m-1 11h-5v5h5z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m19 19h-14v-11h14m-3-7v2h-8v-2h-2v2h-1c-1.11 0-2 .89-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2v-14c0-1.11-.9-2-2-2h-1v-2m-1 11h-5v5h5z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m7 17 3.2-6.8 6.8-3.2-3.2 6.8zm5-5.9a.9.9 0 0 0 -.9.9.9.9 0 0 0 .9.9.9.9 0 0 0 .9-.9.9.9 0 0 0 -.9-.9m0-9.1a10 10 0 0 1 10 10 10 10 0 0 1 -10 10 10 10 0 0 1 -10-10 10 10 0 0 1 10-10m0 2a8 8 0 0 0 -8 8 8 8 0 0 0 8 8 8 8 0 0 0 8-8 8 8 0 0 0 -8-8z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m7 17 3.2-6.8 6.8-3.2-3.2 6.8zm5-5.9a.9.9 0 0 0 -.9.9.9.9 0 0 0 .9.9.9.9 0 0 0 .9-.9.9.9 0 0 0 -.9-.9m0-9.1a10 10 0 0 1 10 10 10 10 0 0 1 -10 10 10 10 0 0 1 -10-10 10 10 0 0 1 10-10m0 2a8 8 0 0 0 -8 8 8 8 0 0 0 8 8 8 8 0 0 0 8-8 8 8 0 0 0 -8-8z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m14 3.5c0 .83-.67 1.5-1.5 1.5s-1.5-.67-1.5-1.5.67-1.5 1.5-1.5 1.5.67 1.5 1.5m-5.5 1.5c-.83 0-1.5.67-1.5 1.5s.67 1.5 1.5 1.5 1.5-.67 1.5-1.5-.67-1.5-1.5-1.5m5.5 7-.78-2.25h2.96l2.16-1.08c.37-.17.52-.63.33-1-.17-.37-.63-.53-1-.34l-.82.41-.49-.84c-.29-.65-1-1.02-1.7-.86l-2.47.53c-.69.15-1.19.78-1.19 1.5v.7l-2.43 1.62h.01c-.08.07-.19.16-.25.28l-.89 1.77-1.78.89c-.37.17-.52.64-.33 1.01.13.26.4.41.67.41.11 0 .23-.02.34-.08l2.22-1.11 1.04-2.06 1.4 1.5c-1 3-8 7-8 7s4 2 9 2 9-2 9-2-5-4-7-8m2.85-.91-.32.16h-1.2l.06.16c.52 1.03 1.28 2.09 2.11 3.03l-.53-3.41z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m14 3.5c0 .83-.67 1.5-1.5 1.5s-1.5-.67-1.5-1.5.67-1.5 1.5-1.5 1.5.67 1.5 1.5m-5.5 1.5c-.83 0-1.5.67-1.5 1.5s.67 1.5 1.5 1.5 1.5-.67 1.5-1.5-.67-1.5-1.5-1.5m5.5 7-.78-2.25h2.96l2.16-1.08c.37-.17.52-.63.33-1-.17-.37-.63-.53-1-.34l-.82.41-.49-.84c-.29-.65-1-1.02-1.7-.86l-2.47.53c-.69.15-1.19.78-1.19 1.5v.7l-2.43 1.62h.01c-.08.07-.19.16-.25.28l-.89 1.77-1.78.89c-.37.17-.52.64-.33 1.01.13.26.4.41.67.41.11 0 .23-.02.34-.08l2.22-1.11 1.04-2.06 1.4 1.5c-1 3-8 7-8 7s4 2 9 2 9-2 9-2-5-4-7-8m2.85-.91-.32.16h-1.2l.06.16c.52 1.03 1.28 2.09 2.11 3.03l-.53-3.41z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m8.11 19.45c-2.17-.8-3.89-2.67-4.4-5.1l-1.66-7.81c-.24-1.08.45-2.14 1.53-2.37l9.77-2.07.03-.01c1.07-.21 2.12.48 2.34 1.54l.35 1.67 4.35.93h.03c1.05.24 1.73 1.3 1.51 2.36l-1.66 7.82c-.8 3.77-4.52 6.19-8.3 5.38-1.58-.33-2.92-1.18-3.89-2.34zm11.89-11.27-9.77-2.08-1.66 7.82v.03c-.57 2.68 1.16 5.32 3.85 5.89s5.35-1.15 5.92-3.84zm-4 8.32c-.63 1.07-1.89 1.66-3.17 1.39-1.27-.27-2.18-1.32-2.33-2.55zm-7.53-11.33-4.47.96 1.66 7.81.01.03c.15.71.45 1.35.86 1.9-.1-.77-.08-1.57.09-2.37l.43-2c-.45-.08-.84-.33-1.05-.69.06-.61.56-1.15 1.25-1.31h.25l.78-3.81c.04-.19.1-.36.19-.52m6.56 7.06c.32-.53 1-.81 1.69-.66.69.14 1.19.67 1.28 1.29-.33.52-1 .8-1.7.64-.69-.13-1.19-.66-1.27-1.27m-4.88-1.04c.32-.53.99-.81 1.68-.66.67.14 1.2.68 1.28 1.29-.33.52-1 .81-1.69.68-.69-.17-1.19-.7-1.27-1.31m1.82-6.76 1.96.42-.16-.8z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m8.11 19.45c-2.17-.8-3.89-2.67-4.4-5.1l-1.66-7.81c-.24-1.08.45-2.14 1.53-2.37l9.77-2.07.03-.01c1.07-.21 2.12.48 2.34 1.54l.35 1.67 4.35.93h.03c1.05.24 1.73 1.3 1.51 2.36l-1.66 7.82c-.8 3.77-4.52 6.19-8.3 5.38-1.58-.33-2.92-1.18-3.89-2.34zm11.89-11.27-9.77-2.08-1.66 7.82v.03c-.57 2.68 1.16 5.32 3.85 5.89s5.35-1.15 5.92-3.84zm-4 8.32c-.63 1.07-1.89 1.66-3.17 1.39-1.27-.27-2.18-1.32-2.33-2.55zm-7.53-11.33-4.47.96 1.66 7.81.01.03c.15.71.45 1.35.86 1.9-.1-.77-.08-1.57.09-2.37l.43-2c-.45-.08-.84-.33-1.05-.69.06-.61.56-1.15 1.25-1.31h.25l.78-3.81c.04-.19.1-.36.19-.52m6.56 7.06c.32-.53 1-.81 1.69-.66.69.14 1.19.67 1.28 1.29-.33.52-1 .8-1.7.64-.69-.13-1.19-.66-1.27-1.27m-4.88-1.04c.32-.53.99-.81 1.68-.66.67.14 1.2.68 1.28 1.29-.33.52-1 .81-1.69.68-.69-.17-1.19-.7-1.27-1.31m1.82-6.76 1.96.42-.16-.8z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m12 19c.86 0 1.59.54 1.87 1.29.55-.12 1.08-.29 1.59-.52l-1.76-4.15c-.52.25-1.09.38-1.7.38s-1.18-.13-1.7-.38l-1.76 4.15c.51.23 1.04.4 1.59.52.28-.75 1.01-1.29 1.87-1.29m6.25-1.24c-.25-.34-.44-.76-.44-1.26 0-1.09.9-2 2-2l.31.03c.25-.8.38-1.65.38-2.53s-.13-1.73-.38-2.5h-.31c-1.11 0-2-.89-2-2 0-.5.16-.91.44-1.26-1.15-1.24-2.66-2.15-4.38-2.53-.28.75-1.01 1.29-1.87 1.29s-1.59-.54-1.87-1.29c-1.72.38-3.23 1.29-4.38 2.53.28.35.45.79.45 1.26 0 1.11-.9 2-2 2h-.32c-.25.78-.38 1.62-.38 2.5 0 .89.14 1.74.39 2.55l.31-.05c1.11 0 2 .92 2 2 0 .5-.16.93-.44 1.27.32.35.68.67 1.05.96l1.9-4.46c-.45-.65-.71-1.43-.71-2.27a4 4 0 0 1 4-4 4 4 0 0 1 4 4c0 .84-.26 1.62-.71 2.27l1.9 4.46c.38-.29.73-.62 1.06-.97m-6.25 5.24c-1 0-1.84-.74-2-1.71-.63-.13-1.25-.34-1.85-.6l-.98 2.31h-2.17l1.41-3.32c-.53-.38-1.02-.82-1.45-1.31-.24.1-.49.13-.76.13a2 2 0 0 1 -2-2c0-.62.3-1.18.77-1.55-.31-.95-.47-1.92-.47-2.95 0-1 .16-2 .46-2.92-.46-.37-.76-.93-.76-1.58 0-1.09.89-2 2-2 .26 0 .51.06.73.15 1.32-1.47 3.07-2.52 5.07-2.94.16-.97 1-1.71 2-1.71s1.84.74 2 1.71c2 .42 3.74 1.47 5.06 2.93.23-.09.48-.14.75-.14a2 2 0 0 1 2 2c0 .64-.31 1.21-.77 1.57.3.93.46 1.93.46 2.93s-.16 2-.46 2.93c.46.37.77.94.77 1.57 0 1.12-.89 2-2 2-.27 0-.52-.04-.76-.14-.44.49-.93.93-1.46 1.32l1.41 3.32h-2.17l-.98-2.31c-.6.26-1.22.47-1.85.6-.16.97-1 1.71-2 1.71z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m12 19c.86 0 1.59.54 1.87 1.29.55-.12 1.08-.29 1.59-.52l-1.76-4.15c-.52.25-1.09.38-1.7.38s-1.18-.13-1.7-.38l-1.76 4.15c.51.23 1.04.4 1.59.52.28-.75 1.01-1.29 1.87-1.29m6.25-1.24c-.25-.34-.44-.76-.44-1.26 0-1.09.9-2 2-2l.31.03c.25-.8.38-1.65.38-2.53s-.13-1.73-.38-2.5h-.31c-1.11 0-2-.89-2-2 0-.5.16-.91.44-1.26-1.15-1.24-2.66-2.15-4.38-2.53-.28.75-1.01 1.29-1.87 1.29s-1.59-.54-1.87-1.29c-1.72.38-3.23 1.29-4.38 2.53.28.35.45.79.45 1.26 0 1.11-.9 2-2 2h-.32c-.25.78-.38 1.62-.38 2.5 0 .89.14 1.74.39 2.55l.31-.05c1.11 0 2 .92 2 2 0 .5-.16.93-.44 1.27.32.35.68.67 1.05.96l1.9-4.46c-.45-.65-.71-1.43-.71-2.27a4 4 0 0 1 4-4 4 4 0 0 1 4 4c0 .84-.26 1.62-.71 2.27l1.9 4.46c.38-.29.73-.62 1.06-.97m-6.25 5.24c-1 0-1.84-.74-2-1.71-.63-.13-1.25-.34-1.85-.6l-.98 2.31h-2.17l1.41-3.32c-.53-.38-1.02-.82-1.45-1.31-.24.1-.49.13-.76.13a2 2 0 0 1 -2-2c0-.62.3-1.18.77-1.55-.31-.95-.47-1.92-.47-2.95 0-1 .16-2 .46-2.92-.46-.37-.76-.93-.76-1.58 0-1.09.89-2 2-2 .26 0 .51.06.73.15 1.32-1.47 3.07-2.52 5.07-2.94.16-.97 1-1.71 2-1.71s1.84.74 2 1.71c2 .42 3.74 1.47 5.06 2.93.23-.09.48-.14.75-.14a2 2 0 0 1 2 2c0 .64-.31 1.21-.77 1.57.3.93.46 1.93.46 2.93s-.16 2-.46 2.93c.46.37.77.94.77 1.57 0 1.12-.89 2-2 2-.27 0-.52-.04-.76-.14-.44.49-.93.93-1.46 1.32l1.41 3.32h-2.17l-.98-2.31c-.6.26-1.22.47-1.85.6-.16.97-1 1.71-2 1.71z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m17 8c-9 2-11.1 8.17-13.18 13.34l1.89.66.95-2.3c.48.17.98.3 1.34.3 11 0 14-17 14-17-1 2-8 2.25-13 3.25s-7 5.25-7 7.25 1.75 3.75 1.75 3.75c3.25-9.25 13.25-9.25 13.25-9.25z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m17 8c-9 2-11.1 8.17-13.18 13.34l1.89.66.95-2.3c.48.17.98.3 1.34.3 11 0 14-17 14-17-1 2-8 2.25-13 3.25s-7 5.25-7 7.25 1.75 3.75 1.75 3.75c3.25-9.25 13.25-9.25 13.25-9.25z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m14.53 1.45-1.08 1.08 1.6 1.6c.22.25.33.54.33.87s-.11.64-.33.86l-3.55 3.61 1 1.08 3.63-3.61c.53-.59.79-1.24.79-1.94s-.26-1.36-.79-1.95zm-3.98 2.02-1.08 1.08.61.56c.22.22.33.52.33.89s-.11.67-.33.89l-.61.56 1.08 1.08.56-.61c.53-.59.8-1.23.8-1.92 0-.72-.27-1.37-.8-1.97zm10.45 1.59c-.69 0-1.33.27-1.92.8l-5.63 5.64 1.08 1 5.58-5.56c.25-.25.55-.38.89-.38s.64.13.89.38l.61.61 1.03-1.08-.56-.61c-.59-.53-1.25-.8-1.97-.8m-14 2.94-5 14 14-5zm12 3.06c-.7 0-1.34.27-1.94.8l-1.59 1.59 1.08 1.08 1.59-1.59c.25-.25.53-.38.86-.38s.63.13.88.38l1.62 1.59 1.05-1.03-1.6-1.64c-.59-.53-1.25-.8-1.95-.8z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m14.53 1.45-1.08 1.08 1.6 1.6c.22.25.33.54.33.87s-.11.64-.33.86l-3.55 3.61 1 1.08 3.63-3.61c.53-.59.79-1.24.79-1.94s-.26-1.36-.79-1.95zm-3.98 2.02-1.08 1.08.61.56c.22.22.33.52.33.89s-.11.67-.33.89l-.61.56 1.08 1.08.56-.61c.53-.59.8-1.23.8-1.92 0-.72-.27-1.37-.8-1.97zm10.45 1.59c-.69 0-1.33.27-1.92.8l-5.63 5.64 1.08 1 5.58-5.56c.25-.25.55-.38.89-.38s.64.13.89.38l.61.61 1.03-1.08-.56-.61c-.59-.53-1.25-.8-1.97-.8m-14 2.94-5 14 14-5zm12 3.06c-.7 0-1.34.27-1.94.8l-1.59 1.59 1.08 1.08 1.59-1.59c.25-.25.53-.38.86-.38s.63.13.88.38l1.62 1.59 1.05-1.03-1.6-1.64c-.59-.53-1.25-.8-1.95-.8z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m12 3-11 6 4 2.18v6l7 3.82 7-3.82v-6l2-1.09v6.91h2v-8zm6.82 6-6.82 3.72-6.82-3.72 6.82-3.72zm-1.82 7-5 2.72-5-2.72v-3.73l5 2.73 5-2.73z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m12 3-11 6 4 2.18v6l7 3.82 7-3.82v-6l2-1.09v6.91h2v-8zm6.82 6-6.82 3.72-6.82-3.72 6.82-3.72zm-1.82 7-5 2.72-5-2.72v-3.73l5 2.73 5-2.73z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m4 15h2a2 2 0 0 1 2 2v2h1v-2a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2v2h1v-2a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2v2h1v3h-22v-3h1v-2a2 2 0 0 1 2-2m7-8 4 3-4 3zm-7-5h16a2 2 0 0 1 2 2v9.54c-.59-.35-1.27-.54-2-.54v-9h-16v9c-.73 0-1.41.19-2 .54v-9.54a2 2 0 0 1 2-2z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m4 15h2a2 2 0 0 1 2 2v2h1v-2a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2v2h1v-2a2 2 0 0 1 2-2h2a2 2 0 0 1 2 2v2h1v3h-22v-3h1v-2a2 2 0 0 1 2-2m7-8 4 3-4 3zm-7-5h16a2 2 0 0 1 2 2v9.54c-.59-.35-1.27-.54-2-.54v-9h-16v9c-.73 0-1.41.19-2 .54v-9.54a2 2 0 0 1 2-2z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m21.71 20.29-1.42 1.42a1 1 0 0 1 -1.41 0l-11.88-11.86a3.81 3.81 0 0 1 -1 .15 4 4 0 0 1 -3.78-5.3l2.54 2.54.53-.53 1.42-1.42.53-.53-2.54-2.54a4 4 0 0 1 5.3 3.78 3.81 3.81 0 0 1 -.15 1l11.86 11.88a1 1 0 0 1 0 1.41m-19.42-1.41a1 1 0 0 0 0 1.41l1.42 1.42a1 1 0 0 0 1.41 0l5.47-5.46-2.83-2.83m12.24-11.42-4 2v2l-2.17 2.17 2 2 2.17-2.17h2l2-4z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m21.71 20.29-1.42 1.42a1 1 0 0 1 -1.41 0l-11.88-11.86a3.81 3.81 0 0 1 -1 .15 4 4 0 0 1 -3.78-5.3l2.54 2.54.53-.53 1.42-1.42.53-.53-2.54-2.54a4 4 0 0 1 5.3 3.78 3.81 3.81 0 0 1 -.15 1l11.86 11.88a1 1 0 0 1 0 1.41m-19.42-1.41a1 1 0 0 0 0 1.41l1.42 1.42a1 1 0 0 0 1.41 0l5.47-5.46-2.83-2.83m12.24-11.42-4 2v2l-2.17 2.17 2 2 2.17-2.17h2l2-4z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m6 1v2h3v3.4l-4.89-2.02-2.68 6.46 5.54 2.3 4.97 3.68 1.85.77 3.83-9.24-1.85-.77-4.77-.71v-3.87h3v-2zm15.81 5.29-2.31.96.76 1.85 2.31-.96zm-2.03 7.28-.78 1.85 2.79 1.15.76-1.85zm-3.59 5.36-1.85.76.96 2.31 1.85-.77z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m6 1v2h3v3.4l-4.89-2.02-2.68 6.46 5.54 2.3 4.97 3.68 1.85.77 3.83-9.24-1.85-.77-4.77-.71v-3.87h3v-2zm15.81 5.29-2.31.96.76 1.85 2.31-.96zm-2.03 7.28-.78 1.85 2.79 1.15.76-1.85zm-3.59 5.36-1.85.76.96 2.31 1.85-.77z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m6 1v2h3v3.4l-4.89-2.02-2.68 6.46 5.54 2.3 4.97 3.68 1.85.77 3.83-9.24-1.85-.77-4.77-.71v-3.87h3v-2zm15.81 5.29-2.31.96.76 1.85 2.31-.96zm-2.03 7.28-.78 1.85 2.79 1.15.76-1.85zm-3.59 5.36-1.85.76.96 2.31 1.85-.77z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m6 1v2h3v3.4l-4.89-2.02-2.68 6.46 5.54 2.3 4.97 3.68 1.85.77 3.83-9.24-1.85-.77-4.77-.71v-3.87h3v-2zm15.81 5.29-2.31.96.76 1.85 2.31-.96zm-2.03 7.28-.78 1.85 2.79 1.15.76-1.85zm-3.59 5.36-1.85.76.96 2.31 1.85-.77z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m6 19h2v2h-2zm6-16-10 5v13h2v-8h16v8h2v-13zm-4 8h-4v-2h4zm6 0h-4v-2h4zm6 0h-4v-2h4zm-14 4h2v2h-2zm4 0h2v2h-2zm0 4h2v2h-2zm4 0h2v2h-2z"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m6 19h2v2h-2zm6-16-10 5v13h2v-8h16v8h2v-13zm-4 8h-4v-2h4zm6 0h-4v-2h4zm6 0h-4v-2h4zm-14 4h2v2h-2zm4 0h2v2h-2zm0 4h2v2h-2zm4 0h2v2h-2z"/></svg>


@ -1 +1 @@
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m19 7s-5 7-12.5 7c-2 0-5.5 1-5.5 5v4h11v-4c0-2.5 3-1 7-8l-1.5-1.5m-14.5-4.5v-3h20v14h-3m-9-15h4v2h-4zm-4.5 13c1.93299662 0 3.5-1.5670034 3.5-3.5 0-1.93299662-1.56700338-3.5-3.5-3.5s-3.5 1.56700338-3.5 3.5c0 1.9329966 1.56700338 3.5 3.5 3.5z" fill="none" stroke="#000" stroke-width="2"/></svg>
<svg height="24" viewBox="0 0 24 24" width="24" xmlns="http://www.w3.org/2000/svg"><path d="m19 7s-5 7-12.5 7c-2 0-5.5 1-5.5 5v4h11v-4c0-2.5 3-1 7-8l-1.5-1.5m-14.5-4.5v-3h20v14h-3m-9-15h4v2h-4zm-4.5 13c1.93299662 0 3.5-1.5670034 3.5-3.5 0-1.93299662-1.56700338-3.5-3.5-3.5s-3.5 1.56700338-3.5 3.5c0 1.9329966 1.56700338 3.5 3.5 3.5z" fill="none" stroke="#000" stroke-width="2"/></svg>


@ -1,55 +1,51 @@
from django.db import models, connection
from django_better_admin_arrayfield.models.fields import ArrayField
from django.utils.translation import gettext_lazy as _
from django.template.defaultfilters import slugify
from django.utils.dateparse import parse_date
from django.urls import reverse
from colorfield.fields import ColorField
from django_ckeditor_5.fields import CKEditor5Field
import copy
import hashlib
import logging
import os
import unicodedata
import urllib.request
import uuid
from collections import defaultdict
from datetime import date, time, timedelta
from urllib.parse import urlparse
import emoji
import recurrence
import recurrence.fields
from autoslug import AutoSlugField
from colorfield.fields import ColorField
from django.contrib.auth.models import User
from django.contrib.gis.geos import Point
from django.contrib.postgres.search import TrigramSimilarity
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.core.cache.utils import make_template_fragment_key
from django.contrib.auth.models import User
import emoji
from django.core.files.storage import default_storage
from django.contrib.sites.models import Site
from django.core.mail import send_mail
from django.template.loader import render_to_string
from autoslug import AutoSlugField
from django_resized import ResizedImageField
import uuid
import hashlib
import urllib.request
import os
from django.core.files import File
from django.utils import timezone
from django.contrib.postgres.search import TrigramSimilarity
from django.db.models import Q, Count, F, Subquery, OuterRef, Func
from django.core.files.storage import default_storage
from django.core.mail import send_mail
from django.db import connection, models
from django.db.models import Count, F, Func, OuterRef, Q, Subquery
from django.db.models.functions import Lower
import recurrence.fields
import recurrence
import copy
import unicodedata
from collections import defaultdict
from django.template.defaultfilters import date as _date
from django.template.defaultfilters import slugify
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.dateparse import parse_date
from django.utils.timezone import datetime
from django.utils.translation import gettext_lazy as _
from django_better_admin_arrayfield.models.fields import ArrayField
from django_ckeditor_5.fields import CKEditor5Field
from django_resized import ResizedImageField
from icalendar import Calendar as icalCal
from icalendar import Event as icalEvent
from location_field.models.spatial import LocationField
from .calendar import CalendarDay
from .import_tasks.extractor import Extractor
from .import_tasks.generic_extractors.fbevent import (
CExtractor as FacebookEventExtractor,
)
from .import_tasks.extractor import Extractor
from django.template.defaultfilters import date as _date
from datetime import time, timedelta, date
from django.utils.timezone import datetime
from location_field.models.spatial import LocationField
from django.contrib.gis.geos import Point
from .calendar import CalendarDay
from icalendar import Calendar as icalCal
from icalendar import Event as icalEvent
import logging
logger = logging.getLogger(__name__)
@ -188,9 +184,11 @@ class Category(models.Model):
class Tag(models.Model):
name = models.CharField(
verbose_name=_("Name"), help_text=_("Tag name"), max_length=512, unique=True
verbose_name=_("Name"),
help_text=_("Tag name"),
max_length=512,
unique=True,
)
description = CKEditor5Field(
@ -253,7 +251,6 @@ class Tag(models.Model):
result = cache.get(id_cache)
if not result:
free_tags = Event.get_all_tags(False)
f_tags = [t["tag"] for t in free_tags]
@ -323,7 +320,6 @@ class Tag(models.Model):
class DuplicatedEvents(models.Model):
representative = models.ForeignKey(
"Event",
verbose_name=_("Representative event"),
@ -473,7 +469,6 @@ class DuplicatedEvents(models.Model):
class ReferenceLocation(models.Model):
name = models.CharField(
verbose_name=_("Name"),
help_text=_("Name of the location"),
@ -481,7 +476,10 @@ class ReferenceLocation(models.Model):
null=False,
)
location = LocationField(
based_fields=["name"], zoom=12, default=Point(3.08333, 45.783329), srid=4326
based_fields=["name"],
zoom=12,
default=Point(3.08333, 45.783329),
srid=4326,
)
main = models.IntegerField(
verbose_name=_("Main"),
@ -570,7 +568,8 @@ class Place(models.Model):
def get_absolute_url(self):
return reverse(
"view_place_fullname", kwargs={"pk": self.pk, "extra": slugify(self.name)}
"view_place_fullname",
kwargs={"pk": self.pk, "extra": slugify(self.name)},
)
def nb_events(self):
@ -716,7 +715,10 @@ class Event(models.Model):
title = models.CharField(verbose_name=_("Title"), max_length=512)
status = models.CharField(
_("Status"), max_length=20, choices=STATUS.choices, default=STATUS.DRAFT
_("Status"),
max_length=20,
choices=STATUS.choices,
default=STATUS.DRAFT,
)
category = models.ForeignKey(
@ -742,7 +744,10 @@ class Event(models.Model):
end_time = models.TimeField(verbose_name=_("End time"), blank=True, null=True)
recurrences = recurrence.fields.RecurrenceField(
verbose_name=_("Recurrence"), include_dtstart=False, blank=True, null=True
verbose_name=_("Recurrence"),
include_dtstart=False,
blank=True,
null=True,
)
exact_location = models.ForeignKey(
@ -845,7 +850,10 @@ class Event(models.Model):
def get_import_messages(self):
return self.message_set.filter(
message_type__in=[Message.TYPE.IMPORT_PROCESS, Message.TYPE.UPDATE_PROCESS]
message_type__in=[
Message.TYPE.IMPORT_PROCESS,
Message.TYPE.UPDATE_PROCESS,
]
).order_by("date")
def get_consolidated_end_day(self, intuitive=True):
@ -995,7 +1003,12 @@ class Event(models.Model):
if not simple:
c += [
{"timestamp": m.date, "data": m, "user": m.user, "is_date": False}
{
"timestamp": m.date,
"data": m,
"user": m.user,
"is_date": False,
}
for m in self.message_set.filter(spam=False)
]
@ -1408,7 +1421,10 @@ class Event(models.Model):
CategorisationRule.apply_rules(self)
def get_contributor_message(self):
types = [Message.TYPE.FROM_CONTRIBUTOR, Message.TYPE.FROM_CONTRIBUTOR_NO_MSG]
types = [
Message.TYPE.FROM_CONTRIBUTOR,
Message.TYPE.FROM_CONTRIBUTOR_NO_MSG,
]
if self.other_versions is None or self.other_versions.representative is None:
return Message.objects.filter(
related_event=self.pk, message_type__in=types, closed=False
@ -1481,7 +1497,7 @@ class Event(models.Model):
# check if we need to clean the other_versions
if (
not clone
and not "noclean_other_versions" in kwargs
and "noclean_other_versions" not in kwargs
and self.pk
and self.other_versions is not None
and self.other_versions.nb_duplicated() == 1
@ -1901,7 +1917,6 @@ class Event(models.Model):
return events[0]
def update(self, other, all):
# integrate pending organisers
if other.has_pending_organisers() and other.pending_organisers is not None:
self.organisers.set(other.pending_organisers)
@ -2030,7 +2045,8 @@ class Event(models.Model):
else:
# we only update local information if it's a pure import and has no moderated_date
same_imported.update(
event, pure and same_imported.moderated_date is None
event,
pure and same_imported.moderated_date is None,
)
# save messages
if event.has_message():
@ -2046,7 +2062,8 @@ class Event(models.Model):
):
same_imported.download_image()
same_imported.save(
update_fields=["local_image"], noclean_other_versions=True
update_fields=["local_image"],
noclean_other_versions=True,
)
to_update.append(same_imported)
@ -2104,7 +2121,13 @@ class Event(models.Model):
nb_updated = Event.objects.bulk_update(
to_update,
fields=Event.data_fields(no_m2m=True)
+ ["imported_date", "modified_date", "uuids", "status", "import_sources"],
+ [
"imported_date",
"modified_date",
"uuids",
"status",
"import_sources",
],
)
nb_draft = 0
@ -2329,15 +2352,15 @@ class Event(models.Model):
class Message(models.Model):
class TYPE(models.TextChoices):
FROM_CONTRIBUTOR = "from_contributor", _("From contributor")
IMPORT_PROCESS = "import_process", _("Import process")
UPDATE_PROCESS = "update_process", _("Update process")
CONTACT_FORM = "contact_form", _("Contact form")
EVENT_REPORT = "event_report", _("Event report")
FROM_CONTRIBUTOR_NO_MSG = "from_contrib_no_msg", _(
"From contributor (without message)"
FROM_CONTRIBUTOR_NO_MSG = (
"from_contrib_no_msg",
_("From contributor (without message)"),
)
WARNING = "warning", _("Warning")
@ -2470,7 +2493,10 @@ class RecurrentImport(models.Model):
class DOWNLOADER(models.TextChoices):
SIMPLE = "simple", _("simple")
CHROMIUMHEADLESS = "chromium headless", _("Headless Chromium")
CHROMIUMHEADLESSPAUSE = "chromium (pause)", _("Headless Chromium (pause)")
CHROMIUMHEADLESSPAUSE = (
"chromium (pause)",
_("Headless Chromium (pause)"),
)
class RECURRENCE(models.TextChoices):
DAILY = (
@ -2489,7 +2515,10 @@ class RecurrentImport(models.Model):
default="",
)
processor = models.CharField(
_("Processor"), max_length=20, choices=PROCESSOR.choices, default=PROCESSOR.ICAL
_("Processor"),
max_length=20,
choices=PROCESSOR.choices,
default=PROCESSOR.ICAL,
)
downloader = models.CharField(
_("Downloader"),
@ -2628,7 +2657,10 @@ class BatchImportation(models.Model):
)
status = models.CharField(
_("Status"), max_length=20, choices=STATUS.choices, default=STATUS.RUNNING
_("Status"),
max_length=20,
choices=STATUS.choices,
default=STATUS.RUNNING,
)
error_message = models.CharField(


@ -1,8 +1,9 @@
from os import getenv as os_getenv, path as os_path # noqa
from os import getenv as os_getenv # noqa
from os import path as os_path
from pathlib import Path
from django.utils.translation import gettext_lazy as _
from django.core.management.utils import get_random_secret_key
from django.utils.translation import gettext_lazy as _
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent.parent


@ -1,6 +1,5 @@
from .base import * # noqa
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",


@ -57,4 +57,4 @@
}
.marker-cluster span {
line-height: 30px;
}
}


@ -16,4 +16,4 @@ var _default = {
timeFormat: 'HH:mm',
firstDay: 1
};
exports.default = _default;
exports.default = _default;


@ -1 +1 @@
next_month
next_month


@ -1 +1 @@
.ch-container{display:block}.ch-container,.ch-domain,.ch-domain-container,.ch-domain-container-animation-wrapper{overflow:visible}.ch-domain-container.in-transition .ch-domain-container-animation-wrapper{overflow:hidden}.ch-domain-bg{fill:transparent}.ch-domain-text{fill:currentColor;font-size:10px}.ch-subdomain{overflow:visible}.ch-subdomain-bg{fill:#ededed}.ch-subdomain-bg.highlight{stroke:#444;stroke-width:1px}.ch-subdomain-bg:hover{stroke:#000;stroke-width:1px}.ch-subdomain-text{font-size:8px;pointer-events:none}[data-theme=dark] .ch-subdomain-bg{fill:#2d333b}[data-theme=dark] .ch-subdomain-bg.highlight{stroke:#768390}[data-theme=dark] .ch-subdomain-bg:hover{stroke:#636e7b}#ch-plugin-legend>svg{background:transparent;color:currentColor}#ch-tooltip{background:#222;border-radius:2px;box-shadow:2px 2px 2px rgba(0,0,0,.2);box-sizing:border-box;color:#bbb;display:none;font-size:12px;line-height:1.4;padding:5px 10px;text-align:center}#ch-tooltip[data-show]{display:block}#ch-tooltip-arrow,#ch-tooltip-arrow:before{background:inherit;height:8px;position:absolute;width:8px}#ch-tooltip-arrow{visibility:hidden}#ch-tooltip-arrow:before{content:"";transform:rotate(45deg);visibility:visible}#ch-tooltip[data-popper-placement^=top]>#ch-tooltip-arrow{bottom:-4px;margin-left:-4px}#ch-tooltip[data-popper-placement^=bottom]>#ch-tooltip-arrow{margin-left:-4px;top:-4px}#ch-tooltip[data-popper-placement^=left]>#ch-tooltip-arrow{right:-4px}#ch-tooltip[data-popper-placement^=right]>#ch-tooltip-arrow{left:-4px}#ch-tooltip[data-theme=dark]{background:#636e7b;color:#cdd9e5}
.ch-container{display:block}.ch-container,.ch-domain,.ch-domain-container,.ch-domain-container-animation-wrapper{overflow:visible}.ch-domain-container.in-transition .ch-domain-container-animation-wrapper{overflow:hidden}.ch-domain-bg{fill:transparent}.ch-domain-text{fill:currentColor;font-size:10px}.ch-subdomain{overflow:visible}.ch-subdomain-bg{fill:#ededed}.ch-subdomain-bg.highlight{stroke:#444;stroke-width:1px}.ch-subdomain-bg:hover{stroke:#000;stroke-width:1px}.ch-subdomain-text{font-size:8px;pointer-events:none}[data-theme=dark] .ch-subdomain-bg{fill:#2d333b}[data-theme=dark] .ch-subdomain-bg.highlight{stroke:#768390}[data-theme=dark] .ch-subdomain-bg:hover{stroke:#636e7b}#ch-plugin-legend>svg{background:transparent;color:currentColor}#ch-tooltip{background:#222;border-radius:2px;box-shadow:2px 2px 2px rgba(0,0,0,.2);box-sizing:border-box;color:#bbb;display:none;font-size:12px;line-height:1.4;padding:5px 10px;text-align:center}#ch-tooltip[data-show]{display:block}#ch-tooltip-arrow,#ch-tooltip-arrow:before{background:inherit;height:8px;position:absolute;width:8px}#ch-tooltip-arrow{visibility:hidden}#ch-tooltip-arrow:before{content:"";transform:rotate(45deg);visibility:visible}#ch-tooltip[data-popper-placement^=top]>#ch-tooltip-arrow{bottom:-4px;margin-left:-4px}#ch-tooltip[data-popper-placement^=bottom]>#ch-tooltip-arrow{margin-left:-4px;top:-4px}#ch-tooltip[data-popper-placement^=left]>#ch-tooltip-arrow{right:-4px}#ch-tooltip[data-popper-placement^=right]>#ch-tooltip-arrow{left:-4px}#ch-tooltip[data-theme=dark]{background:#636e7b;color:#cdd9e5}


@ -1 +1 @@
.ch-container{display:block}.ch-container,.ch-domain,.ch-domain-container,.ch-domain-container-animation-wrapper{overflow:visible}.ch-domain-container.in-transition .ch-domain-container-animation-wrapper{overflow:hidden}.ch-domain-bg{fill:transparent}.ch-domain-text{fill:currentColor;font-size:10px}.ch-subdomain{overflow:visible}.ch-subdomain-bg{fill:#ededed}.ch-subdomain-bg.highlight{stroke:#444;stroke-width:1px}.ch-subdomain-bg:hover{stroke:#000;stroke-width:1px}.ch-subdomain-text{font-size:8px;pointer-events:none}[data-theme=dark] .ch-subdomain-bg{fill:#2d333b}[data-theme=dark] .ch-subdomain-bg.highlight{stroke:#768390}[data-theme=dark] .ch-subdomain-bg:hover{stroke:#636e7b}#ch-plugin-legend>svg{background:transparent;color:currentColor}#ch-tooltip{background:#222;border-radius:2px;box-shadow:2px 2px 2px rgba(0,0,0,.2);box-sizing:border-box;color:#bbb;display:none;font-size:12px;line-height:1.4;padding:5px 10px;text-align:center}#ch-tooltip[data-show]{display:block}#ch-tooltip-arrow,#ch-tooltip-arrow:before{background:inherit;height:8px;position:absolute;width:8px}#ch-tooltip-arrow{visibility:hidden}#ch-tooltip-arrow:before{content:"";transform:rotate(45deg);visibility:visible}#ch-tooltip[data-popper-placement^=top]>#ch-tooltip-arrow{bottom:-4px;margin-left:-4px}#ch-tooltip[data-popper-placement^=bottom]>#ch-tooltip-arrow{margin-left:-4px;top:-4px}#ch-tooltip[data-popper-placement^=left]>#ch-tooltip-arrow{right:-4px}#ch-tooltip[data-popper-placement^=right]>#ch-tooltip-arrow{left:-4px}#ch-tooltip[data-theme=dark]{background:#636e7b;color:#cdd9e5}
.ch-container{display:block}.ch-container,.ch-domain,.ch-domain-container,.ch-domain-container-animation-wrapper{overflow:visible}.ch-domain-container.in-transition .ch-domain-container-animation-wrapper{overflow:hidden}.ch-domain-bg{fill:transparent}.ch-domain-text{fill:currentColor;font-size:10px}.ch-subdomain{overflow:visible}.ch-subdomain-bg{fill:#ededed}.ch-subdomain-bg.highlight{stroke:#444;stroke-width:1px}.ch-subdomain-bg:hover{stroke:#000;stroke-width:1px}.ch-subdomain-text{font-size:8px;pointer-events:none}[data-theme=dark] .ch-subdomain-bg{fill:#2d333b}[data-theme=dark] .ch-subdomain-bg.highlight{stroke:#768390}[data-theme=dark] .ch-subdomain-bg:hover{stroke:#636e7b}#ch-plugin-legend>svg{background:transparent;color:currentColor}#ch-tooltip{background:#222;border-radius:2px;box-shadow:2px 2px 2px rgba(0,0,0,.2);box-sizing:border-box;color:#bbb;display:none;font-size:12px;line-height:1.4;padding:5px 10px;text-align:center}#ch-tooltip[data-show]{display:block}#ch-tooltip-arrow,#ch-tooltip-arrow:before{background:inherit;height:8px;position:absolute;width:8px}#ch-tooltip-arrow{visibility:hidden}#ch-tooltip-arrow:before{content:"";transform:rotate(45deg);visibility:visible}#ch-tooltip[data-popper-placement^=top]>#ch-tooltip-arrow{bottom:-4px;margin-left:-4px}#ch-tooltip[data-popper-placement^=bottom]>#ch-tooltip-arrow{margin-left:-4px;top:-4px}#ch-tooltip[data-popper-placement^=left]>#ch-tooltip-arrow{right:-4px}#ch-tooltip[data-popper-placement^=right]>#ch-tooltip-arrow{left:-4px}#ch-tooltip[data-theme=dark]{background:#636e7b;color:#cdd9e5}


@ -2,11 +2,11 @@
<html lang="fr">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Y'a un p'tit pépin !</title>
<link rel="icon" type="image/svg+xml" href="/static/images/favicon.svg">
<link rel="stylesheet" href="/static/CACHE/css/style.125efeac89b0.css" type="text/css" media="screen">
</head>
<body>
@ -24,5 +24,5 @@
</div>
</article>
</main>
</body>
</body>


@ -5,14 +5,14 @@ function formatDate(date = new Date()) {
month: '2-digit',
});
const day = date.toLocaleString('default', {day: '2-digit'});
return [year, month, day].join('-');
}
function formatTime(date = new Date()) {
const hour = ("0" + date.getHours()).slice(-2);
const minutes = date.toLocaleString('default', {minute: '2-digit'});
return [hour, minutes].join(':');
}
@ -31,7 +31,7 @@ const update_datetimes = (event) => {
if (current == start_day) {
if (end_day.value) {
console.log("update day ", start_day.oldvalue, " -> ", start_day.value);
new_date = new Date(start_day.value);
old_date = new Date(start_day.oldvalue);
@ -68,4 +68,4 @@ const update_datetimes = (event) => {
}
}
};
};


@ -27,8 +27,8 @@ function displayButtons() {
buttonRight.classList.remove("hidden");
buttonRightPage.classList.add("hidden");
}
}
else {
buttonLeft.classList.add("hidden");
@ -52,8 +52,8 @@ document.addEventListener("DOMContentLoaded", function(e) {
grid = agenda.querySelector(".grid");
agenda.scrollLeft -= grid.offsetWidth / 7;
}, false);
// clic on the right button will move the grid one step to the right
document.querySelector('.slider-button.button-right').addEventListener('click', function (event) {
agenda = document.getElementById("calendar");
@ -66,6 +66,6 @@ document.addEventListener("DOMContentLoaded", function(e) {
agenda.addEventListener('scrollend', () => {
displayButtons();
});
});
});

Some files were not shown because too many files have changed in this diff.