Compare commits

..

9 Commits

Author SHA1 Message Date
cd735ef028 another gcal try without chunking 2026-04-09 17:28:24 +02:00
bd048a245b pagination fix 2026-04-09 16:29:00 +02:00
51a6a8be83 more gcal fixes 2026-04-09 16:26:17 +02:00
6390211d5e another gcal fix 2026-04-09 16:18:50 +02:00
edaf83aaa1 fix in gcal 2026-04-09 16:13:08 +02:00
4e72586372 updated gcal coupling 2026-04-09 16:07:23 +02:00
2352dbe35a Deployment changes 2026-04-09 15:29:35 +02:00
78125921ee added database 2026-04-09 15:18:21 +02:00
4c92a97759 dockerfiles 2026-04-09 15:13:08 +02:00
10 changed files with 231 additions and 61 deletions

8
.dockerignore Normal file
View File

@@ -0,0 +1,8 @@
# The source code is bind-mounted at runtime, not copied into the image.
# .dockerignore only matters for what's sent to the Docker build daemon
# (i.e. the COPY requirements.txt step in the Dockerfile).
.git/
__pycache__/
*.pyc
# Keep requirements.txt (needed for the COPY in Dockerfile)

25
Dockerfile Normal file
View File

@@ -0,0 +1,25 @@
FROM python:3.12-slim
# Don't write .pyc files; flush stdout/stderr immediately (cleaner container logs).
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1
WORKDIR /app
# Install build dependencies (needed for some Python packages, e.g. Pillow)
RUN apt-get update && apt-get install -y --no-install-recommends \
gcc \
&& rm -rf /var/lib/apt/lists/*
# Pre-install requirements at build time so the layer is cached.
# The entrypoint re-runs pip install on every container start to pick up
# any changes that arrived via git pull without needing a rebuild.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# The source code is NOT copied here — the git repo is bind-mounted
# at /app at runtime (see docker-compose.yml).
# Documentation only: the actual host mapping is done in docker-compose.yml.
EXPOSE 8000
# entrypoint.sh lives in the mounted repo at /app/docker/entrypoint.sh
ENTRYPOINT ["/app/docker/entrypoint.sh"]

Binary file not shown.

BIN
blechreiz/database.sqlite Normal file

Binary file not shown.

View File

@@ -45,7 +45,9 @@ EMAIL_USE_TLS = False
# Hosts/domain names that are valid for this site; required if DEBUG is False # Hosts/domain names that are valid for this site; required if DEBUG is False
ALLOWED_HOSTS = ["localhost", "127.0.0.1", ".blechreiz.com", ".bauer.technology", ".bauer.tech"] ALLOWED_HOSTS = ["localhost", "127.0.0.1", ".blechreiz.com", ".bauer.tech"]
CSRF_TRUSTED_ORIGINS = ["https://br.bauer.tech", "https://*.bauer.tech", "https://*.blechreiz.com"]
# Local time zone for this installation. # Local time zone for this installation.
TIME_ZONE = "Europe/Berlin" TIME_ZONE = "Europe/Berlin"
@@ -179,7 +181,7 @@ GCAL_COUPLING = {
"clientId": "34462582242-4kpdvvbi27ajt4u22uitqurpve9o8ipj.apps.googleusercontent.com", "clientId": "34462582242-4kpdvvbi27ajt4u22uitqurpve9o8ipj.apps.googleusercontent.com",
"client_secret": os.environ.get("GCAL_CLIENT_SECRET", ""), "client_secret": os.environ.get("GCAL_CLIENT_SECRET", ""),
"credentials_file": PROJECT_PATH + "/calendarCredentials.dat", "credentials_file": PROJECT_PATH + "/calendarCredentials.dat",
"push_url": "https://blechreiz.bauer.technology/eventplanner_gcal/gcalApiCallback", "push_url": "https://br.bauer.tech/eventplanner_gcal/gcalApiCallback",
} }

25
docker-compose.yml Normal file
View File

@@ -0,0 +1,25 @@
# Usage:
#   cd /docker/websites/blechreiz
#   docker compose up -d --build        # first start / after Dockerfile changes
#   git pull && docker compose restart  # deploy new code (entrypoint handles the rest)
services:
  blechreiz:
    build:
      context: .
      dockerfile: Dockerfile
    volumes:
      - .:/app  # git repo is the live source
    environment:
      - VIRTUAL_HOST=br.bauer.tech
      - VIRTUAL_PORT=8000
    env_file:
      - blechreiz/.env
    ports:
      # Bind to loopback only so the app is not directly reachable from the
      # network; the external proxy reaches the container over the shared
      # nginx-proxy docker network and terminates HTTPS.
      - "127.0.0.1:8000:8000"
    restart: unless-stopped
networks:
  default:
    name: nginx-proxy
    external: true

0
docker/entrypoint.sh Normal file → Executable file
View File

View File

@@ -79,13 +79,6 @@ def create_gcal_service_object():
return None return None
def _invalidate_service_on_error(exc):
"""Reset the cached service object so the next call retries credential loading."""
global _service_object
logger.warning(f"Invalidating cached GCal service due to error: {exc}")
_service_object = None
def get_service_object(): def get_service_object():
"""Get or create the Google Calendar service object.""" """Get or create the Google Calendar service object."""
global _service_object global _service_object
@@ -204,24 +197,29 @@ def build_gcal_event(event, timezone="Europe/Berlin"):
# ------------------------------ Callback Functions ------------------------------------------------ # ------------------------------ Callback Functions ------------------------------------------------
def on_gcal_event_created(request_id, response, exception=None): def _save_gcal_mapping_from_response(response):
"""Callback function for created events to enter new gcal id in the mapping table.""" """Save a GCal mapping from an insert response."""
if exception is not None:
logger.error(f"Error creating GCal event: {exception}")
raise exception
google_id = response["id"] google_id = response["id"]
django_id = response["extendedProperties"]["private"]["blechreizID"] django_id = response["extendedProperties"]["private"]["blechreizID"]
try: try:
event = Event.objects.get(pk=django_id) event = Event.objects.get(pk=django_id)
mapping = GCalMapping(gcal_id=google_id, event=event) GCalMapping.objects.update_or_create(
mapping.save() event=event, defaults={"gcal_id": google_id}
)
logger.info(f"Created mapping: GCal {google_id} <-> Event {django_id}") logger.info(f"Created mapping: GCal {google_id} <-> Event {django_id}")
except Event.DoesNotExist: except Event.DoesNotExist:
logger.error(f"Event {django_id} not found when creating GCal mapping") logger.error(f"Event {django_id} not found when creating GCal mapping")
def on_gcal_event_created(request_id, response, exception=None):
"""Callback function for batch delete_all — kept for backwards compat."""
if exception is not None:
logger.error(f"Error creating GCal event: {exception}")
return
_save_gcal_mapping_from_response(response)
# ------------------------------ GCal Api Calls ------------------------------------------------- # ------------------------------ GCal Api Calls -------------------------------------------------
@@ -230,6 +228,7 @@ def get_all_gcal_events(service, from_now=False):
Retrieves all gcal events with custom property blechreizEvent=True. Retrieves all gcal events with custom property blechreizEvent=True.
These are all events that have been created by this script. These are all events that have been created by this script.
Handles pagination so all events are returned regardless of count.
""" """
if from_now: if from_now:
now = datetime.datetime.now() now = datetime.datetime.now()
@@ -237,21 +236,32 @@ def get_all_gcal_events(service, from_now=False):
else: else:
min_time = "2000-01-01T00:00:00-00:00" min_time = "2000-01-01T00:00:00-00:00"
all_items = []
page_token = None
try: try:
events = ( while True:
service.events() kwargs = dict(
.list(
calendarId="primary", calendarId="primary",
singleEvents=True, singleEvents=True,
maxResults=1000, maxResults=250, # max allowed per page by the API
orderBy="startTime", orderBy="startTime",
timeMin=min_time, timeMin=min_time,
timeMax="2100-01-01T00:00:00-00:00", timeMax="2100-01-01T00:00:00-00:00",
privateExtendedProperty="blechreizEvent=true", privateExtendedProperty="blechreizEvent=true",
) )
.execute() if page_token:
) kwargs["pageToken"] = page_token
return events.get("items", [])
response = service.events().list(**kwargs).execute()
all_items.extend(response.get("items", []))
page_token = response.get("nextPageToken")
if not page_token:
break
logger.info(f"Fetched {len(all_items)} GCal events (all pages)")
return all_items
except Exception as e: except Exception as e:
logger.error(f"Failed to retrieve GCal events: {e}") logger.error(f"Failed to retrieve GCal events: {e}")
return [] return []
@@ -308,9 +318,7 @@ def delete_all_gcal_events(service=None):
return 0 return 0
# Use batch request for efficiency # Use batch request for efficiency
from googleapiclient.http import BatchHttpRequest batch = service.new_batch_http_request()
batch = BatchHttpRequest()
for gcal_id in gcal_ids: for gcal_id in gcal_ids:
batch.add(service.events().delete(calendarId="primary", eventId=gcal_id)) batch.add(service.events().delete(calendarId="primary", eventId=gcal_id))
@@ -318,7 +326,6 @@ def delete_all_gcal_events(service=None):
batch.execute() batch.execute()
except Exception as e: except Exception as e:
logger.error(f"Error deleting GCal events: {e}") logger.error(f"Error deleting GCal events: {e}")
_invalidate_service_on_error(e)
GCalMapping.objects.all().delete() GCalMapping.objects.all().delete()
@@ -330,6 +337,8 @@ def sync_from_local_to_google(service=None):
Creates a google event for each local event (if it does not exist yet) and Creates a google event for each local event (if it does not exist yet) and
deletes all google events that are not found in local database. deletes all google events that are not found in local database.
Updates participation info of gcal events using local data. Updates participation info of gcal events using local data.
Creates are processed in chunks (future events first) to avoid rate limits.
""" """
if service is None: if service is None:
service = get_service_object() service = get_service_object()
@@ -340,47 +349,75 @@ def sync_from_local_to_google(service=None):
all_events = get_all_gcal_events(service) all_events = get_all_gcal_events(service)
# Map gcal_id -> django_id for every blechreiz-owned event at Google
gcal_id_to_django_id = {}
events_at_google_django_id = set() events_at_google_django_id = set()
events_at_google_google_id = set()
for gcal_ev in all_events: for gcal_ev in all_events:
try: try:
django_id = int(gcal_ev["extendedProperties"]["private"]["blechreizID"]) django_id = int(gcal_ev["extendedProperties"]["private"]["blechreizID"])
events_at_google_django_id.add(django_id) events_at_google_django_id.add(django_id)
events_at_google_google_id.add(gcal_ev["id"]) gcal_id_to_django_id[gcal_ev["id"]] = django_id
except (KeyError, ValueError) as e: except (KeyError, ValueError) as e:
logger.warning(f"Invalid GCal event structure: {e}") logger.warning(f"Invalid GCal event structure: {e}")
local_events_django_id = set(Event.objects.all().values_list("pk", flat=True)) local_events_django_id = set(Event.objects.all().values_list("pk", flat=True))
local_events_google_id = set(
GCalMapping.objects.all().values_list("gcal_id", flat=True)
)
events_to_create_django_id = local_events_django_id - events_at_google_django_id # Repair GCalMapping for events that exist at Google but have no local mapping
events_to_delete_google_id = events_at_google_google_id - local_events_google_id # (can happen when a previous batch failed mid-way)
for gcal_id, django_id in gcal_id_to_django_id.items():
from googleapiclient.http import BatchHttpRequest if django_id in local_events_django_id and not GCalMapping.objects.filter(gcal_id=gcal_id).exists():
batch = BatchHttpRequest()
batch_is_empty = True
for event_django_id in events_to_create_django_id:
try: try:
event = Event.objects.get(pk=event_django_id) event = Event.objects.get(pk=django_id)
batch.add( GCalMapping.objects.get_or_create(event=event, defaults={"gcal_id": gcal_id})
create_gcal_event_request(service, event), logger.info(f"Repaired missing mapping: GCal {gcal_id} <-> Event {django_id}")
callback=on_gcal_event_created,
)
batch_is_empty = False
except Event.DoesNotExist: except Event.DoesNotExist:
pass pass
for event_google_id in events_to_delete_google_id: events_to_create_django_id = local_events_django_id - events_at_google_django_id
batch.add(
service.events().delete(calendarId="primary", eventId=event_google_id)
)
batch_is_empty = False
# Only delete Google events whose local Event no longer exists
# (never delete based on missing GCalMapping — that's just a local cache)
events_to_delete_google_id = {
gcal_id
for gcal_id, django_id in gcal_id_to_django_id.items()
if django_id not in local_events_django_id
}
# --- Deletes: one by one ---
for gcal_id in events_to_delete_google_id:
try:
service.events().delete(calendarId="primary", eventId=gcal_id).execute()
GCalMapping.objects.filter(gcal_id=gcal_id).delete()
except Exception as e:
logger.error(f"Failed to delete GCal event {gcal_id}: {e}")
# --- Creates: future events first (soonest upcoming), then past events ---
today = datetime.date.today()
future_ids = list(
Event.objects.filter(pk__in=events_to_create_django_id, date__gte=today)
.order_by("date")
.values_list("pk", flat=True)
)
past_ids = list(
Event.objects.filter(pk__in=events_to_create_django_id, date__lt=today)
.order_by("-date")
.values_list("pk", flat=True)
)
ordered_create_ids = future_ids # + past_ids
for event_django_id in ordered_create_ids:
try:
event = Event.objects.get(pk=event_django_id)
request = create_gcal_event_request(service, event)
response = request.execute()
_save_gcal_mapping_from_response(response)
except Event.DoesNotExist:
pass
except Exception as e:
logger.error(f"Failed to create GCal event for Event {event_django_id}: {e}")
# --- Updates: attendee status changes ---
for gcal_ev in all_events: for gcal_ev in all_events:
try: try:
event_django_id = int( event_django_id = int(
@@ -391,21 +428,14 @@ def sync_from_local_to_google(service=None):
gcal_attendees = gcal_ev.get("attendees", []) gcal_attendees = gcal_ev.get("attendees", [])
local_attendees = build_gcal_attendees_obj(django_ev) local_attendees = build_gcal_attendees_obj(django_ev)
# Simple comparison - check if attendees differ
if gcal_attendees != local_attendees: if gcal_attendees != local_attendees:
batch.add(update_gcal_event_request(service, django_ev)) update_gcal_event_request(service, django_ev).execute()
batch_is_empty = False
except Event.DoesNotExist: except Event.DoesNotExist:
pass pass
except (KeyError, ValueError): except (KeyError, ValueError):
pass pass
if not batch_is_empty:
try:
batch.execute()
except Exception as e: except Exception as e:
logger.error(f"Error executing batch request: {e}") logger.error(f"Failed to update GCal event: {e}")
_invalidate_service_on_error(e)
return len(events_to_create_django_id), len(events_to_delete_google_id) return len(events_to_create_django_id), len(events_to_delete_google_id)

View File

@@ -0,0 +1,80 @@
"""
One-time OAuth2 setup command for Google Calendar integration.
Run this locally (not in Docker) to authorize the app and generate
the credentials token file (calendarCredentials.dat).
Usage:
python manage.py gcal_setup --client-secrets /path/to/client_secret_*.json
The command opens a browser for the OAuth2 consent flow, then saves
the access+refresh token to the path configured in settings.GCAL_COUPLING['credentials_file'].
Copy that file to the server afterwards.
"""
import os
import pickle
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
# Full read/write access to the user's calendars (required to create/delete events).
SCOPES = ["https://www.googleapis.com/auth/calendar"]


class Command(BaseCommand):
    """One-time OAuth2 authorization flow for the Google Calendar coupling.

    Runs the InstalledApp browser consent flow against the supplied client
    secrets file and pickles the resulting credentials (access + refresh
    token) to ``settings.GCAL_COUPLING["credentials_file"]``.
    """

    help = (
        "Perform one-time OAuth2 authorization for Google Calendar. "
        "Run locally, then copy the resulting credentials file to the server."
    )

    def add_arguments(self, parser):
        # Path to the OAuth client secrets JSON from Google Cloud Console.
        parser.add_argument(
            "--client-secrets",
            required=True,
            metavar="FILE",
            help="Path to the client_secret_*.json downloaded from Google Cloud Console",
        )

    def handle(self, *args, **options):
        # Imported lazily so the rest of the project doesn't require the
        # package — this command is only run once, locally.
        try:
            from google_auth_oauthlib.flow import InstalledAppFlow
        except ImportError:
            raise CommandError(
                "google-auth-oauthlib is not installed. "
                "Run: pip install google-auth-oauthlib"
            )

        client_secrets = options["client_secrets"]
        if not os.path.exists(client_secrets):
            raise CommandError(f"Client secrets file not found: {client_secrets}")

        credentials_file = settings.GCAL_COUPLING["credentials_file"]

        self.stdout.write(
            "Starting OAuth2 flow. A browser window will open for authorization."
        )
        self.stdout.write(
            "Make sure you are authorizing with the Google account whose "
            "calendar you want to use.\n"
        )

        # port=0 lets the local redirect server pick any free port.
        flow = InstalledAppFlow.from_client_secrets_file(client_secrets, SCOPES)
        creds = flow.run_local_server(port=0)

        # Ensure the target directory exists; a bare filename has no dirname,
        # so only create directories when there actually is one.
        credentials_dir = os.path.dirname(credentials_file)
        if credentials_dir:
            os.makedirs(credentials_dir, exist_ok=True)
        with open(credentials_file, "wb") as token:
            pickle.dump(creds, token)

        self.stdout.write(
            self.style.SUCCESS(
                f"Credentials saved to: {credentials_file}\n"
                f"Copy this file to the server at the same path inside the container."
            )
        )
        self.stdout.write(
            "\nTo copy to server (adjust paths as needed):\n"
            f" docker cp {credentials_file} blechreiz:/app/calendarCredentials.dat\n"
            "Or via scp:\n"
            f" scp {credentials_file} core@server.fritz.box:/docker/blechreiz-website/calendarCredentials.dat"
        )

View File

@@ -41,7 +41,7 @@ def event_post_save_handler(sender, instance, created, **kwargs):
Currently disabled - remove the early return to enable. Currently disabled - remove the early return to enable.
""" """
# Disabled - remove this return statement to enable auto-sync # Disabled - remove this return statement to enable auto-sync
return #return
event = instance event = instance
service = get_service_object() service = get_service_object()
@@ -73,7 +73,7 @@ def event_pre_delete_handler(sender, instance, **kwargs):
Currently disabled - remove the early return to enable. Currently disabled - remove the early return to enable.
""" """
# Disabled - remove this return statement to enable auto-sync # Disabled - remove this return statement to enable auto-sync
return #return
event = instance event = instance
service = get_service_object() service = get_service_object()
@@ -100,7 +100,7 @@ def participation_post_save_handler(sender, instance, **kwargs):
Currently disabled - remove the early return to enable. Currently disabled - remove the early return to enable.
""" """
# Disabled - remove this return statement to enable auto-sync # Disabled - remove this return statement to enable auto-sync
return #return
participation = instance participation = instance
service = get_service_object() service = get_service_object()