@@ -2,6 +2,7 @@
 import json
 import logging
 import signal
+import zoneinfo
 from collections import defaultdict
 from urllib.parse import urlparse
 
@@ -276,10 +277,13 @@ class EventsSync(SheetSync):
 		"category",
 	}
 
-	def __init__(self, name, middleware, stop, dbmanager, reverse_sync=False, media_dir=None):
+	def __init__(self, name, middleware, stop, dbmanager, reverse_sync=False, media_dir=None, timezone=None, shifts=None):
 		super().__init__(name, middleware, stop, dbmanager, reverse_sync)
 		self.media_dir = media_dir
 		self.media_downloads = None if media_dir is None else {}
+		self.timezone = timezone
+		self.shifts = shifts
 
 	def observe_rows(self, rows):
 		counts = defaultdict(lambda: 0)
@@ -413,7 +417,14 @@ class PlaylistsSync(SheetSync):
 		event_id: The id of the streamlog event to sync
 	""",
 )
-def main(dbconnect, sync_configs, metrics_port=8005, backdoor_port=0, media_dir="."):
+@argh.arg('--timezone', help="Local timezone for determining shift times")
+@argh.arg('--shifts', type=json.loads, help="""
+	Shift definitions in JSON form.
+	Always present:
+		repeating: a list of repeating shifts. Each of these consists of a sequence of shift name, start hour and end hour. The start and end hours are in local time.
+		one_off: a list of non-repeating shifts. Each of these consists of a sequence of shift name, start and end. A start or end time can be a string representing a timestamp, a URL, or null. If it is a URL, the URL will be queried for a timestamp. If no timezone info is provided, the timestamp will be assumed to be UTC. If the start is null, the start will be assumed to be the earliest possible datetime; if the end is null, it will be assumed to be the latest possible datetime. If both the start and end are null, the shift will be ignored.
+	""")
+def main(dbconnect, sync_configs, metrics_port=8005, backdoor_port=0, media_dir=".", shifts=None, timezone=None):
 	"""
 	Sheet sync constantly scans a Google Sheets sheet and a database, copying inputs from the sheet
 	to the DB and outputs from the DB to the sheet.
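A minimal sketch of a --shifts value matching the format described in the help text above; the shift names, hours, and URLs are purely illustrative and not taken from this patch:

    {
        "repeating": [
            ["Night Shift", 0, 6],
            ["Day Shift", 6, 18]
        ],
        "one_off": [
            ["Special Event", "2024-01-01T00:00:00Z", "https://example.com/end-time"],
            ["Wrap-up", "2024-01-02T12:00:00", null]
        ]
    }

Because the argument is declared with type=json.loads, the string given on the command line is parsed into a dict before main() receives it; per the help text, the URL entry would be queried for the end timestamp and the null end would be treated as the latest possible datetime.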
@@ -508,6 +519,9 @@ def main(dbconnect, sync_configs, metrics_port=8005, backdoor_port=0, media_dir=
 			"archive": ArchiveSync,
 		}[config["type"]]
 		sync_class_kwargs = {}
+		if config["type"] == "events":
+			sync_class_kwargs["timezone"] = zoneinfo.ZoneInfo(timezone)
+			sync_class_kwargs["shifts"] = shifts
 		if config["type"] == "events" and config.get("download_media", False):
 			sync_class_kwargs["media_dir"] = media_dir
 		sync = sync_class(config["name"], middleware, stop, dbmanager, reverse_sync, **sync_class_kwargs)
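For context on the new branch above (an illustrative note, not part of the patch): zoneinfo.ZoneInfo expects an IANA timezone key, so --timezone would be given a name like the one below. ZoneInfo raises if handed None, so as written this branch appears to assume --timezone is always supplied whenever an events-type config is present.

    import zoneinfo

    tz = zoneinfo.ZoneInfo("America/Vancouver")  # illustrative IANA key; any valid key works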