Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion bbblb/cli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ async def main(ctx, obj, config_file, config, verbose):

config_.populate()
config_.set(WORKER=False)
ctx.obj = await bootstrap(config_, autostart=False, logging=False)
ctx.obj = await bootstrap(config_, logging=False)


# Auto-load all modules in the bbblb.cli package to load all commands.
Expand Down
35 changes: 35 additions & 0 deletions bbblb/migrations/versions/cbee8afa1ca2_meeting_stats_ts_index.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
"""MeetingStats ts index

Revision ID: cbee8afa1ca2
Revises: 988e3ce2a20e
Create Date: 2026-04-10 11:28:44.741959

"""

from typing import Sequence, Union

from alembic import op


# revision identifiers, used by Alembic.
revision: str = "cbee8afa1ca2"
down_revision: Union[str, Sequence[str], None] = "988e3ce2a20e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # Batch mode keeps this migration portable to dialects with limited
    # ALTER TABLE support (e.g. SQLite); on PostgreSQL it emits a plain
    # CREATE INDEX.
    with op.batch_alter_table("meeting_stats", schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f("ix_meeting_stats_ts"),
            ["ts"],
            unique=False,
            # BRIN suits a mostly append-only, time-ordered column on
            # PostgreSQL; other dialects ignore this option.
            postgresql_using="brin",
        )


def downgrade() -> None:
    """Downgrade schema."""
    with op.batch_alter_table("meeting_stats", schema=None) as batch_op:
        # NOTE(review): postgresql_using has no effect on DROP INDEX; it is
        # presumably kept only to mirror upgrade(). Harmless, but could be
        # dropped.
        batch_op.drop_index(batch_op.f("ix_meeting_stats_ts"), postgresql_using="brin")
47 changes: 47 additions & 0 deletions bbblb/migrations/versions/faacfea3b608_protected_recordings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
"""Protected recordings

Revision ID: faacfea3b608
Revises: cbee8afa1ca2
Create Date: 2026-04-10 14:55:08.788193

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "faacfea3b608"
down_revision: Union[str, Sequence[str], None] = "cbee8afa1ca2"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # One-time view tickets for protected recordings; rows disappear with
    # their recording via ON DELETE CASCADE.
    op.create_table(
        "view_tickets",
        sa.Column("uuid", sa.Uuid(), nullable=False),
        sa.Column("recording_fk", sa.Integer(), nullable=False),
        sa.Column("expire", sa.DateTime(), nullable=False),
        sa.Column("consumed", sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ["recording_fk"],
            ["recordings.id"],
            name=op.f("fk_view_tickets_recording_fk_recordings"),
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("uuid", name=op.f("pk_view_tickets")),
    )
    with op.batch_alter_table("recordings", schema=None) as batch_op:
        # server_default is required so the NOT NULL column can be added to a
        # table that already contains rows; existing recordings start out
        # unprotected, matching the ORM-level default.
        batch_op.add_column(
            sa.Column(
                "protected", sa.Boolean(), nullable=False, server_default=sa.false()
            )
        )


def downgrade() -> None:
    """Downgrade schema."""
    # Reverse order of upgrade(): drop the added column first, then the
    # dependent table.
    with op.batch_alter_table("recordings", schema=None) as batch_op:
        batch_op.drop_column("protected")

    op.drop_table("view_tickets")
94 changes: 83 additions & 11 deletions bbblb/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import enum
import logging
import typing
import uuid
from uuid import UUID

import datetime
Expand Down Expand Up @@ -148,6 +149,29 @@ def process_result_value(self, value: int | None, dialect):
return None


class TZDateTime(TypeDecorator):
    """A DateTime column type that stores timestamps as UTC without
    timezone, but returns `datetime` with a timezone.

    This is a workaround for sqlalchemy+sqlite3 which ignores DateTime(timezone)
    setting and returns `datetime` objects without a timezone."""

    impl = DateTime
    # Safe to cache generated SQL: this type has no per-instance state.
    cache_ok = True

    def process_bind_param(self, value, dialect):
        """Normalize an aware datetime to naive UTC before it is written.

        Raises TypeError for naive datetimes, so an ambiguous timestamp can
        never be stored by accident.
        """
        if value is not None:
            # An aware datetime has tzinfo AND a defined utcoffset().
            if not value.tzinfo or value.tzinfo.utcoffset(value) is None:
                raise TypeError("tzinfo is required")
            value = value.astimezone(datetime.timezone.utc).replace(tzinfo=None)
        return value

    def process_result_value(self, value, dialect):
        """Re-attach UTC to values read from the database.

        Stored values are naive UTC (see process_bind_param), so callers
        always receive timezone-aware datetimes.
        """
        if value is not None and value.tzinfo is None:
            value = value.replace(tzinfo=datetime.timezone.utc)
        return value

class ORMMixin:
@classmethod
def select(cls, *a, **filter):
Expand Down Expand Up @@ -205,7 +229,7 @@ class Lock(Base):
name: Mapped[str] = mapped_column(primary_key=True)
owner: Mapped[str] = mapped_column(nullable=False)
ts: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), insert_default=utcnow, onupdate=utcnow, nullable=False
TZDateTime(), insert_default=utcnow, onupdate=utcnow, nullable=False
)

def __str__(self):
Expand Down Expand Up @@ -426,10 +450,10 @@ class Meeting(Base):
server: Mapped["Server"] = relationship(lazy=False)

created: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), insert_default=utcnow, nullable=False
TZDateTime(), insert_default=utcnow, nullable=False
)
modified: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), insert_default=utcnow, onupdate=utcnow, nullable=False
TZDateTime(), insert_default=utcnow, onupdate=utcnow, nullable=False
)

def __str__(self):
Expand All @@ -449,7 +473,7 @@ class MeetingStats(Base):
#: entries created during the same poll interval, so we can group
#: over the timestamp later.
ts: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), insert_default=utcnow, nullable=False
TZDateTime(), insert_default=utcnow, nullable=False
)
uuid: Mapped[UUID] = mapped_column(nullable=False)
meeting_id: Mapped[str] = mapped_column(nullable=False)
Expand Down Expand Up @@ -482,7 +506,7 @@ class Callback(Base):
#: Original callback URL (optional)
forward: Mapped[str] = mapped_column(nullable=True)
created: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), insert_default=utcnow, nullable=False
TZDateTime(), insert_default=utcnow, nullable=False
)


Expand Down Expand Up @@ -510,13 +534,10 @@ class Recording(Base):
)

# Non-essential but nice to have attributes
started: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), nullable=False
)
ended: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), nullable=False
)
started: Mapped[datetime.datetime] = mapped_column(TZDateTime(), nullable=False)
ended: Mapped[datetime.datetime] = mapped_column(TZDateTime(), nullable=False)
participants: Mapped[int] = mapped_column(nullable=False, default=0)
protected: Mapped[bool] = mapped_column(nullable=False, default=False)

@validates("meta")
def validate_meta(self, key, meta):
Expand Down Expand Up @@ -550,6 +571,57 @@ class PlaybackFormat(Base):
xml: Mapped[str] = mapped_column(nullable=False)


class ViewTicket(Base):
    """One-time ticket granting time-limited access to a protected recording."""

    __tablename__ = "view_tickets"

    # Random token handed out to a single viewer; also the primary key.
    uuid: Mapped[UUID] = mapped_column(primary_key=True)
    # Tickets vanish with their recording (ON DELETE CASCADE).
    recording_fk: Mapped[int] = mapped_column(
        ForeignKey("recordings.id", ondelete="CASCADE"), nullable=False
    )
    recording: Mapped[Recording] = relationship(lazy=False)
    # Timezone-aware UTC deadline after which the ticket is invalid.
    expire: Mapped[datetime.datetime] = mapped_column(TZDateTime(), nullable=False)
    consumed: Mapped[bool] = mapped_column(nullable=False, default=False)

    def is_expired(self):
        """Return True if the ticket lifetime has passed."""
        return utcnow() > self.expire

    @classmethod
    def create(cls, recording: Recording, lifetime: datetime.timedelta) -> "ViewTicket":
        """Return a new (not yet persisted) ticket valid for *lifetime*."""
        return cls(
            uuid=uuid.uuid4(),
            recording=recording,
            expire=utcnow() + lifetime,
        )

    @classmethod
    def delete_expired(cls):
        """Delete all tickets whose lifetime has passed."""
        return cls.delete(cls.expire < utcnow())

    async def consume(self, session: AsyncSession, commit=False) -> bool:
        """Atomically mark a valid ticket as consumed.

        Returns True if the ticket existed, was not expired and not already consumed.
        """
        # A single conditional UPDATE ... RETURNING ensures that two
        # concurrent consumers cannot both succeed for the same ticket.
        result = await session.execute(
            update(ViewTicket)
            .where(ViewTicket.uuid == self.uuid)
            .where(ViewTicket.expire > utcnow())
            .where(ViewTicket.consumed.is_(False))
            .values(consumed=True)
            .returning(ViewTicket.uuid)
        )
        row = result.fetchone()
        if row is None:
            # No row matched: unknown, expired, or already consumed.
            return False
        if commit:
            await session.commit()
        # Keep the in-memory object in sync with the database state.
        self.consumed = True
        return True

    def __str__(self):
        return f"ViewTicket(rec={self.recording.record_id} ticket={self.uuid})"


# class Task(Base):
# __tablename__ = "tasks"
# id: Mapped[int] = mapped_column(primary_key=True)
Expand Down
7 changes: 1 addition & 6 deletions bbblb/services/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,9 +285,7 @@ def configure_logging(config: BBBLBConfig):
)


async def bootstrap(
config: BBBLBConfig, autostart=True, logging=True
) -> ServiceRegistry:
async def bootstrap(config: BBBLBConfig, logging=True) -> ServiceRegistry:
import bbblb.services.poller
import bbblb.services.recording
import bbblb.services.analytics
Expand Down Expand Up @@ -317,9 +315,6 @@ def watch_debug_level(name, old, new):
ctx.register(bbblb.services.analytics.AnalyticsHandler)
ctx.register(bbblb.services.tenants.TenantCache)

if autostart:
await ctx.start_all()

LOG.debug("Bootstrapping completed!")

return ctx
3 changes: 3 additions & 0 deletions bbblb/services/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,9 @@ async def on_start(self):

current, target = await check_migration_state(self._db_url)
if current != target and self._migrate:
LOG.info(
f"Migrating database from schema revision {current!r} to {target!r} ..."
)
await migrate_db(self._db_url)
elif current != target:
LOG.error(f"Expected schema revision {target!r} but found {current!r}.")
Expand Down
8 changes: 7 additions & 1 deletion bbblb/services/recording.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,11 @@ class RecordingImportError(RuntimeError):
pass


def playback_to_xml(config: BBBLBConfig, playback: model.PlaybackFormat) -> Element:
def playback_to_xml(
config: BBBLBConfig,
playback: model.PlaybackFormat,
ticket_prefix: str | None = None,
) -> Element:
orig = lxml.etree.fromstring(playback.xml)
playback_domain = config.PLAYBACK_DOMAIN.format(
DOMAIN=config.DOMAIN, REALM=playback.recording.tenant.realm
Expand Down Expand Up @@ -80,6 +84,8 @@ def playback_to_xml(config: BBBLBConfig, playback: model.PlaybackFormat) -> Elem
url = url._replace(scheme="https", netloc=playback_domain)
if url.path.startswith(f"/{playback.format}"):
url = url._replace(path=f"/playback{url.path}")
if ticket_prefix and url.path.startswith("/playback/"):
url = url._replace(path=ticket_prefix + url.path)
node.text = url.geturl()

return result
Expand Down
26 changes: 26 additions & 0 deletions bbblb/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,11 +210,37 @@ class BBBLBConfig(BaseConfig):
then BBBLB will import new recordings as 'unpublished' regardless of
their original state. """

PROTECTED_RECORDINGS: bool = False
""" If enabled, BBBLB will support *protected recordings* similar to
the experimental and unofficial API extension implemented by
Scalelite and Greenlight.

For protected recordings, the getRecordings API will replace
recording links with a one-time ticket that allows a single user to
watch the protected recording for a limited amount of time.

Warning: This feature needs additional configuration if recordings
are not served through BBBLB, and does not prevent downloads. Read
the documentation to understand requirements and limitations of this
feature."""

PROTECTED_RECORDINGS_TIMEOUT: int = 360
""" Number of minutes a protected recording can be watched wth a
ticket after it has been issued. """

PLAYBACK_DOMAIN: str = "{DOMAIN}"
""" Domain where recordings are hostet. The wildcards {DOMAIN} or {REALM}
can be used to refer to the global DOMAIN config, or the realm of the
current tenant. """

PLAYBACK_PLAYER_ROOT: Path | None = None
""" Absolute path to a copy of the bbb-playback presentation player.
You can leave this blank if you serve the bbb-playback assets via
a front-end webserver or CDN.

Defaults to `{PATH_DATA}/htdocs/playback/presentation/2.3/`
"""

POLL_INTERVAL: int = 30
""" Poll interval in seconds for the background server health and meeting
checker. This also defines the timeout for each individual poll, and
Expand Down
17 changes: 17 additions & 0 deletions bbblb/utils.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# Copyright (C) 2025, 2026 Marcel Hellkamp
# SPDX-License-Identifier: AGPL-3.0-or-later

import hashlib
import hmac
import typing
import re

Expand Down Expand Up @@ -64,3 +66,18 @@ def checked_cast(type_: type[T], value: typing.Any) -> T:
if isinstance(value, type_):
return value
raise TypeError(f"Expected {type_} but got {type(value)}")


def hmac_sign(payload: str, secret: str) -> str:
    """Sign *payload* with HMAC-SHA256 derived from *secret*.

    Returns ``"<hex-signature>:<payload>"``, the format expected by
    hmac_verify().
    """
    mac = hmac.new(secret.encode("UTF8"), payload.encode("UTF8"), hashlib.sha256)
    return ":".join((mac.hexdigest(), payload))


def hmac_verify(untrtusted: str, secret: str) -> str | None:
sig, sep, payload = untrtusted.partition(":")
if sig and sep:
check = hmac.digest(
secret.encode("UTF8"), payload.encode("UTF8"), hashlib.sha256
)
if hmac.compare_digest(check, bytes.fromhex(sig)):
return payload
Loading