
Commit

fix code style
Kunsi committed Sep 30, 2024
1 parent 5a9f445 commit 0452623
Showing 3 changed files with 71 additions and 49 deletions.
42 changes: 24 additions & 18 deletions frontend.py
@@ -1,14 +1,11 @@
from collections import defaultdict
import random
import socket
from collections import defaultdict
from datetime import datetime
from secrets import token_hex
from typing import Iterable

import iso8601
from prometheus_client.metrics_core import Metric
from prometheus_client.core import GaugeMetricFamily, REGISTRY
from prometheus_client.registry import Collector
from flask import (
Flask,
abort,
@@ -22,13 +19,15 @@
)
from flask_github import GitHub
from prometheus_client import generate_latest
from prometheus_client.core import REGISTRY, GaugeMetricFamily
from prometheus_client.metrics_core import Metric
from prometheus_client.registry import Collector
from werkzeug.middleware.proxy_fix import ProxyFix

from conf import CONFIG
from helper import (
State,
admin_required,
cached_asset_name,
error,
get_all_live_assets,
get_asset,
@@ -44,9 +43,9 @@

app = Flask(
__name__,
static_folder=CONFIG.get('STATIC_PATH', 'static'),
static_folder=CONFIG.get("STATIC_PATH", "static"),
)
app.secret_key = CONFIG.get('URL_KEY')
app.secret_key = CONFIG.get("URL_KEY")
app.wsgi_app = ProxyFix(app.wsgi_app)

for copy_key in (
@@ -67,7 +66,9 @@ def collect(self) -> Iterable[Metric]:
counts = defaultdict(int)
for a in get_assets():
counts[a.state] += 1
g = GaugeMetricFamily("submissions", "Counts of content submissions", labels=["state"])
g = GaugeMetricFamily(
"submissions", "Counts of content submissions", labels=["state"]
)
for state in State:
# Add any states that we know about but have 0 assets in them
if state.value not in counts.keys():
@@ -76,17 +77,26 @@ def collect(self) -> Iterable[Metric]:
g.add_metric([s], c)
yield g


class InfobeamerCollector(Collector):
"""Prometheus collector for general infobeamer metrics available from the hosted API."""

last_got = 0
devices = []

def collect(self) -> Iterable[Metric]:
if (self.last_got + 10) < datetime.now().timestamp():
self.devices = ib.get("device/list")["devices"]
self.last_got = datetime.now().timestamp()
yield GaugeMetricFamily("devices", "Infobeamer devices", len(self.devices))
yield GaugeMetricFamily("devices_online", "Infobeamer devices online", len([d for d in self.devices if d["is_online"]]))
m = GaugeMetricFamily("device_model", "Infobeamer device models", labels=["model"])
yield GaugeMetricFamily(
"devices_online",
"Infobeamer devices online",
len([d for d in self.devices if d["is_online"]]),
)
m = GaugeMetricFamily(
"device_model", "Infobeamer device models", labels=["model"]
)
counts = defaultdict(int)
for d in self.devices:
if d.get("hw"):
@@ -224,6 +234,7 @@ def content_list():
assets=assets,
)


@app.route("/content/awaiting_moderation")
@admin_required
def content_awaiting_moderation():
@@ -321,13 +332,11 @@ def content_request_review(asset_id):
)
return jsonify(ok=True)

moderation_url = url_for(
"content_moderate", asset_id=asset_id, _external=True
)
moderation_url = url_for("content_moderate", asset_id=asset_id, _external=True)

app.logger.info("moderation url for {} is {}".format(asset["id"], moderation_url))

update_asset_userdata(asset, state="review")
update_asset_userdata(asset, state=State.REVIEW)
return jsonify(ok=True)


@@ -355,10 +364,7 @@ def content_moderate(asset_id):
)
abort(404)

return render_template(
"moderate.jinja",
asset=asset.to_dict(mod_data=True)
)
return render_template("moderate.jinja", asset=asset.to_dict(mod_data=True))


@app.route(
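
For context, the two collectors above follow prometheus_client's custom-collector pattern: a class implementing collect() that yields metric families once per scrape, registered a single time against the global REGISTRY and exposed through generate_latest(). A minimal sketch of that pattern, assuming an illustrative metric name and /metrics route rather than this repository's actual ones:

from typing import Iterable

from flask import Flask
from prometheus_client import CONTENT_TYPE_LATEST, generate_latest
from prometheus_client.core import REGISTRY, GaugeMetricFamily
from prometheus_client.metrics_core import Metric
from prometheus_client.registry import Collector

app = Flask(__name__)


class ExampleCollector(Collector):
    """Recomputes its gauge on every scrape, like the collectors above."""

    def collect(self) -> Iterable[Metric]:
        g = GaugeMetricFamily("example_items", "Example item count", labels=["state"])
        g.add_metric(["new"], 3)  # label values first, then the sample value
        yield g


REGISTRY.register(ExampleCollector())


@app.route("/metrics")
def metrics():
    # Render every registered collector in the Prometheus text format.
    return generate_latest(REGISTRY), 200, {"Content-Type": CONTENT_TYPE_LATEST}
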
73 changes: 43 additions & 30 deletions helper.py
@@ -1,14 +1,14 @@
import enum
import os
import random
from datetime import datetime
from functools import wraps
import shutil
import tempfile
from typing import Iterable, NamedTuple, Optional
from datetime import datetime
from functools import wraps
from typing import NamedTuple, Optional

from flask import abort, current_app, g, jsonify, url_for
import requests
from flask import abort, current_app, g, jsonify, url_for

from conf import CONFIG
from ib_hosted import ib
@@ -17,24 +17,29 @@
def error(msg):
return jsonify(error=msg), 400


def user_is_admin(user) -> bool:
return user is not None and user.lower() in CONFIG.get("ADMIN_USERS", set())


def admin_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if not g.user_is_admin:
abort(401)
return f(*args, **kwargs)

return decorated_function


class State(enum.StrEnum):
NEW = "new"
CONFIRMED = "confirmed"
REJECTED = "rejected"
DELETED = "deleted"
REVIEW = "review"


class Asset(NamedTuple):
id: str
filetype: str
@@ -53,18 +58,28 @@ def to_dict(self, mod_data=False):
"filetype": self.filetype,
"thumb": self.thumb,
"url": url_for("static", filename=cached_asset_name(self)),
} | ({
"moderate_url": url_for(
"content_moderate", asset_id=self.id, _external=True
),
"moderated_by": self.moderated_by,
"state": self.state,
"starts": self.starts,
"ends": self.ends
} if mod_data else {})
} | (
{
"moderate_url": url_for(
"content_moderate", asset_id=self.id, _external=True
),
"moderated_by": self.moderated_by,
"state": self.state,
"starts": self.starts,
"ends": self.ends,
}
if mod_data
else {}
)


def to_int(num):
return num if isinstance(num, int) else int(num) if (isinstance(num,str) and num.isdigit()) else None
return (
num
if isinstance(num, int)
else int(num) if (isinstance(num, str) and num.isdigit()) else None
)


def parse_asset(asset):
return Asset(
@@ -74,28 +89,29 @@ def parse_asset(asset):
user=asset["userdata"]["user"],
state=State(asset["userdata"].get("state", "new")),
starts=to_int(asset["userdata"].get("starts")),
ends=to_int(asset["userdata"].get("ends"))
ends=to_int(asset["userdata"].get("ends")),
)


def get_asset(id):
return parse_asset(ib.get(f"asset/{id}"))


def get_assets():
assets = ib.get("asset/list")["assets"]
return [ parse_asset(asset) for asset in assets if asset["userdata"].get("user") != None]

def get_user_assets():
return [
a for a in get_assets()
if a.user == g.user and a.state != State.DELETED
parse_asset(asset)
for asset in assets
if asset["userdata"].get("user") is not None
]


def get_user_assets():
return [a for a in get_assets() if a.user == g.user and a.state != State.DELETED]


def get_assets_awaiting_moderation():
return [
asset
for asset in get_assets()
if asset.state == State.NEW
]
return [asset for asset in get_assets() if asset.state == State.NEW]


def get_all_live_assets(no_time_filter=False):
@@ -106,10 +122,7 @@
if asset.state in (State.CONFIRMED,)
and (
no_time_filter
or (
(asset.starts or now) <= now
and (asset.ends or now) >= now
)
or ((asset.starts or now) <= now and (asset.ends or now) >= now)
)
]

@@ -132,7 +145,7 @@ def cached_asset_name(asset: Asset):
asset_id,
"jpg" if asset.filetype == "image" else "mp4",
)
cache_name = os.path.join(CONFIG.get('STATIC_PATH', 'static'), filename)
cache_name = os.path.join(CONFIG.get("STATIC_PATH", "static"), filename)

if not os.path.exists(cache_name):
current_app.logger.info(f"fetching {asset_id} to {cache_name}")
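
One behavioural note on the State(enum.StrEnum) class above and the matching update_asset_userdata(asset, state=State.REVIEW) change in frontend.py: StrEnum members (Python 3.11+) are themselves str instances that compare equal to their value, so the enum member is a drop-in replacement for the bare "review" literal wherever the state is compared or serialized. A small illustration of that property, independent of this repository's code:

import enum
import json


class State(enum.StrEnum):
    NEW = "new"
    REVIEW = "review"


assert State.REVIEW == "review"       # compares equal to the raw string
assert isinstance(State.REVIEW, str)  # and is itself a str
print(json.dumps({"state": State.REVIEW}))  # {"state": "review"}
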
5 changes: 4 additions & 1 deletion syncer.py
@@ -86,7 +86,10 @@ def asset_to_tiles(asset: Asset):
pages.append(
{
"auto_duration": SLIDE_TIME,
"duration": SLIDE_TIME - (FADE_TIME*2), # Because it seems like the fade time is exclusive of the 10 sec, so videos play for 11 secs.
"duration": SLIDE_TIME
- (
FADE_TIME * 2
), # Because it seems like the fade time is exclusive of the 10 sec, so videos play for 11 secs.
"interaction": {"key": ""},
"layout_id": -1, # Use first layout
"overlap": 0,
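
The syncer.py hunk only re-wraps an existing expression, but the arithmetic in the comment is worth spelling out: if the player adds the fade time on top of the configured duration, a nominal 10-second slide stays on screen for 11 seconds, so the duration is reduced by both fades. A worked example with constant values assumed from the comment (they are not read from syncer.py):

SLIDE_TIME = 10   # assumed: intended total seconds per slide
FADE_TIME = 0.5   # assumed: seconds per fade (in and out)

duration = SLIDE_TIME - (FADE_TIME * 2)     # 9.0 seconds of configured duration
total_on_screen = duration + 2 * FADE_TIME  # 10.0 seconds including both fades
print(duration, total_on_screen)
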

