
Merge pull request #13 from emfcamp/master
EMFcamp changes
Kunsi authored Sep 30, 2024
2 parents fb735b9 + a4c0f6a commit 5a9f445
Showing 19 changed files with 336 additions and 255 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
env/
static/asset-*
settings.toml
__pycache__
1 change: 1 addition & 0 deletions .python-version
@@ -0,0 +1 @@
3.11
19 changes: 19 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,19 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python Debugger: Current File",
"type": "debugpy",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"env": {
"SETTINGS": "./settings.toml",
"PYENV_VERSION": "3.11.9"
}
}
]
}
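The launch configuration points the debugger at a local settings.toml through the SETTINGS environment variable. conf.py is not part of this diff, so the following is only a minimal sketch, under the assumption that the settings file is plain TOML parsed with Python 3.11's stdlib tomllib; the project's actual conf module may load it differently.

import os
import tomllib  # stdlib TOML parser, available since Python 3.11

def load_config() -> dict:
    # Hypothetical loader: read the file named by SETTINGS (defaulting to
    # settings.toml) and return it as a dict, which would support the
    # CONFIG.get(...) / CONFIG[...] access pattern seen in frontend.py.
    path = os.environ.get("SETTINGS", "settings.toml")
    with open(path, "rb") as f:  # tomllib requires a binary file object
        return tomllib.load(f)

CONFIG = load_config()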
5 changes: 5 additions & 0 deletions README.md
@@ -41,7 +41,12 @@ systemctl restart nginx
# adapt those settings
cp settings.example.toml settings.toml
# start via systemd
# make a user that can't log in, with no home dir or shell.
useradd -M infobeamer-cms
usermod -L infobeamer-cms
usermod -s /bin/false infobeamer-cms
cp infobeamer-cms.service /etc/systemd/system/
cp infobeamer-cms-runperiodic.service /etc/systemd/system/
cp infobeamer-cms-runperiodic.timer /etc/systemd/system/
175 changes: 87 additions & 88 deletions frontend.py
@@ -1,13 +1,14 @@
import os
from collections import defaultdict
import random
import shutil
import socket
import tempfile
from datetime import datetime
from secrets import token_hex
from typing import Iterable

import iso8601
import requests
from prometheus_client.metrics_core import Metric
from prometheus_client.core import GaugeMetricFamily, REGISTRY
from prometheus_client.registry import Collector
from flask import (
Flask,
abort,
@@ -20,25 +21,32 @@
url_for,
)
from flask_github import GitHub
from prometheus_client import generate_latest
from werkzeug.middleware.proxy_fix import ProxyFix

from conf import CONFIG
from helper import (
State,
admin_required,
cached_asset_name,
error,
get_all_live_assets,
get_asset,
get_assets,
get_assets_awaiting_moderation,
get_random,
get_user_assets,
login_disabled_for_user,
mk_sig,
user_is_admin,
)
from ib_hosted import get_scoped_api_key, ib, update_asset_userdata
from redis_session import RedisSessionStore
from voc_mqtt import send_message

app = Flask(
__name__,
static_folder=CONFIG.get('STATIC_PATH', 'static'),
)
app.secret_key = CONFIG.get('URL_KEY')
app.wsgi_app = ProxyFix(app.wsgi_app)

for copy_key in (
@@ -54,35 +62,55 @@
socket.setdefaulttimeout(3) # for mqtt


github = GitHub(app)
app.session_interface = RedisSessionStore()


def cached_asset_name(asset):
asset_id = asset["id"]
filename = "asset-{}.{}".format(
asset_id,
"jpg" if asset["filetype"] == "image" else "mp4",
)
cache_name = os.path.join(CONFIG.get('STATIC_PATH', 'static'), filename)
class SubmissionsCollector(Collector):
def collect(self) -> Iterable[Metric]:
counts = defaultdict(int)
for a in get_assets():
counts[a.state] += 1
g = GaugeMetricFamily("submissions", "Counts of content submissions", labels=["state"])
for state in State:
# Add any states that we know about but have 0 assets in them
if state.value not in counts.keys():
counts[state.value] = 0
for s, c in counts.items():
g.add_metric([s], c)
yield g

class InfobeamerCollector(Collector):
"""Prometheus collector for general infobeamer metrics available from the hosted API."""
last_got = 0
devices = []
def collect(self) -> Iterable[Metric]:
if (self.last_got + 10) < datetime.now().timestamp():
self.devices = ib.get("device/list")["devices"]
self.last_got = datetime.now().timestamp()
yield GaugeMetricFamily("devices", "Infobeamer devices", len(self.devices))
yield GaugeMetricFamily("devices_online", "Infobeamer devices online", len([d for d in self.devices if d["is_online"]]))
m = GaugeMetricFamily("device_model", "Infobeamer device models", labels=["model"])
counts = defaultdict(int)
for d in self.devices:
if d.get("hw"):
counts[d["hw"]["model"]] += 1
else:
counts["unknown"] += 1
for model, count in counts.items():
m.add_metric([model], count)
yield m


REGISTRY.register(SubmissionsCollector())
REGISTRY.register(InfobeamerCollector())

if not os.path.exists(cache_name):
app.logger.info(f"fetching {asset_id} to {cache_name}")
dl = ib.get(f"asset/{asset_id}/download")
r = requests.get(dl["download_url"], stream=True, timeout=5)
r.raise_for_status()
with tempfile.NamedTemporaryFile(delete=False) as f:
shutil.copyfileobj(r.raw, f)
shutil.move(f.name, cache_name)
os.chmod(cache_name, 0o664)
del r
github = GitHub(app)

return filename
if CONFIG.get("REDIS_HOST"):
app.session_interface = RedisSessionStore(host=CONFIG.get("REDIS_HOST"))


@app.before_request
def before_request():
user = session.get("gh_login")
g.user_is_admin = user_is_admin(user)

if login_disabled_for_user(user):
g.user = None
@@ -111,8 +139,6 @@ def authorized(access_token):
if login_disabled_for_user(github_user["login"]):
return render_template("time_error.jinja")

# app.logger.debug(github_user)

age = datetime.utcnow() - iso8601.parse_date(github_user["created_at"]).replace(
tzinfo=None
)
@@ -192,20 +218,25 @@ def content_list():
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))
assets = get_user_assets()
assets = [a._asdict() for a in get_user_assets()]
random.shuffle(assets)
return jsonify(
assets=assets,
)

@app.route("/content/awaiting_moderation")
@admin_required
def content_awaiting_moderation():
return jsonify([a.to_dict(mod_data=True) for a in get_assets_awaiting_moderation()])


@app.route("/content/upload", methods=["POST"])
def content_upload():
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))

if g.user.lower() not in CONFIG.get("ADMIN_USERS", set()):
if not g.user_is_admin:
max_uploads = CONFIG["MAX_UPLOADS"]
if len(get_user_assets()) >= max_uploads:
return error("You have reached your upload limit")
@@ -281,8 +312,8 @@ def content_request_review(asset_id):
if "state" in asset["userdata"]: # not in new state?
return error("Cannot review")

if g.user.lower() in CONFIG.get("ADMIN_USERS", set()):
update_asset_userdata(asset, state="confirmed")
if g.user_is_admin:
update_asset_userdata(asset, state=State.CONFIRMED)
app.logger.warn(
"auto-confirming {} because it was uploaded by admin {}".format(
asset["id"], g.user
@@ -291,18 +322,7 @@ def content_request_review(asset_id):
return jsonify(ok=True)

moderation_url = url_for(
"content_moderate", asset_id=asset_id, sig=mk_sig(asset_id), _external=True
)

assert (
send_message(
"{asset} uploaded by {user}. Check it at {url}".format(
user=g.user,
asset=asset["filetype"].capitalize(),
url=moderation_url,
),
)[0]
== 0
"content_moderate", asset_id=asset_id, _external=True
)

app.logger.info("moderation url for {} is {}".format(asset["id"], moderation_url))
@@ -311,62 +331,46 @@ def content_request_review(asset_id):
return jsonify(ok=True)


@app.route("/content/moderate/<int:asset_id>-<sig>")
def content_moderate(asset_id, sig):
if sig != mk_sig(asset_id):
app.logger.info(
f"request to moderate asset {asset_id} rejected because of missing or wrong signature"
)
abort(404)
@app.route("/content/moderate/<int:asset_id>")
@admin_required
def content_moderate(asset_id):
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))
elif g.user.lower() not in CONFIG.get("ADMIN_USERS", set()):
elif not g.user_is_admin:
app.logger.warning(f"request to moderate {asset_id} by non-admin user {g.user}")
abort(401)

try:
asset = ib.get(f"asset/{asset_id}")
asset = get_asset(asset_id)
except Exception:
app.logger.info(
f"request to moderate asset {asset_id} failed because asset does not exist"
)
abort(404)

state = asset["userdata"].get("state", "new")
if state == "deleted":
if asset.state == State.DELETED:
app.logger.info(
f"request to moderate asset {asset_id} failed because asset was deleted by user"
)
abort(404)

return render_template(
"moderate.jinja",
asset={
"id": asset["id"],
"user": asset["userdata"]["user"],
"filetype": asset["filetype"],
"url": url_for("static", filename=cached_asset_name(asset)),
"state": state,
},
sig=mk_sig(asset_id),
asset=asset.to_dict(mod_data=True)
)


@app.route(
"/content/moderate/<int:asset_id>-<sig>/<any(confirm,reject):result>",
"/content/moderate/<int:asset_id>/<any(confirm,reject):result>",
methods=["POST"],
)
def content_moderate_result(asset_id, sig, result):
if sig != mk_sig(asset_id):
app.logger.info(
f"request to moderate asset {asset_id} rejected because of missing or wrong signature"
)
abort(404)
@admin_required
def content_moderate_result(asset_id, result):
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))
elif g.user.lower() not in CONFIG.get("ADMIN_USERS", set()):
elif not g.user_is_admin:
app.logger.warning(f"request to moderate {asset_id} by non-admin user {g.user}")
abort(401)

@@ -379,18 +383,18 @@ def content_moderate_result(asset_id, sig, result):
abort(404)

state = asset["userdata"].get("state", "new")
if state == "deleted":
if state == State.DELETED:
app.logger.info(
f"request to moderate asset {asset_id} failed because asset was deleted by user"
)
abort(404)

if result == "confirm":
app.logger.info("Asset {} was confirmed".format(asset["id"]))
update_asset_userdata(asset, state="confirmed")
update_asset_userdata(asset, state=State.CONFIRMED, moderated_by=g.user)
else:
app.logger.info("Asset {} was rejected".format(asset["id"]))
update_asset_userdata(asset, state="rejected")
update_asset_userdata(asset, state=State.REJECTED, moderated_by=g.user)

return jsonify(ok=True)

@@ -436,7 +440,7 @@ def content_delete(asset_id):
return error("Cannot delete")

try:
update_asset_userdata(asset, state="deleted")
update_asset_userdata(asset, state=State.DELETED)
except Exception as e:
app.logger.error(f"content_delete({asset_id}) {repr(e)}")
return error("Cannot delete")
@@ -449,21 +453,16 @@ def content_live():
no_time_filter = request.values.get("all")
assets = get_all_live_assets(no_time_filter=no_time_filter)
random.shuffle(assets)
resp = jsonify(
assets=[
{
"user": asset["userdata"]["user"],
"filetype": asset["filetype"],
"thumb": asset["thumb"],
"url": url_for("static", filename=cached_asset_name(asset)),
}
for asset in assets
]
)
resp = jsonify([a.to_dict(mod_data=g.user_is_admin) for a in assets])
resp.headers["Cache-Control"] = "public, max-age=30"
return resp


@app.route("/metrics")
def metrics():
return generate_latest()


# @app.route("/content/last")
# def content_last():
# assets = get_all_live_assets()
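The new /metrics route builds on prometheus_client's custom-collector pattern used by SubmissionsCollector and InfobeamerCollector above: a class implementing Collector yields metric families from collect(), is registered once on the global REGISTRY, and generate_latest() renders everything in the Prometheus text exposition format. A self-contained sketch of that pattern (illustrative only, not the project's code):

from typing import Iterable

from prometheus_client import generate_latest
from prometheus_client.core import GaugeMetricFamily, REGISTRY
from prometheus_client.metrics_core import Metric
from prometheus_client.registry import Collector


class ExampleCollector(Collector):
    """Toy collector exporting a single gauge with a 'state' label."""

    def collect(self) -> Iterable[Metric]:
        g = GaugeMetricFamily("example_items", "Example items by state", labels=["state"])
        g.add_metric(["confirmed"], 3)
        g.add_metric(["rejected"], 1)
        yield g


REGISTRY.register(ExampleCollector())

# generate_latest(REGISTRY) returns the exposition payload as bytes; the
# /metrics Flask route above returns exactly this value to Prometheus.
print(generate_latest(REGISTRY).decode())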
