EMFcamp changes #13

Merged: 29 commits, Sep 30, 2024

Commits (changes from all commits)
9df8b02
Make redis session store optional
wlcx May 10, 2024
a433c46
Replace utcnow use
wlcx May 10, 2024
58c49d6
Update all the deps
wlcx May 10, 2024
57b096f
Update faq for emf, add link to gh repo
wlcx May 13, 2024
74157c5
Add user admin helper
wlcx May 13, 2024
abdb5d7
Add unmoderated view on homepage
wlcx May 15, 2024
b7e54fd
Add prometheus metrics
wlcx May 15, 2024
bc1270b
Add all known states to metrics
wlcx May 15, 2024
2e29478
Add general infobeamer metrics
wlcx May 15, 2024
3254593
Unbreak all the stuff I broke
wlcx May 17, 2024
ddb3ca0
Fix moderation
wlcx May 17, 2024
d8a08f8
Basic updates to python 3.11
mstratford May 18, 2024
3dd9aee
Update syncer to new infobeamer data
mstratford May 18, 2024
923dbf4
Update systemd timer service to use the python syncer.py
mstratford May 18, 2024
ac5407a
syncer needs the config too :)
mstratford May 18, 2024
3fbaab7
Add readme for user creation for systemd service
mstratford May 18, 2024
cb5dfa4
Fix state display and show the user's selected start/end time during …
mstratford May 18, 2024
7356d01
Make sure start and ends are integers like the time checking code exp…
mstratford May 18, 2024
d20071a
Merge branch 'mstratford/syncing'
mstratford May 18, 2024
29a88da
Do .getLocaleString on the date, not the number :P
mstratford May 18, 2024
23d61a8
Account for fade durations for videos
mstratford May 25, 2024
f8adc65
Add more text to hopefully tell people to stop uploading loads of times.
mstratford May 31, 2024
f1e3460
Remove mqtt stuff
wlcx Jun 1, 2024
2ebf8d7
Nav is hidden on mobile, make that obvious
mstratford Jun 1, 2024
40ed277
Merge branch 'master' of https://github.com/emfcamp/infobeamer-conten…
mstratford Jun 1, 2024
b33984a
More text
mstratford Jun 1, 2024
e99785b
Add a space.
mstratford Jun 1, 2024
8552d21
Add review state
mstratford Jun 1, 2024
a4c0f6a
Open submission direct link in a new tab
wlcx Jun 3, 2024
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
env/
static/asset-*
settings.toml
__pycache__
1 change: 1 addition & 0 deletions .python-version
@@ -0,0 +1 @@
3.11
19 changes: 19 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,19 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python Debugger: Current File",
"type": "debugpy",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"env": {
"SETTINGS": "./settings.toml",
"PYENV_VERSION": "3.11.9"
}
}
]
}
5 changes: 5 additions & 0 deletions README.md
@@ -41,7 +41,12 @@ systemctl restart nginx
# adapt those settings
cp settings.example.toml settings.toml


# start via systemd
# make a user that can't log in, with no home dir or shell.
useradd -M infobeamer-cms
usermod -L infobeamer-cms
usermod -s /bin/false infobeamer-cms
cp infobeamer-cms.service /etc/systemd/system/
cp infobeamer-cms-runperiodic.service /etc/systemd/system/
cp infobeamer-cms-runperiodic.timer /etc/systemd/system/
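The useradd/usermod sequence above can usually be collapsed into a single call; a minimal sketch, assuming a distro whose useradd supports these long options (a freshly created account has no usable password, so it is effectively locked without usermod -L):

# same effect in one step: no home directory, no login shell
useradd --no-create-home --shell /bin/false infobeamer-cms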
175 changes: 87 additions & 88 deletions frontend.py
@@ -1,13 +1,14 @@
import os
from collections import defaultdict
import random
import shutil
import socket
import tempfile
from datetime import datetime
from secrets import token_hex
from typing import Iterable

import iso8601
import requests
from prometheus_client.metrics_core import Metric
from prometheus_client.core import GaugeMetricFamily, REGISTRY
from prometheus_client.registry import Collector
from flask import (
Flask,
abort,
@@ -20,25 +21,32 @@
url_for,
)
from flask_github import GitHub
from prometheus_client import generate_latest
from werkzeug.middleware.proxy_fix import ProxyFix

from conf import CONFIG
from helper import (
State,
admin_required,
cached_asset_name,
error,
get_all_live_assets,
get_asset,
get_assets,
get_assets_awaiting_moderation,
get_random,
get_user_assets,
login_disabled_for_user,
mk_sig,
user_is_admin,
)
from ib_hosted import get_scoped_api_key, ib, update_asset_userdata
from redis_session import RedisSessionStore
from voc_mqtt import send_message

app = Flask(
__name__,
static_folder=CONFIG.get('STATIC_PATH', 'static'),
)
app.secret_key = CONFIG.get('URL_KEY')
app.wsgi_app = ProxyFix(app.wsgi_app)

for copy_key in (
@@ -54,35 +62,55 @@
socket.setdefaulttimeout(3) # for mqtt


github = GitHub(app)
app.session_interface = RedisSessionStore()


def cached_asset_name(asset):
asset_id = asset["id"]
filename = "asset-{}.{}".format(
asset_id,
"jpg" if asset["filetype"] == "image" else "mp4",
)
cache_name = os.path.join(CONFIG.get('STATIC_PATH', 'static'), filename)
class SubmissionsCollector(Collector):
def collect(self) -> Iterable[Metric]:
counts = defaultdict(int)
for a in get_assets():
counts[a.state] += 1
g = GaugeMetricFamily("submissions", "Counts of content submissions", labels=["state"])
for state in State:
# Add any states that we know about but have 0 assets in them
if state.value not in counts.keys():
counts[state.value] = 0
for s, c in counts.items():
g.add_metric([s], c)
yield g

class InfobeamerCollector(Collector):
"""Prometheus collector for general infobeamer metrics available from the hosted API."""
last_got = 0
devices = []
def collect(self) -> Iterable[Metric]:
if (self.last_got + 10) < datetime.now().timestamp():
self.devices = ib.get("device/list")["devices"]
self.last_got = datetime.now().timestamp()
yield GaugeMetricFamily("devices", "Infobeamer devices", len(self.devices))
yield GaugeMetricFamily("devices_online", "Infobeamer devices online", len([d for d in self.devices if d["is_online"]]))
m = GaugeMetricFamily("device_model", "Infobeamer device models", labels=["model"])
counts = defaultdict(int)
for d in self.devices:
if d.get("hw"):
counts[d["hw"]["model"]] += 1
else:
counts["unknown"] += 1
for model, count in counts.items():
m.add_metric([model], count)
yield m


REGISTRY.register(SubmissionsCollector())
REGISTRY.register(InfobeamerCollector())

if not os.path.exists(cache_name):
app.logger.info(f"fetching {asset_id} to {cache_name}")
dl = ib.get(f"asset/{asset_id}/download")
r = requests.get(dl["download_url"], stream=True, timeout=5)
r.raise_for_status()
with tempfile.NamedTemporaryFile(delete=False) as f:
shutil.copyfileobj(r.raw, f)
shutil.move(f.name, cache_name)
os.chmod(cache_name, 0o664)
del r
github = GitHub(app)

return filename
if CONFIG.get("REDIS_HOST"):
app.session_interface = RedisSessionStore(host=CONFIG.get("REDIS_HOST"))


@app.before_request
def before_request():
user = session.get("gh_login")
g.user_is_admin = user_is_admin(user)

if login_disabled_for_user(user):
g.user = None
@@ -111,8 +139,6 @@ def authorized(access_token):
if login_disabled_for_user(github_user["login"]):
return render_template("time_error.jinja")

# app.logger.debug(github_user)

age = datetime.utcnow() - iso8601.parse_date(github_user["created_at"]).replace(
tzinfo=None
)
@@ -192,20 +218,25 @@ def content_list():
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))
assets = get_user_assets()
assets = [a._asdict() for a in get_user_assets()]
random.shuffle(assets)
return jsonify(
assets=assets,
)

@app.route("/content/awaiting_moderation")
@admin_required
def content_awaiting_moderation():
return jsonify([a.to_dict(mod_data=True) for a in get_assets_awaiting_moderation()])


@app.route("/content/upload", methods=["POST"])
def content_upload():
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))

if g.user.lower() not in CONFIG.get("ADMIN_USERS", set()):
if not g.user_is_admin:
max_uploads = CONFIG["MAX_UPLOADS"]
if len(get_user_assets()) >= max_uploads:
return error("You have reached your upload limit")
@@ -281,8 +312,8 @@ def content_request_review(asset_id):
if "state" in asset["userdata"]: # not in new state?
return error("Cannot review")

if g.user.lower() in CONFIG.get("ADMIN_USERS", set()):
update_asset_userdata(asset, state="confirmed")
if g.user_is_admin:
update_asset_userdata(asset, state=State.CONFIRMED)
app.logger.warn(
"auto-confirming {} because it was uploaded by admin {}".format(
asset["id"], g.user
@@ -291,18 +322,7 @@ def content_request_review(asset_id):
return jsonify(ok=True)

moderation_url = url_for(
"content_moderate", asset_id=asset_id, sig=mk_sig(asset_id), _external=True
)

assert (
send_message(
"{asset} uploaded by {user}. Check it at {url}".format(
user=g.user,
asset=asset["filetype"].capitalize(),
url=moderation_url,
),
)[0]
== 0
"content_moderate", asset_id=asset_id, _external=True
)

app.logger.info("moderation url for {} is {}".format(asset["id"], moderation_url))
@@ -311,62 +331,46 @@ def content_request_review(asset_id):
return jsonify(ok=True)


@app.route("/content/moderate/<int:asset_id>-<sig>")
def content_moderate(asset_id, sig):
if sig != mk_sig(asset_id):
app.logger.info(
f"request to moderate asset {asset_id} rejected because of missing or wrong signature"
)
abort(404)
@app.route("/content/moderate/<int:asset_id>")
@admin_required
def content_moderate(asset_id):
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))
elif g.user.lower() not in CONFIG.get("ADMIN_USERS", set()):
elif not g.user_is_admin:
app.logger.warning(f"request to moderate {asset_id} by non-admin user {g.user}")
abort(401)

try:
asset = ib.get(f"asset/{asset_id}")
asset = get_asset(asset_id)
except Exception:
app.logger.info(
f"request to moderate asset {asset_id} failed because asset does not exist"
)
abort(404)

state = asset["userdata"].get("state", "new")
if state == "deleted":
if asset.state == State.DELETED:
app.logger.info(
f"request to moderate asset {asset_id} failed because asset was deleted by user"
)
abort(404)

return render_template(
"moderate.jinja",
asset={
"id": asset["id"],
"user": asset["userdata"]["user"],
"filetype": asset["filetype"],
"url": url_for("static", filename=cached_asset_name(asset)),
"state": state,
},
sig=mk_sig(asset_id),
asset=asset.to_dict(mod_data=True)
)


@app.route(
"/content/moderate/<int:asset_id>-<sig>/<any(confirm,reject):result>",
"/content/moderate/<int:asset_id>/<any(confirm,reject):result>",
methods=["POST"],
)
def content_moderate_result(asset_id, sig, result):
if sig != mk_sig(asset_id):
app.logger.info(
f"request to moderate asset {asset_id} rejected because of missing or wrong signature"
)
abort(404)
@admin_required
def content_moderate_result(asset_id, result):
if not g.user:
session["redirect_after_login"] = request.url
return redirect(url_for("login"))
elif g.user.lower() not in CONFIG.get("ADMIN_USERS", set()):
elif not g.user_is_admin:
app.logger.warning(f"request to moderate {asset_id} by non-admin user {g.user}")
abort(401)

@@ -379,18 +383,18 @@ def content_moderate_result(asset_id, sig, result):
abort(404)

state = asset["userdata"].get("state", "new")
if state == "deleted":
if state == State.DELETED:
app.logger.info(
f"request to moderate asset {asset_id} failed because asset was deleted by user"
)
abort(404)

if result == "confirm":
app.logger.info("Asset {} was confirmed".format(asset["id"]))
update_asset_userdata(asset, state="confirmed")
update_asset_userdata(asset, state=State.CONFIRMED, moderated_by=g.user)
else:
app.logger.info("Asset {} was rejected".format(asset["id"]))
update_asset_userdata(asset, state="rejected")
update_asset_userdata(asset, state=State.REJECTED, moderated_by=g.user)

return jsonify(ok=True)

@@ -436,7 +440,7 @@ def content_delete(asset_id):
return error("Cannot delete")

try:
update_asset_userdata(asset, state="deleted")
update_asset_userdata(asset, state=State.DELETED)
except Exception as e:
app.logger.error(f"content_delete({asset_id}) {repr(e)}")
return error("Cannot delete")
@@ -449,21 +453,16 @@ def content_live():
no_time_filter = request.values.get("all")
assets = get_all_live_assets(no_time_filter=no_time_filter)
random.shuffle(assets)
resp = jsonify(
assets=[
{
"user": asset["userdata"]["user"],
"filetype": asset["filetype"],
"thumb": asset["thumb"],
"url": url_for("static", filename=cached_asset_name(asset)),
}
for asset in assets
]
)
resp = jsonify([a.to_dict(mod_data=g.user_is_admin) for a in assets])
resp.headers["Cache-Control"] = "public, max-age=30"
return resp


@app.route("/metrics")
def metrics():
return generate_latest()
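The /metrics route above serves the default registry (including the SubmissionsCollector and InfobeamerCollector registered in this PR) in the Prometheus text exposition format. A quick way to eyeball it from a shell, assuming the app is reachable on localhost:8000 (no port is pinned in this diff):

# metric families added here: submissions{state=...}, devices, devices_online, device_model{model=...}
curl -s http://localhost:8000/metrics | grep -E '^(submissions|device)'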


# @app.route("/content/last")
# def content_last():
# assets = get_all_live_assets()