Skip to content

Commit

Permalink
Merge pull request #181 from neutrons/autoreducer_last_message
Browse files Browse the repository at this point in the history
Show the last workflow RunStatus that an autoreducer node has sent
  • Loading branch information
rosswhitfield authored Sep 17, 2024
2 parents 587720b + 2118ee0 commit 154c713
Show file tree
Hide file tree
Showing 5 changed files with 70 additions and 15 deletions.
4 changes: 2 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ services:
CONFIG_FILE: ./tests/configuration/post_process_consumer.conf
hostname: autoreducer
healthcheck:
test: ["CMD", "pgrep", "python"]
test: ["CMD", "pgrep", "queueProcessor"]
depends_on:
activemq:
condition: service_healthy
Expand All @@ -134,7 +134,7 @@ services:
CONFIG_FILE: ./tests/configuration/post_process_consumer.himem.conf
hostname: autoreducer.himem
healthcheck:
test: ["CMD", "pgrep", "python"]
test: ["CMD", "pgrep", "queueProcessor"]
depends_on:
activemq:
condition: service_healthy
Expand Down
29 changes: 20 additions & 9 deletions src/webmon_app/reporting/dasmon/view_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
@copyright: 2014 Oak Ridge National Laboratory
"""

from reporting.report.models import Instrument, DataRun, WorkflowSummary
from reporting.report.models import Instrument, DataRun, WorkflowSummary, Information
from reporting.dasmon.models import (
Parameter,
StatusVariable,
Expand Down Expand Up @@ -545,26 +545,37 @@ def postprocessing_diagnostics(timeout=None):
)
nodes.append(
{
"node": "%s PID %s"
% (
item.name[
len(settings.SYSTEM_STATUS_PREFIX) : len(item.name) - 4 # noqa E203
],
last_value.value,
),
"node": item.name[
len(settings.SYSTEM_STATUS_PREFIX) : len(item.name) - 4 # noqa E203
],
"time": timezone.localtime(last_value.timestamp),
"msg": f"PID: {last_value.value}",
}
)
else:
node = item.name[len(settings.SYSTEM_STATUS_PREFIX) :] # noqa E203
last_value = StatusCache.objects.filter(instrument_id=common_services, key_id=item).latest(
"timestamp"
)
nodes.append(
{
"node": item.name[len(settings.SYSTEM_STATUS_PREFIX) :], # noqa E203
"node": node,
"time": timezone.localtime(last_value.timestamp),
}
)

# get last run status performed from Information table by matching node name to description
try:
last_status = Information.objects.filter(description=node).latest("id")
nodes.append(
{
"node": node, # noqa E203
"time": timezone.localtime(last_status.run_status_id.created_on),
"msg": f"Last msg: {last_status.run_status_id}",
}
)
except:
pass
except: # noqa: E722
nodes.append(
{
Expand Down
2 changes: 1 addition & 1 deletion src/webmon_app/reporting/templates/dasmon/diagnostics.html
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@
<table>
<tbody>
{% for item in post_diagnostics.ar_nodes %}
<tr><td>{{ item.node }}: </td><td>{{ item.time }}</td></tr>
<tr><td style="white-space:nowrap;">{{ item.node }}: </td><td style="white-space:nowrap;">{{ item.time }}</td><td>{{ item.msg }}</td></tr>
{% endfor %}
</tbody>
</table>
Expand Down
48 changes: 46 additions & 2 deletions src/webmon_app/reporting/tests/test_dasmon/test_view_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from django.contrib.auth.models import Group
from django.utils import timezone

from reporting.report.models import Instrument
from reporting.report.models import Instrument, Information, RunStatus, StatusQueue
from reporting.dasmon.models import ActiveInstrument, Parameter, StatusCache, StatusVariable, Signal
from workflow.database.report.models import DataRun
from workflow.database.report.models import IPTS
Expand Down Expand Up @@ -446,14 +446,58 @@ def test_workflow_diagnostics(self):
def test_postprocessing_diagnostics(self):
from reporting.dasmon.view_util import postprocessing_diagnostics

# add postprocessing services
common = Instrument.objects.get(name="common")
name_postprocessor = settings.SYSTEM_STATUS_PREFIX + "autoreducer4.com"
para_postprocessor = Parameter.objects.create(name=name_postprocessor)
para_postprocessor.save()
StatusCache.objects.create(
instrument_id=common,
key_id=para_postprocessor,
value=0,
timestamp=timezone.now(),
)
para_postprocessor_pid = Parameter.objects.create(name=name_postprocessor + "_pid")
para_postprocessor_pid.save()
StatusCache.objects.create(
instrument_id=common,
key_id=para_postprocessor_pid,
value=7,
timestamp=timezone.now(),
)

# create StatusQueue, DataRun, RunStatus and Information needed for test
inst = Instrument.objects.get(name="testinst")
queue = StatusQueue(name="REDUCTION.COMPLETE")
queue.save()
ipts = IPTS(expt_name="IPTS-42")
ipts.save()
dataRun = DataRun(run_number=42, ipts_id=ipts, instrument_id=inst, file="/filename")
dataRun.save()
runStatus = RunStatus(run_id=dataRun, queue_id=queue)
runStatus.save()
info = Information(run_status_id=runStatus, description="autoreducer4.com")
info.save()

red_diag = postprocessing_diagnostics()
# NOTE: we don't have any postprocessing data during testing, so only
# test the entry that does exist
assert red_diag["catalog_status"] == 0
assert red_diag["reduction_status"] == 0
assert len(red_diag["ar_nodes"]) == 0
assert len(red_diag["conditions"]) == 0

# for nodes we have data to check
assert len(red_diag["ar_nodes"]) == 3
for i in range(3):
assert "time" in red_diag["ar_nodes"][i]
assert red_diag["ar_nodes"][i]["node"] == "autoreducer4.com"

msgs = [node["msg"] for node in red_diag["ar_nodes"] if "msg" in node]
print(msgs)
assert len(msgs) == 2
assert "PID: 7" in msgs
assert "Last msg: testinst_42: REDUCTION.COMPLETE" in msgs

def test_pvstreamer_diagnostics(self):
from reporting.dasmon.view_util import pvstreamer_diagnostics

Expand Down
2 changes: 1 addition & 1 deletion tests/test_DASMONPageView.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def testVerifyDASMONPageView(self, dasmon_diagnostics):
tree = etree.parse(StringIO(dasmon_diagnostics.text), parser)
table_content = tree.xpath("//tr/td//text()")
# verify number of entries in the tables
expected_number_of_entries = 43
expected_number_of_entries = 48
assert len(table_content) == expected_number_of_entries
# -- DASMON diagnostics
status = table_content[1]
Expand Down

0 comments on commit 154c713

Please sign in to comment.