
Commit

Merge pull request #132 from ORNL/dev
Dev
renan-souza authored Sep 13, 2024
2 parents 69f9de9 + 95b16b3 commit 9549911
Showing 62 changed files with 10,523 additions and 1,132 deletions.
8 changes: 5 additions & 3 deletions .github/workflows/run-tests.yml
@@ -1,4 +1,4 @@
name: Unit, integration, and notebook tests
name: Unit, integration, and notebook tests (Py39)
on: [push]
# branches: [ "disabled" ]

@@ -10,10 +10,10 @@ jobs:
if: "!contains(github.event.head_commit.message, 'CI Bot')"
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.8
- name: Set up Python 3.9
uses: actions/setup-python@v3
with:
python-version: "3.8"
python-version: "3.9"
- name: Check python version
run: python --version
- name: Install our dependencies
@@ -28,6 +28,8 @@ jobs:
pytest --ignore=tests/decorator_tests/ml_tests/llm_tests
- name: Test notebooks
run: |
pip install -e .
python flowcept/flowcept_webserver/app.py &
sleep 3
export FLOWCEPT_SETTINGS_PATH=~/.flowcept/settings.yaml
pytest --nbmake "notebooks/" --nbmake-timeout=600 --ignore=notebooks/dask_from_CLI.ipynb
11 changes: 8 additions & 3 deletions .github/workflows/test-python-310-macos.yml
@@ -1,7 +1,8 @@
name: Test Python 3.10 - MacOS
on:
pull_request:
branches: [ "dev", "main" ]
branches: [ "disabled" ] #[ "dev", "main" ]
types: [opened, synchronize, reopened]
jobs:
build:
runs-on: macos-latest
@@ -24,10 +25,13 @@ jobs:
pip install -r extra_requirements/dev-requirements.txt
- name: Install docker
run: |
brew install docker docker-compose
brew install docker docker-compose
brew install colima
colima start
mkdir -p ~/.docker/cli-plugins
ln -sfn /usr/local/opt/docker-compose/bin/docker-compose ~/.docker/cli-plugins/docker-compose
echo $HOMEBREW_PREFIX
ln -sfn $HOMEBREW_PREFIX/opt/docker-compose/bin/docker-compose ~/.docker/cli-plugins/docker-compose
#ln -sfn /usr/local/opt/docker-compose/bin/docker-compose ~/.docker/cli-plugins/docker-compose
- name: Run Docker Compose
run: |
docker compose version
@@ -39,4 +43,5 @@ jobs:
run: |
python flowcept/flowcept_webserver/app.py &
sleep 3
export FLOWCEPT_SETTINGS_PATH=~/.flowcept/settings.yaml
pytest --nbmake "notebooks/" --nbmake-timeout=600 --ignore=notebooks/dask_from_CLI.ipynb
7 changes: 4 additions & 3 deletions .github/workflows/test-python-310.yml
@@ -2,6 +2,7 @@ name: Test Python 3.10
on:
pull_request:
branches: [ "dev", "main" ]
types: [opened, synchronize, reopened]
# branches: [ "disabled" ]

jobs:
@@ -10,8 +11,6 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 60
if: "!contains(github.event.head_commit.message, 'CI Bot')"
# env:
# FLOWCEPT_SETTINGS_PATH: 'resources/settings.yaml'
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.10
@@ -29,9 +28,11 @@ jobs:
run: docker compose -f deployment/compose.yml up -d
- name: Test with pytest
run: |
pytest --ignore=tests/decorator_tests/ml_tests/llm_tests/
pytest --ignore=tests/decorator_tests/ml_tests/llm_tests
- name: Test notebooks
run: |
pip install -e .
python flowcept/flowcept_webserver/app.py &
sleep 3
export FLOWCEPT_SETTINGS_PATH=~/.flowcept/settings.yaml
pytest --nbmake "notebooks/" --nbmake-timeout=600 --ignore=notebooks/dask_from_CLI.ipynb
1 change: 1 addition & 0 deletions .github/workflows/test-python-311.yml
@@ -36,4 +36,5 @@ jobs:
run: |
python flowcept/flowcept_webserver/app.py &
sleep 3
export FLOWCEPT_SETTINGS_PATH=~/.flowcept/settings.yaml
pytest --nbmake "notebooks/" --ignore=notebooks/dask_from_CLI.ipynb
37 changes: 0 additions & 37 deletions .github/workflows/test-python-39.yml

This file was deleted.

2 changes: 2 additions & 0 deletions .gitignore
@@ -15,3 +15,5 @@ notebooks/tb_*
notebooks/scheduler_file.json
test.py
**/*dump*
time.txt
tmp/
4 changes: 4 additions & 0 deletions README.md
@@ -66,6 +66,10 @@ plugin:
And other variables depending on the plugin. For instance, in Dask, timestamp creation by workers adds interception overhead.
## Install AMD GPU Lib
https://rocm.docs.amd.com/projects/amdsmi/en/latest/
## See also
- [Zambeze Repository](https://github.com/ORNL/zambeze)
26 changes: 14 additions & 12 deletions deployment/compose.yml
@@ -10,21 +10,23 @@ services:
flowcept_mongo:
container_name: flowcept_mongo
image: mongo:latest
# volumes:
# - /Users/rsr/Downloads/mongo_data/db:/data/db
ports:
- 27017:27017


# This is just for the cases where one does not want to use the same Redis instance for caching and messaging, but
# it's not required to have separate instances.
# local_interceptor_cache:
# container_name: local_interceptor_cache
# image: redis
# ports:
# - 60379:6379
# # This is just for the cases where one does not want to use the same Redis instance for caching and messaging, but
# # it's not required to have separate instances.
# # local_interceptor_cache:
# # container_name: local_interceptor_cache
# # image: redis
# # ports:
# # - 60379:6379

zambeze_rabbitmq:
container_name: zambeze_rabbitmq
image: rabbitmq:3.11-management
ports:
- 5672:5672
- 15672:15672
container_name: zambeze_rabbitmq
image: rabbitmq:3.11-management
ports:
- 5672:5672
- 15672:15672
1 change: 0 additions & 1 deletion extra_requirements/amd-requirements.txt
@@ -1 +0,0 @@
pyamdgpuinfo==2.1.6
2 changes: 1 addition & 1 deletion extra_requirements/dask-requirements.txt
@@ -1,5 +1,5 @@
tomli==1.1.0
dask[distributed]==2022.12.0
dask[distributed]==2023.11.0
#dask[distributed]==2023.5.0


2 changes: 2 additions & 0 deletions extra_requirements/dev-requirements.txt
@@ -11,3 +11,5 @@ torch
torchvision
datasets
torchtext
sacremoses
nltk
2 changes: 1 addition & 1 deletion extra_requirements/responsible_ai-requirements.txt
@@ -1,2 +1,2 @@
shap==0.42.1
#shap==0.42.1
torch
15 changes: 8 additions & 7 deletions flowcept/__init__.py
@@ -12,13 +12,14 @@
WorkflowObject,
)

try:
from flowcept.instrumentation.decorators.responsible_ai import (
model_explainer,
model_profiler,
)
except:
pass
# These resp_ai imports below are adding long wait in flowcept imports!
# try:
# from flowcept.instrumentation.decorators.responsible_ai import (
# #model_explainer,
# #model_profiler,
# )
# except:
# pass

if Vocabulary.Settings.ZAMBEZE_KIND in flowcept.configs.ADAPTERS:
try:
2 changes: 0 additions & 2 deletions flowcept/analytics/__init__.py
@@ -8,5 +8,3 @@
describe_col,
describe_cols,
)

from flowcept.analytics.plot import heatmap, scatter2d_with_colors
97 changes: 97 additions & 0 deletions flowcept/commons/daos/autoflush_buffer.py
@@ -0,0 +1,97 @@
from queue import Queue
from typing import Union, List, Dict, Callable

import msgpack
from redis import Redis
from redis.client import PubSub
from threading import Thread, Lock, Event
from time import time, sleep

import flowcept.commons
from flowcept.commons.daos.keyvalue_dao import KeyValueDAO
from flowcept.commons.utils import perf_log
from flowcept.commons.flowcept_logger import FlowceptLogger
from flowcept.configs import (
REDIS_HOST,
REDIS_PORT,
REDIS_CHANNEL,
REDIS_PASSWORD,
JSON_SERIALIZER,
REDIS_BUFFER_SIZE,
REDIS_INSERTION_BUFFER_TIME,
PERF_LOG,
REDIS_URI,
)

from flowcept.commons.utils import GenericJSONEncoder


class AutoflushBuffer:
def __init__(
self,
max_size,
flush_interval,
flush_function: Callable,
*flush_function_args,
**flush_function_kwargs,
):
self.logger = FlowceptLogger()
self._max_size = max_size
self._flush_interval = flush_interval
self._buffers = [[], []]
self._current_buffer_index = 0
self._swap_event = Event()
self._stop_event = Event()

self._timer_thread = Thread(target=self.time_based_flush)
self._timer_thread.start()

self._flush_thread = Thread(target=self._flush_buffers)
self._flush_thread.start()

self._flush_function = flush_function
self._flush_function_args = flush_function_args
self._flush_function_kwargs = flush_function_kwargs

def append(self, item):
# if self.stop_event.is_set():
# return
buffer = self._buffers[self._current_buffer_index]
buffer.append(item)
if len(buffer) >= self._max_size:
self._swap_event.set()

def time_based_flush(self):
while not self._stop_event.is_set():
self._swap_event.wait(self._flush_interval)
if not self._stop_event.is_set():
self._swap_event.set()

def _do_flush(self):
old_buffer_index = self._current_buffer_index
self._current_buffer_index = 1 - self._current_buffer_index
old_buffer = self._buffers[old_buffer_index]
if old_buffer:
self._flush_function(
old_buffer[:],
*self._flush_function_args,
**self._flush_function_kwargs,
)
self._buffers[old_buffer_index] = []

def _flush_buffers(self):
while not self._stop_event.is_set() or any(self._buffers):
self._swap_event.wait()
self._swap_event.clear()

self._do_flush()

if self._stop_event.is_set():
break

def stop(self):
self._stop_event.set()
self._swap_event.set()
self._flush_thread.join()
self._timer_thread.join()
self._do_flush()
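
The new AutoflushBuffer alternates between two internal lists: appends go to the active list, and a background flush thread hands the filled list to a caller-supplied flush function whenever the size threshold is hit or the flush interval elapses, so producers are never blocked by a flush in progress. A minimal usage sketch follows (an editor's illustration, not part of this commit); the print_batch function and its "stdout" label are hypothetical stand-ins for a real sink such as a database inserter, and running it assumes a flowcept installation whose configs can load.

# Editor's sketch (not part of this commit): drive AutoflushBuffer with a toy
# flush function. The class, its constructor parameters, append(), and stop()
# come from the diff above; print_batch and the "stdout" label are hypothetical.
from time import sleep

from flowcept.commons.daos.autoflush_buffer import AutoflushBuffer


def print_batch(batch, sink_name):
    # Receives a copy of the filled buffer plus the extra positional args
    # passed to the constructor (*flush_function_args).
    print(f"[{sink_name}] flushing {len(batch)} item(s)")


# max_size=5: flush as soon as 5 items accumulate;
# flush_interval=2: also flush at least every 2 seconds.
buffer = AutoflushBuffer(5, 2, print_batch, "stdout")

for i in range(12):
    buffer.append({"task_id": i})
    sleep(0.1)

# The timer and flush threads are non-daemon, so stop() must be called:
# it sets the stop/swap events, joins both threads, and flushes any remainder.
buffer.stop()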