Skip to content

Commit

Permalink
3595 notebook summary - model stratify operator Part 1 (#3732)
Browse files Browse the repository at this point in the history
  • Loading branch information
jryu01 authored Jun 3, 2024
1 parent cf7e637 commit 4916a2a
Show file tree
Hide file tree
Showing 10 changed files with 191 additions and 13 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,16 @@
:state="node.state"
@update-state="(state: any) => emit('update-state', state)"
/>
<!-- TODO: Uncomment and enable this when summary UI is ready -->
<!-- <tera-operator-annotation
v-if="activeOutputSummary === undefined"
:state="node.state"
@update-state="(state: any) => emit('update-state', state)"
/> -->
<!-- TODO: Make this as a summary component and render for the operator supporting AI generated summary-->
<!-- <div v-else>
{{ activeOutputSummary === '' ? 'Generating AI summary...' : activeOutputSummary }}
</div> -->
</template>
</tera-drilldown-header>
<tera-columnar-panel>
Expand Down Expand Up @@ -75,6 +85,7 @@ import { isEmpty } from 'lodash';
import Menu from 'primevue/menu';
import Button from 'primevue/button';
import TeraOutputDropdown from '@/components/drilldown/tera-output-dropdown.vue';
// import { getActiveOutputSummary } from '@/services/workflow';

const props = defineProps<{
node: WorkflowNode<any>;
Expand Down Expand Up @@ -126,6 +137,7 @@ const outputOptions = computed(() => {
}
return [];
});
// const activeOutputSummary = computed(() => getActiveOutputSummary(props.node));

const toggle = (event) => {
menu.value.toggle(event);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,8 @@ const submitQuestion = () => {
const message = props.kernelManager.sendMessage('llm_request', {
request: questionString.value
});
emit('question-asked');
emit('question-asked', questionString.value);
// May prefer to use a manual status rather than following this. TBD. Both options work for now
message.register('status', (data) => {
kernelStatus.value = data.content.execution_state;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
:context-language="contextLanguage"
@llm-output="(data: any) => appendCode(data, 'code')"
@llm-thought-output="(data: any) => llmThoughts.push(data)"
@question-asked="llmThoughts = []"
@question-asked="updateLlmQuery"
>
<template #toolbar-right-side>
<Button
Expand Down Expand Up @@ -166,6 +166,7 @@ const contextLanguage = ref<string>('python3');
const defaultCodeText =
'# This environment contains the variable "model" \n# which is displayed on the right';
const codeText = ref(defaultCodeText);
const llmQuery = ref('');
const llmThoughts = ref<any[]>([]);
const executeResponse = ref({
Expand All @@ -185,6 +186,11 @@ const menuItems = computed(() => [
}
]);
const updateLlmQuery = (query: string) => {
llmThoughts.value = [];
llmQuery.value = query;
};
const appendCode = (data: any, property: string) => {
const newCode = data.content[property] as string;
if (newCode) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import { getModel } from '@/services/model';
import { createNotebookFromCode } from '@/services/notebook';
import type { Operation, BaseState } from '@/types/workflow';
import { WorkflowOperationTypes } from '@/types/workflow';

Expand All @@ -18,6 +20,8 @@ export interface StratifyGroup {
}

// One saved entry of stratify notebook code, plus the LLM interaction that produced it.
export interface StratifyCode {
	llmQuery: string; // LLM prompt used to generate the code; '' when the code was hand-written
	llmThoughts: any[]; // intermediate LLM "thought" events captured while generating the code
	code: string; // the code text that was saved/executed
	timestamp: number; // Date.now() at save time
}
Expand Down Expand Up @@ -70,5 +74,13 @@ export const StratifyMiraOperation: Operation = {
hasCodeBeenRun: false
};
return init;
},
createNotebook: async (state: StratifyOperationStateMira, value?: any[] | null) => {
const modelIdToLoad = value?.[0];
const outputModel = await getModel(modelIdToLoad);
const code = state.strataCodeHistory?.[0].code ?? '';
// TODO: Add llm query and thought to the notebook
const notebook = createNotebookFromCode(code, 'python3', { 'application/json': outputModel });
return notebook;
}
};
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
:context-language="'python3'"
@llm-output="(data: any) => processLLMOutput(data)"
@llm-thought-output="(data: any) => llmThoughts.push(data)"
@question-asked="llmThoughts = []"
@question-asked="updateLlmQuery"
>
<template #toolbar-right-side>
<Button label="Run" size="small" icon="pi pi-play" @click="runCodeStratify" />
Expand Down Expand Up @@ -116,6 +116,7 @@ import TeraNotebookJupyterInput from '@/components/llm/tera-notebook-jupyter-inp
import teraNotebookJupyterThoughtOutput from '@/components/llm/tera-notebook-jupyter-thought-output.vue';
import { createModel, getModel } from '@/services/model';
import { WorkflowNode, OperatorStatus } from '@/types/workflow';
import { logger } from '@/utils/logger';
import Button from 'primevue/button';
Expand Down Expand Up @@ -180,6 +181,7 @@ const kernelManager = new KernelSessionManager();
let editor: VAceEditorInstance['_editor'] | null;
const codeText = ref('');
const llmQuery = ref('');
const llmThoughts = ref<any[]>([]);
const sampleAgentQuestions = [
Expand All @@ -188,6 +190,11 @@ const sampleAgentQuestions = [
'What is cartesian_control in stratify?'
];
const updateLlmQuery = (query: string) => {
llmThoughts.value = [];
llmQuery.value = query;
};
const updateStratifyGroupForm = (config: StratifyGroup) => {
const state = _.cloneDeep(props.node.state);
state.strataGroup = config;
Expand Down Expand Up @@ -421,20 +428,19 @@ const runCodeStratify = () => {
});
};
// FIXME: Copy pasted in 3 locations, could be written cleaner and in a service
// FIXME: Copy pasted in 3 locations, could be written cleaner and in a service. Migrate it to use saveCodeToState from @/services/notebook
const saveCodeToState = (code: string, hasCodeBeenRun: boolean) => {
const state = _.cloneDeep(props.node.state);
state.hasCodeBeenRun = hasCodeBeenRun;
// for now only save the last code executed, may want to save all code executed in the future
const codeHistoryLength = props.node.state.strataCodeHistory.length;
const timestamp = Date.now();
const llm = { llmQuery: llmQuery.value, llmThoughts: llmThoughts.value };
if (codeHistoryLength > 0) {
state.strataCodeHistory[0] = { code, timestamp };
state.strataCodeHistory[0] = { code, timestamp, ...llm };
} else {
state.strataCodeHistory.push({ code, timestamp });
state.strataCodeHistory.push({ code, timestamp, ...llm });
}
emit('update-state', state);
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -360,7 +360,13 @@ function appendInputPort(
* */
function appendOutput(
node: WorkflowNode<any> | null,
port: { type: string; label?: string; value: any; state?: any; isSelected?: boolean }
port: {
type: string;
label?: string;
value: any;
state?: any;
isSelected?: boolean;
}
) {
if (!node) return;
Expand Down Expand Up @@ -389,10 +395,26 @@ function appendOutput(
node.outputs = node.outputs.filter((d) => d.value);
selectOutput(node, uuid);
generateSummary(node, outputPort);
workflowDirty = true;
}
async function generateSummary(node: WorkflowNode<any>, outputPort: WorkflowOutput<any>) {
outputPort.summary = ''; // Indicating that the summary generation is initiated
const result = await workflowService.generateSummary(
node,
outputPort,
registry.getOperation(node.operationType)?.createNotebook ?? null
);
if (!result) return;
const updateNode = wf.value.nodes.find((n) => n.id === node.id);
const updateOutput = (updateNode?.outputs ?? []).find((o) => o.id === outputPort.id);
if (!updateNode || !updateOutput) return;
updateOutput.summary = result.summary;
updateOutput.label = result.title;
updateOutputPort(updateNode, updateOutput);
}
function updateWorkflowNodeState(node: WorkflowNode<any> | null, state: any) {
if (!node) return;
workflowService.updateNodeState(wf.value, node.id, state);
Expand Down
11 changes: 11 additions & 0 deletions packages/client/hmi-client/src/services/beaker.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import axios, { AxiosHeaders } from 'axios';

// Axios instance scoped to the beaker proxy endpoint.
const BeakerAPI = axios.create({
	baseURL: '/beaker',
	headers: new AxiosHeaders()
});

// Request an AI-generated summary (title + text) for the given notebook document.
export const summarizeNotebook = async (notebook: any) => {
	const response = await BeakerAPI.post('/summary', { notebook });
	return response.data;
};
83 changes: 80 additions & 3 deletions packages/client/hmi-client/src/services/notebook.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,21 @@ import { WorkflowNode } from '@/types/workflow';

// Snapshot of one notebook code entry stored in an operator node's state.
export interface NotebookHistory {
	code: string; // the code text that was saved/executed
	llmQuery: string; // LLM prompt used to generate the code; defaults to '' (see saveCodeToState)
	llmThoughts: any[]; // intermediate LLM "thought" events captured while generating the code
	timestamp: number; // Date.now() at save time
}

// A common pattern used to save code from a notebook within an operator
// This is ready to be ported to nodes such as tera-model-config and tera-stratify-mira
// Not ported yet since this will ruin the states of the nodes that already exist due to their differently named properties
export const saveCodeToState = (node: WorkflowNode<any>, code: string, hasCodeRun: boolean) => {
export const saveCodeToState = (
node: WorkflowNode<any>,
code: string,
hasCodeRun: boolean,
llmQuery: string = '',
llmThoughts: any[] = []
) => {
const state = cloneDeep(node.state);

if (!('notebookHistory' in state) || !('hasCodeRun' in state)) return state;
Expand All @@ -19,9 +27,78 @@ export const saveCodeToState = (node: WorkflowNode<any>, code: string, hasCodeRu
const notebookHistoryLength = state.notebookHistory.length;
const timestamp = Date.now();
if (notebookHistoryLength > 0) {
state.notebookHistory[0] = { code, timestamp };
state.notebookHistory[0] = { code, timestamp, llmQuery, llmThoughts };
} else {
state.notebookHistory.push({ code, timestamp });
state.notebookHistory.push({ code, timestamp, llmQuery, llmThoughts });
}
return state;
};

/**
* Create a notebook from a code
* @param code code to be added to the notebook
* @param executeResult output of the code execution if any
* @param language language of the code
* @param llmQuery llm query used to generate the code if any
* @param thought llm thought generated from the query if any
* @returns
*/
/**
 * Create a Jupyter notebook document (nbformat 4) from a single piece of code.
 * @param code code to be added to the notebook's code cell
 * @param language language of the code, recorded in the notebook's language_info
 * @param executionResult output of the code execution if any; attached to the code cell as an execute_result
 * @param llmQuery llm query used to generate the code if any; adds a leading beaker 'query' cell
 * @param llmThoughts llm thought events generated from the query if any; embedded in the query cell
 * @returns the notebook object
 */
export const createNotebookFromCode = (
	code: string,
	language: string,
	executionResult?: any,
	llmQuery?: string,
	llmThoughts: any[] = []
) => {
	// TODO: Consider using jataware/beaker-kernel library to generate notebook (https://github.com/jataware/beaker-kernel/blob/886b2b3913ca1460f0301a5cd97cbcf15de609bc/beaker-ts/src/notebook.ts#L414)
	const notebook = {
		nbformat: 4,
		nbformat_minor: 5,
		cells: [] as any[],
		metadata: {
			kernelspec: {
				display_name: 'Beaker Kernel',
				name: 'beaker',
				language: 'beaker'
			},
			language_info: {
				name: language,
				display_name: language
			}
		}
	};
	if (llmQuery) {
		// Represent the LLM interaction as a beaker 'query' cell that precedes the code cell
		const beakerQueryCell = {
			cell_type: 'query',
			events: llmThoughts.map((thought) => ({ type: 'thought', content: thought.content })),
			metadata: {},
			source: llmQuery,
			status: 'idle'
		};
		notebook.cells.push(beakerQueryCell);
	}
	const beakerCodeCell = {
		cell_type: 'code',
		execution_count: 1,
		metadata: {},
		outputs: [] as any[],
		source: code,
		status: 'idle'
	};
	if (executionResult) {
		// Make a shallow copy of the execution result
		const data = { ...executionResult };
		// Make sure the values of the data are stringified, as the beaker summary endpoint seems to have issues with object json values
		Object.keys(data).forEach((type) => {
			if (typeof data[type] !== 'string') {
				data[type] = JSON.stringify(data[type]);
			}
		});
		beakerCodeCell.outputs.push({
			output_type: 'execute_result',
			data
		});
	}
	notebook.cells.push(beakerCodeCell);
	return notebook;
};
27 changes: 27 additions & 0 deletions packages/client/hmi-client/src/services/workflow.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import type {
WorkflowOutput
} from '@/types/workflow';
import { WorkflowPortStatus, OperatorStatus } from '@/types/workflow';
import { summarizeNotebook } from './beaker';

/**
* Captures common actions performed on workflow nodes/edges. The functions here are
Expand Down Expand Up @@ -432,12 +433,38 @@ export function selectOutput(
cascadeInvalidateDownstream(operator, nodeCache);
}

/**
 * Look up the summary text of the node's currently-active output port.
 * Returns undefined when no output matches the active id or the output has no summary.
 */
export function getActiveOutputSummary(node: WorkflowNode<any>) {
	const activeOutput = node.outputs.find(({ id }) => id === node.active);
	return activeOutput?.summary;
}

/**
 * Merge the fields of updatedOutputPort into the node's matching output port (matched by id), in place.
 * No-op when the node has no port with that id.
 */
export function updateOutputPort(node: WorkflowNode<any>, updatedOutputPort: WorkflowOutput<any>) {
	const outputPort = node.outputs.find((port) => port.id === updatedOutputPort.id);
	if (!outputPort) return;
	// Object.assign mutates the found port in place; reassigning its return value was redundant
	Object.assign(outputPort, updatedOutputPort);
}

// Keep track of the summary generation requests to prevent multiple requests for the same workflow output
const summaryGenerationRequestIds = new Set<string>();

/**
 * Ask the beaker service to summarize the notebook produced by createNotebookFn for the
 * given output port. Returns null when generation is skipped (no node, no notebook builder,
 * or a request for this port is already in flight); returns a fallback title/summary on failure.
 */
export async function generateSummary(
	node: WorkflowNode<any>,
	outputPort: WorkflowOutput<any>,
	createNotebookFn: ((state: any, value: WorkflowPort['value']) => Promise<any>) | null
) {
	const alreadyRunning = summaryGenerationRequestIds.has(outputPort.id);
	if (!node || !createNotebookFn || alreadyRunning) return null;
	summaryGenerationRequestIds.add(outputPort.id);
	try {
		const notebook = await createNotebookFn(outputPort.state, outputPort.value);
		return await summarizeNotebook(notebook);
	} catch {
		// Surface the failure to the user through the summary text itself
		return { title: outputPort.label, summary: 'Generating AI summary has failed.' };
	} finally {
		summaryGenerationRequestIds.delete(outputPort.id);
	}
}

// Check if the current-state matches that of the output-state.
// Note operatorState subsumes the keys of the outputState
export const isOperatorStateInSync = (
Expand Down
4 changes: 4 additions & 0 deletions packages/client/hmi-client/src/types/workflow.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,8 @@ export interface Operation {

inputs: OperationData[];
outputs: OperationData[];

createNotebook?: (state: any, value: WorkflowPort['value']) => Promise<any>;
}

// Defines the data-exchange between WorkflowNode
Expand All @@ -87,6 +89,8 @@ export interface WorkflowPort {
export interface WorkflowOutput<S> extends WorkflowPort {
isSelected?: boolean;
operatorStatus?: OperatorStatus;
summary?: string;
summaryHasBeenEdited?: boolean;
state?: Partial<S>;
timestamp?: Date;
}
Expand Down

0 comments on commit 4916a2a

Please sign in to comment.