Skip to content

Add typings result #458

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 4 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ export PERF=${BACKEND}/tests/performance
export PERF_SCENARIO_V1=${PERF}/scenarios/test-backend-v1.yml
export PERF_REPORTS=${PERF}/reports/
export PERF_NAMES=${BACKEND}/tests/clients_test.csv
export PERF_UTILS=${PERF}/scenarios/utils

-include ${APP_PATH}/${GIT_TOOLS}/artifacts.SCW
export SCW_REGION?=fr-par
Expand Down Expand Up @@ -334,7 +335,6 @@ backend/tests/clients_test.csv:

# test artillery
test-perf-v1:
sed -i -E "s/;/,/g" backend/tests/clients_test.csv
make -C ${APP_PATH}/${GIT_TOOLS} test-api-generic PERF_SCENARIO=${PERF_SCENARIO_V1} PERF_TEST_ENV=api-perf PERF_REPORTS=${PERF_REPORTS} DC_NETWORK=${DC_NETWORK} PERF_NAMES=${PERF_NAMES};

backend-perf-clinic:
Expand Down
4 changes: 2 additions & 2 deletions backend/src/buildRequest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -460,14 +460,14 @@ const buildAggregation = (aggs: string[], aggsSize: number, afterKey: number): a
}
return aggregation
})
aggregationRequest.myBuckets = {
aggregationRequest.bucketResults = {
composite: {
size: 1000,
sources: aggregationArray
}
}
if (afterKey !== undefined) {
aggregationRequest.myBuckets.composite.after = afterKey
aggregationRequest.bucketResults.composite.after = afterKey
} else {
aggs.map((agg: string) => {
aggregationRequest[`${agg}_count`] = {
Expand Down
32 changes: 18 additions & 14 deletions backend/src/controllers/aggregation.controller.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ export class AggregationController extends Controller {
this.setStatus(400);
return { msg: "error - simple and complex request at the same time" };
}
await this.streamAggs((request).res, requestInput, accept)
await this.streamAggs(request.res, requestInput, accept)
}

/**
Expand Down Expand Up @@ -107,10 +107,10 @@ export class AggregationController extends Controller {
this.setStatus(400);
return { msg: requestInput.errors };
}
await this.streamAggs((request).res, requestInput, accept)
await this.streamAggs(request.res, requestInput, accept)
}

private async streamAggs(response: any, requestInput: any, accept: string) {
private async streamAggs(response: express.Response, requestInput: RequestInput, accept: string) {
let requestBuild = buildRequest(requestInput);
const transformedAggs = requestInput.aggs.mask.transform(requestInput.aggs.value)
let result = await runRequest(requestBuild, null);
Expand All @@ -119,13 +119,17 @@ export class AggregationController extends Controller {
let buckets
const cardinality: any = {}
let { took: delay } = result
if (result.aggregations.myBuckets) {
afterKey = result.aggregations.myBuckets.after_key
if (result.error) {
this.setStatus(result.status);
return { msg: result.error };
}
if (result.aggregations.bucketResults) {
afterKey = result.aggregations.bucketResults.after_key
transformedAggs.forEach((agg: string) => {
cardinality[agg] = result.aggregations[`${agg}_count`].value
response.setHeader(`total-results-${agg}`, result.aggregations[`${agg}_count`].value);
});
buckets = result.aggregations.myBuckets.buckets
buckets = result.aggregations.bucketResults.buckets
} else {
transformedAggs.forEach((agg: string) => {
cardinality[agg] = result.aggregations[agg].buckets.length
Expand All @@ -138,7 +142,7 @@ export class AggregationController extends Controller {
if (buckets.length > 0) {
buckets.forEach((bucketItem: any, ind: number) => {
const aggKeys: any = {}
if (result.aggregations.myBuckets) {
if (result.aggregations.bucketResults) {
Object.entries(bucketItem.key).forEach(([key, value]) => {
aggKeys[key] = value
})
Expand All @@ -155,13 +159,13 @@ export class AggregationController extends Controller {
response.write(Object.values(aggKeys).join(",") + '\n')
})
}
while (result.aggregations.myBuckets && result.aggregations.myBuckets.buckets.length > 0 ) {
while (result.aggregations.bucketResults && result.aggregations.bucketResults.buckets.length > 0 ) {
requestInput.afterKey = afterKey
requestBuild = buildRequest(requestInput);
result = await runRequest(requestBuild, null);
afterKey = result.aggregations.myBuckets.after_key
afterKey = result.aggregations.bucketResults.after_key
delay += result.took
const { buckets: afterBucket } = result.aggregations.myBuckets
const { buckets: afterBucket } = result.aggregations.bucketResults
if (afterBucket.length > 0 ) {
afterBucket.forEach((bucketItem: any) => {
const aggKeys: any = {}
Expand Down Expand Up @@ -202,18 +206,18 @@ export class AggregationController extends Controller {
response.write(JSON.stringify(firstItem[0]))
buckets.forEach((bucketItem: any) => response.write("," + JSON.stringify(bucketItem)))
}
while (result.aggregations.myBuckets && result.aggregations.myBuckets.buckets.length > 0 ) {
while (result.aggregations.bucketResults && result.aggregations.bucketResults.buckets.length > 0 ) {
requestInput.afterKey = afterKey
requestBuild = buildRequest(requestInput);
result = await runRequest(requestBuild, null);
afterKey = result.aggregations.myBuckets.after_key
afterKey = result.aggregations.bucketResults.after_key
delay += result.took
const { buckets: afterBucket } = result.aggregations.myBuckets
const { buckets: afterBucket } = result.aggregations.bucketResults
if (afterBucket.length > 0 ) {
afterBucket.forEach((bucketItem: any) => response.write("," + JSON.stringify(bucketItem)))
}
}
response.write(`],"delay": ${delay as string}}}`)
response.write(`],"delay": ${String(delay)}}}`)
response.end();
}
}
Expand Down
12 changes: 11 additions & 1 deletion backend/src/models/result.ts
Original file line number Diff line number Diff line change
Expand Up @@ -148,10 +148,20 @@ export interface ResultRawES {
hits: ResultRawHit[]
};
aggregations?: {
[key: string]: any;
'doc_count_error_upper_bound': number;
'sum_other_doc_count': number;
buckets: any[];
bucketResults: {
buckets: {
key: string;
doc_count: number;
}[];
after_key: number;
}
}
status?: number;
statusText?: string;
error?: boolean;
}

export interface ResultRawHit {
Expand Down
23 changes: 10 additions & 13 deletions backend/src/runRequest.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { estypes } from '@elastic/elasticsearch';
import { getClient } from './elasticsearch';
import loggerStream from './logger';
import { BodyResponse, ScrolledResponse } from './models/body';
Expand Down Expand Up @@ -52,26 +53,22 @@ export const runRequest = async (body: BodyResponse|ScrolledResponse, scroll: st
});

return {
took: 0,
hits: {
total: {
value: 1
value: 0
},
hits: [
{
_id: 0,
_source: {
status: error.statusCode || 500,
statusText: error.message || 'Internal Server Error',
error: true
}
}
]
}
max_score: 0,
hits: []
},
status: error.statusCode || 500,
statusText: error.message || 'Internal Server Error',
error: true
};
}
};

export const runBulkRequest = async (bulkRequest: any): Promise<any> => {
export const runBulkRequest = async (bulkRequest: any): Promise<estypes.MsearchResponse> => {
const client = getClient();
try {
const response = await client.msearch(bulkRequest);
Expand Down
29 changes: 25 additions & 4 deletions backend/tests/performance/scenarios/test-backend-v1.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
config:
plugins:
expect:
reportFailuresAsErrors: true
outputFormat: prettyError
ensure:
maxErrorRate: 1
max: 500
p95: 200
environments:
api-perf:
target: "http://backend:8080"
Expand All @@ -18,24 +26,37 @@ config:
payload:
- path: names.csv
fields:
- firstName
- lastName
- id
- country
- commune
- birthDate
- lastName
- firstName
order: random
# NOTE: 'skipHeader' is not a recognized Artillery payload option; the correct key is 'skipFirstRow'
skipHeader: true
#delimiter: ";"
delimiter: ";"
Comment on lines 36 to +37
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

Ensure correct CSV parsing settings

The comment indicates skipHeader isn’t working. According to Artillery’s payload docs, the key should be skipFirstRow rather than skipHeader. Without this change, the header row may still be injected as data. For example:

 payload:
   - path: names.csv
     fields: [...]
-    skipHeader: true
+    skipFirstRow: true
     delimiter: ";"
🤖 Prompt for AI Agents
In backend/tests/performance/scenarios/test-backend-v1.yml around lines 37 to
38, the CSV parsing setting uses the incorrect key `skipHeader`. Replace
`skipHeader` with `skipFirstRow` to correctly skip the header row during CSV
parsing as per Artillery's payload documentation, ensuring the header is not
injected as data.

scenarios:
- name: backend-v1
flow:
- get:
url: "/deces/api/v1/search?firstName={{ firstName }}&lastName={{ lastName }}&birthDate={{ birthDate }}&fuzzy=false"
expect:
- statusCode: 200
- contentType: json
capture:
- json: '$'
as: resultGet
#- log: "Res GET: {{ resultGet }}"
- post:
url: /deces/api/v1/search
json:
firstName: "{{ firstName }}"
lastName: "{{ lastName }}"
birthDate: "{{ birthDate }}"
capture:
- json: '$'
as: resultPost
expect:
- statusCode: 200
- contentType: json
#- log: "Res POST: {{ resultPost }}"