Commit

Update language identifiers for coverity and sonarqube
lylebarner committed Feb 10, 2025
1 parent 8f9ccf3 commit 3cf2ce7
Showing 3 changed files with 48 additions and 33 deletions.
2 changes: 2 additions & 0 deletions scrub/tools/templates/coverity.template
@@ -33,6 +33,8 @@ fi
if [[ "${{SOURCE_LANG}}" =~ "python" ]]; then
find ${{SOURCE_DIR}} -iname "*.py" >> "$file_list"
fi
if [[ "${{SOURCE_LANG}}" =~ "py" ]]; then
find ${{SOURCE_DIR}} -iname "*.py" >> "$file_list"
if [[ "${{SOURCE_LANG}}" =~ "ruby" ]]; then
find ${{SOURCE_DIR}} -iname "*.rb" >> "$file_list"
fi
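Note on the new check (an illustration, not part of the diff): with a quoted right-hand side, bash's =~ operator performs a literal substring match, so "py" is satisfied by a SOURCE_LANG of either "py" or "python". A minimal standalone sketch, using plain shell variables in place of the template's doubled-brace placeholders:

    # Each of these values contains "py", so the Python file search runs either way.
    for SOURCE_LANG in py python "python,java"; do
        if [[ "${SOURCE_LANG}" =~ "py" ]]; then
            echo "${SOURCE_LANG}: Python files will be listed"
        fi
    done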
75 changes: 44 additions & 31 deletions scrub/tools/templates/sonarqube.template
@@ -53,6 +53,9 @@ fi
if [[ "${{SOURCE_LANG}}" =~ "python" ]]; then
file_extension_filters="$file_extension_filters,**/*.py"
fi
if [[ "${{SOURCE_LANG}}" =~ "py" ]]; then
file_extension_filters="$file_extension_filters,**/*.py"
fi
if [[ "${{SOURCE_LANG}}" =~ "rpg" ]]; then
file_extension_filters="$file_extension_filters,**/*.rpg,**/*.rpgle,**/*.sqlrpgle,**/*.RPG,**/*.RPGLE,**/*.SQLRPGLE"
fi
@@ -141,7 +144,7 @@ fi
 PAGE_SIZE=500
 # Get the first page
 RESULTS_FILE=${{TOOL_ANALYSIS_DIR}}/sonarqube_issues_1.json
-curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/issues/search?ps=$PAGE_SIZE&componentKeys=${{SONARQUBE_PROJECT}}&p=1&${{SONARQUBE_CURL_FLAGS}}" -o $RESULTS_FILE
+curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/issues/search?ps=$PAGE_SIZE&componentKeys=${{SONARQUBE_PROJECT}}&p=1&languages=${{SOURCE_LANG}}${{SONARQUBE_CURL_FLAGS}}" -o $RESULTS_FILE

 # Get the number of remaining pages
 TOTAL_RESULTS=$(grep -E '[0-9]+' -m 1 -o -a $RESULTS_FILE | sed -n 1p)
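For reference, this is roughly how the updated first-page request renders once the placeholders are filled in. The server URL, project key, and token below are made up, SOURCE_LANG is assumed to be "py", and SONARQUBE_CURL_FLAGS is omitted:

    # Illustrative rendering of the new issues request with the languages filter.
    SONARQUBE_SERVER="https://sonarqube.example.com"
    SONARQUBE_PROJECT="my_project"
    SONARQUBE_TOKEN="example_token"
    PAGE_SIZE=500
    curl -u "${SONARQUBE_TOKEN}": \
        "${SONARQUBE_SERVER}/api/issues/search?ps=${PAGE_SIZE}&componentKeys=${SONARQUBE_PROJECT}&p=1&languages=py" \
        -o sonarqube_issues_1.json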
@@ -154,23 +157,15 @@ fi
 for ((CURRENT_PAGE=2; CURRENT_PAGE <= TOTAL_PAGES; CURRENT_PAGE++));
 do
     # Get the page
-    RESULTS_FILE=${{TOOL_ANALYSIS_DIR}}/sonarqube_issues_$PAGE.json
-    curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/issues/search?ps=500&componentKeys=${{SONARQUBE_PROJECT}}&languages=${{SOURCE_LANG}}&p=$PAGE&${{SONARQUBE_CURL_FLAGS}}" -o $RESULTS_FILE
-    # Check to see if the file is empty
+    RESULTS_FILE=${{TOOL_ANALYSIS_DIR}}/sonarqube_issues_$CURRENT_PAGE.json
+    curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/issues/search?ps=500&componentKeys=${{SONARQUBE_PROJECT}}&languages=${{SOURCE_LANG}}&p=$CURRENT_PAGE&${{SONARQUBE_CURL_FLAGS}}" -o $RESULTS_FILE
+
+    # Check the file contents
     if [ ! -s "$RESULTS_FILE" ]; then
         exit 1
-    fi
-    # Check the contents, verify file is not empty, and make sure the max page hasn't been reached
-    if grep -q "Can return only the first 10000 results" $RESULTS_FILE; then
-        echo "WARNING: Not all results have been retrieved."
-        MORE_RESULTS=false
-    elif [ $PAGE -gt 20 ]; then
-        MORE_RESULTS=false
-    elif grep -q "\"issues\":\[\]" $RESULTS_FILE; then
-        rm -f $RESULTS_FILE
-        MORE_RESULTS=false
-    else
-        PAGE=$((PAGE+1))
+    elif grep -q "Can return only the first 10000 results" $RESULTS_FILE; then
+        rm -f $RESULTS_FILE
+        break
     fi
 done

@@ -192,25 +187,43 @@ do
     # Get the page
     RESULTS_FILE=${{TOOL_ANALYSIS_DIR}}/sonarqube_hotspots_$CURRENT_PAGE.json
     curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/hotspots/search?ps=$PAGE_SIZE&projectKey=${{SONARQUBE_PROJECT}}&p=$CURRENT_PAGE&${{SONARQUBE_CURL_FLAGS}}" -o $RESULTS_FILE
+
+    # Check the file contents
+    if [ ! -s "$RESULTS_FILE" ]; then
+        exit 1
+    elif grep -q "Can return only the first 10000 results" $RESULTS_FILE; then
+        rm -f $RESULTS_FILE
+        break
+    fi
 done

 # Get project metrics from the SonarQube server
 curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/measures/component_tree?component=${{SONARQUBE_PROJECT}}&ps=500&qualifiers=TRK&metricKeys=files,functions,lines,ncloc,comment_lines,complexity,cognitive_complexity,violations,vulnerabilities,security_hotspots,coverage,line_coverage,branch_coverage,sqale_index,duplicated_lines_density" -o "${{TOOL_ANALYSIS_DIR}}/sonarqube_metrics_project.json"

-# Get the file level metrics from the SonarQube server
-PAGE=1
-MORE_RESULTS=true
-while $MORE_RESULTS; do
-    METRICS_FILE="${{TOOL_ANALYSIS_DIR}}/sonarqube_metrics_file_$PAGE.json"
-    curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/measures/component_tree?component=${{SONARQUBE_PROJECT}}&ps=500&p=$PAGE&qualifiers=FIL&strategy=all&metricKeys=files,functions,lines,ncloc,comment_lines,complexity,cognitive_complexity,violations,vulnerabilities,security_hotspots,coverage,line_coverage,branch_coverage,sqale_index,duplicated_lines_density" -o "${{TOOL_ANALYSIS_DIR}}/sonarqube_metrics_file_$PAGE.json"
-
-    # Check if there are more results
-    if grep -q "\"components\":\[\]" $METRICS_FILE; then
-        rm -f $METRICS_FILE
-        MORE_RESULTS=false
-    elif [ $PAGE -gt 20 ]; then
-        MORE_RESULTS=false
-    else
-        PAGE=$((PAGE+1))
+# Retrieve the file metrics from the SonarQube server
+# Get the first page
+METRICS_FILE="${{TOOL_ANALYSIS_DIR}}/sonarqube_metrics_file_1.json"
+curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/measures/component_tree?component=${{SONARQUBE_PROJECT}}&ps=500&p=1&qualifiers=FIL&strategy=all&metricKeys=files,functions,lines,ncloc,comment_lines,complexity,cognitive_complexity,violations,vulnerabilities,security_hotspots,coverage,line_coverage,branch_coverage,sqale_index,duplicated_lines_density" -o $METRICS_FILE
+
+# Get the number of remaining pages
+TOTAL_RESULTS=$(( $(grep -E '[0-9]+' -m 1 -o -a $METRICS_FILE | sed -n 3p) ))
+TOTAL_PAGES=$(( ( TOTAL_RESULTS / PAGE_SIZE ) + ( TOTAL_RESULTS % PAGE_SIZE > 0 ) ))
+if (( TOTAL_PAGES > 20 )); then
+    TOTAL_PAGES=20
+fi
+
+# Get the rest of the metrics pages
+for ((CURRENT_PAGE=2; CURRENT_PAGE <= TOTAL_PAGES; CURRENT_PAGE++));
+do
+    # Get the page
+    METRICS_FILE="${{TOOL_ANALYSIS_DIR}}/sonarqube_metrics_file_$CURRENT_PAGE.json"
+    curl -u ${{SONARQUBE_TOKEN}}: "${{SONARQUBE_SERVER}}/api/measures/component_tree?component=${{SONARQUBE_PROJECT}}&ps=500&p=$CURRENT_PAGE&qualifiers=FIL&strategy=all&metricKeys=files,functions,lines,ncloc,comment_lines,complexity,cognitive_complexity,violations,vulnerabilities,security_hotspots,coverage,line_coverage,branch_coverage,sqale_index,duplicated_lines_density" -o $METRICS_FILE
+
+    # Check the file contents
+    if [ ! -s "$METRICS_FILE" ]; then
+        exit 1
+    elif grep -q "Can return only the first 10000 results" $METRICS_FILE; then
+        rm -f $METRICS_FILE
+        break
     fi
 done
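The page-count arithmetic used by the paging loops in this template is easier to follow with concrete numbers (the totals below are illustrative):

    # Worked example: 1237 results at a page size of 500 need 3 pages
    # (2 full pages plus 1 for the remainder). The 20-page cap corresponds to
    # the 10,000-result limit behind the "Can return only the first 10000
    # results" message (20 * 500 = 10000).
    PAGE_SIZE=500
    TOTAL_RESULTS=1237
    TOTAL_PAGES=$(( ( TOTAL_RESULTS / PAGE_SIZE ) + ( TOTAL_RESULTS % PAGE_SIZE > 0 ) ))
    if (( TOTAL_PAGES > 20 )); then
        TOTAL_PAGES=20
    fi
    echo "$TOTAL_PAGES"   # prints 3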
4 changes: 2 additions & 2 deletions scrub/utils/scrub_utilities.py
@@ -372,8 +372,8 @@ def parse_common_configs(user_conf_file, raw_override_values):
             source_langs[i] = 'c,cpp'
         elif source_lang == 'j':
             source_langs[i] = 'java'
-        elif source_lang == 'p':
-            source_langs[i] = 'python'
+        elif source_lang == 'p' or source_lang == 'python':
+            source_langs[i] = 'py'
         elif source_lang == 'js':
             source_langs[i] = 'javascript'
     scrub_conf_data.update({'source_lang': ','.join(source_langs)})
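Together with the template changes above, the configuration parser now normalizes either user spelling to the shorter identifier, which is also the value handed to the SonarQube languages filter. A small shell rendition of that branch, for illustration only (values are made up):

    # 'p' and 'python' both become 'py'; the other identifiers shown in the
    # diff context keep their existing mappings.
    for user_value in p python j js; do
        lang="$user_value"
        case "$lang" in
            p|python) lang="py" ;;
            j)        lang="java" ;;
            js)       lang="javascript" ;;
        esac
        echo "$user_value -> $lang"
    done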
