Skip to content

Commit

Permalink
Commented out the Upload trivy scanned_results step and made alignment changes
Browse files Browse the repository at this point in the history

Signed-off-by: hansinikarunarathne <[email protected]>
  • Loading branch information
hansinikarunarathne committed Jun 27, 2024
1 parent bc7a2e6 commit e140f6c
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 31 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/trivy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,10 @@ jobs:
cd hack
python3 trivy_scan.py
- name: Upload trivy scanned_results
uses: actions/upload-artifact@v4
with:
name: trivy_scanned_results
path: image_lists/
# - name: Upload trivy scanned_results
# uses: actions/upload-artifact@v4
# with:
# name: trivy_scanned_results
# path: image_lists/


51 changes: 25 additions & 26 deletions hack/trivy_scan.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ def save_images(wg, images, version):
output_file = f"../image_lists/kf_{version}_{wg}_images.txt"
with open(output_file, 'w') as f:
f.write('\n'.join(images))
log(f"File {output_file} successfully created", flush=True)
log(f"File {output_file} successfully created")

def validate_semantic_version(version):
# Validates a semantic version string (e.g., "0.1.2" or "latest").
Expand All @@ -72,7 +72,7 @@ def validate_semantic_version(version):

def extract_images(version):
version = validate_semantic_version(version)
log(f"Running the script using Kubeflow version: {version}", flush=True)
log(f"Running the script using Kubeflow version: {version}")

all_images = set() # Collect all unique images across workgroups

Expand All @@ -87,7 +87,7 @@ def extract_images(version):
# Execute `kustomize build` to render the kustomization file
result = subprocess.run(['kustomize', 'build', root], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
except subprocess.CalledProcessError as e:
log(f"ERROR:\t Failed \"kustomize build\" command for directory: {root}. See error above", flush=True)
log(f"ERROR:\t Failed \"kustomize build\" command for directory: {root}. See error above")
continue

# Use regex to find lines with 'image: <image-name>:<version>' or 'image: <image-name>'
Expand All @@ -112,14 +112,14 @@ def extract_images(version):



log("Started scanning images", flush=True)
log("Started scanning images")

# Get list of text files excluding "kf_latest_all_images.txt"
files = [f for f in glob.glob(os.path.join(DIRECTORY, "*.txt")) if not f.endswith("kf_latest_all_images.txt")]

# Loop through each text file in the specified directory
for file in files:
log(f"Scanning images in {file}", flush=True)
log(f"Scanning images in {file}")

file_base_name = os.path.basename(file).replace('.txt', '')

Expand All @@ -146,31 +146,30 @@ def extract_images(version):

scan_output_file = os.path.join(file_reports_dir, f"{image_name_scan}_scan.json")

log(f"Scanning ",line, flush=True)
log(f"Scanning ",line)

try:
result = subprocess.run(["trivy", "image", "--format", "json", "--output", scan_output_file, line], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
log(result.stdout, flush=True)

with open(scan_output_file, 'r') as json_file:
scan_data = json.load(json_file)

if not scan_data.get('Results'):
log(f"No vulnerabilitiy found in {image_name}:{image_tag}", flush=True)
log(f"No vulnerabilities found in {image_name}:{image_tag}")
else:
vulnerabilitiy = [
vulnerabilities_list = [
result['Vulnerabilities']
for result in scan_data['Results']
if 'Vulnerabilities' in result and result['Vulnerabilities']
]

if not vulnerabilitiy:
log(f"The vulnerability detection may be insufficient because security updates are not provided for {image_name}:{image_tag}", flush=True)
if not vulnerabilities_list:
log(f"The vulnerabilities detection may be insufficient because security updates are not provided for {image_name}:{image_tag}\n")
else:
severity_counts = {"LOW": 0, "MEDIUM": 0, "HIGH": 0, "CRITICAL": 0}
for vulns in vulnerabilitiy:
for vuln in vulns:
severity = vuln.get('Severity', 'UNKNOWN')
for vulnerabilities in vulnerabilities_list:
for vulnerability in vulnerabilities:
severity = vulnerability.get('Severity', 'UNKNOWN')
if severity == 'UNKNOWN':
continue
elif severity in severity_counts:
Expand All @@ -197,16 +196,16 @@ def extract_images(version):
json.dump(report, report_file, indent=4)

except subprocess.CalledProcessError as e:
log(f"Error scanning {image_name}:{image_tag}", flush=True)
log(e.stderr, flush=True)
log(f"Error scanning {image_name}:{image_tag}")
log(e.stderr)

# Combine all the JSON files into a single file with severity counts for all images
json_files = glob.glob(os.path.join(severity_count, "*.json"))

output_file = os.path.join(ALL_SEVERITY_COUNTS, f"{file_base_name}.json")

if not json_files:
log(f"No JSON files found in '{severity_count}'. Skipping combination.", flush=True)
log(f"No JSON files found in '{severity_count}'. Skipping combination.")
else:
combined_data = []
for json_file in json_files:
Expand All @@ -216,7 +215,7 @@ def extract_images(version):
with open(output_file, 'w') as of:
json.dump({"data": combined_data}, of, indent=4)

log(f"JSON files successfully combined into '{output_file}'", flush=True)
log(f"JSON files successfully combined into '{output_file}'")

# File to save summary of the severity counts for WGs as JSON format.
summary_file = os.path.join(SUMMARY_OF_SEVERITY_COUNTS, "severity_summary_in_json_format.json")
Expand All @@ -243,7 +242,7 @@ def extract_images(version):
filename = filename.capitalize()

else:
log(f"Skipping invalid filename format: {file_path}", flush=True)
log(f"Skipping invalid filename format: {file_path}")
continue

with open(file_path, 'r') as f:
Expand Down Expand Up @@ -285,15 +284,15 @@ def extract_images(version):
}


log("Summary in Json Format:", flush=True)
log(json.dumps(merged_data, indent=4), flush=True)
log("Summary in Json Format:")
log(json.dumps(merged_data, indent=4))


# Write the final output to a file
with open(summary_file, 'w') as summary_f:
json.dump(merged_data, summary_f, indent=4)

log(f"Summary written to: {summary_file} as JSON format", flush=True)
log(f"Summary written to: {summary_file} as JSON format")

# Load JSON content from the file
with open(summary_file, 'r') as file:
Expand Down Expand Up @@ -322,14 +321,14 @@ def extract_images(version):
table.add_row([groupnames[group_name], value["images"], value["CRITICAL"], value["HIGH"], value["MEDIUM"], value["LOW"]])

# log the table
log(table, flush=True)
log(table)


# Write the table output to a file in the specified folder
output_file = SUMMARY_OF_SEVERITY_COUNTS + '/summary_of_severity_counts_for_WGs_in_table.txt'
with open(output_file, 'w') as f:
f.write(str(table))

log("Output saved to:", output_file, flush=True)
log("Severity counts with images respect to WGs are saved in the",ALL_SEVERITY_COUNTS, flush=True)
log("Scanned Json reports on images are saved in" ,SCAN_REPORTS_DIR, flush=True)
log("Output saved to:", output_file)
log("Severity counts with images respect to WGs are saved in the",ALL_SEVERITY_COUNTS)
log("Scanned Json reports on images are saved in" ,SCAN_REPORTS_DIR)

0 comments on commit e140f6c

Please sign in to comment.