import argparse
import requests
from bs4 import BeautifulSoup
from packaging.version import Version, InvalidVersion
import sys
from reportlab.lib.pagesizes import letter
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
from colorama import Fore, Style, init
from tqdm import tqdm
import html

init(autoreset=True)  # Initialize colorama and automatically reset colors after each print


def fetch_html(url: str) -> str:
    """Fetch a page and return its HTML, or an empty string on failure."""
    try:
        response = requests.get(url, timeout=30)  # timeout guards against hanging requests
        response.raise_for_status()
        return response.text
    except requests.RequestException as e:
        print(f"Error fetching {url}: {e}")
        return ""


def parse_html(html_text: str) -> list:
    """Extract vulnerability links and affected-version ranges from the page table."""
    soup = BeautifulSoup(html_text, "html.parser")
    table = soup.find("table", id="sortable-table")
    if not table:
        return []
    rows = table.find_all("tr", class_="vue--table__row")
    results = []
    for row in rows:
        info = {}
        link = row.find("a")
        chip = row.find("span", class_="vue--chip__value")
        if link and chip:
            info["link"] = link.get_text(strip=True)
            info["chip"] = chip.get_text(strip=True)
            results.append(info)
    return results


def load_requirements(file_path: str) -> list:
    """Read a requirements file, skipping blank lines and comments."""
    requirements = []
    try:
        with open(file_path, "r") as file:
            for line in file:
                line = line.strip()
                if line and not line.startswith("#"):
                    requirements.append(line)
    except FileNotFoundError:
        print(f"Error: File {file_path} not found.")
        sys.exit(1)
    return requirements


def version_in_range(version, range_str: str) -> bool:
    """Return True if `version` falls inside a range string such as "[1.0.0,2.0.0)"."""
    if version is None:
        # No pinned version: only flag open-ended ranges such as "[1.0.0,)".
        return range_str.strip().endswith(",)")
    try:
        v = Version(version)
    except InvalidVersion:
        return False
    for range_part in range_str.split(","):
        range_part = range_part.strip()
        bound = range_part.strip("[]()")
        if not bound:
            # An empty bound (e.g. the "[" in "[,2.0.0)") means unbounded on that side.
            continue
        try:
            if range_part.endswith(")"):
                # Exclusive upper bound.
                if v >= Version(bound):
                    return False
            elif range_part.startswith("["):
                # Inclusive lower bound.
                if v < Version(bound):
                    return False
        except InvalidVersion:
            return False
    return True


def check_vulnerabilities(requirements: list, base_url: str) -> str:
    """Look up each requirement and collect the vulnerabilities affecting its version."""
    results = []
    for req in tqdm(requirements, desc="Checking vulnerabilities", unit="dependency"):
        if "==" in req:
            package_name, version = req.split("==", 1)
        else:
            package_name, version = req, None
        url = f"{base_url}{package_name}"
        # print(f"Fetching data for {package_name} from {url}")
        html_content = fetch_html(url)
        if html_content:
            extracted_data = parse_html(html_content)
            if extracted_data:
                relevant_vulns = []
                for vuln in extracted_data:
                    if version_in_range(version, vuln["chip"]):
                        relevant_vulns.append(vuln)
                if relevant_vulns:
                    result = f"Vulnerabilities found for {package_name}:\n"
                    for vuln in relevant_vulns:
                        result += f"  - {vuln['link']}\n"
                    results.append(result)
    return "\n".join(results)


def save_to_file(output_path: str, data: str):
    """Write the report in a format chosen from the output file extension."""
    if output_path.endswith(".html"):
        save_as_html(output_path, data)
    elif output_path.endswith(".pdf"):
        save_as_pdf(output_path, data)
    elif output_path.endswith(".md"):
        save_as_markdown(output_path, data)
    else:
        save_as_txt(output_path, data)


def save_as_html(output_path: str, data: str):
    escaped_data = html.escape(data)
    html_content = f"""
{escaped_data}